
martins.js 993KB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
  1. /*!
  2. * MARTINS.js version 0.2.0
  3. * GPU-accelerated Augmented Reality for the web
  4. * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  5. * https://github.com/alemart/martins-js
  6. *
  7. * @license LGPL-3.0-or-later
  8. * Date: 2024-07-02T20:36:32.182Z
  9. */
  10. (function webpackUniversalModuleDefinition(root, factory) {
  11. if(typeof exports === 'object' && typeof module === 'object')
  12. module.exports = factory();
  13. else if(typeof define === 'function' && define.amd)
  14. define([], factory);
  15. else if(typeof exports === 'object')
  16. exports["Martins"] = factory();
  17. else
  18. root["Martins"] = factory();
  19. })(self, () => {
  20. return /******/ (() => { // webpackBootstrap
  21. /******/ var __webpack_modules__ = ({
  22. /***/ 774:
  23. /***/ ((module) => {
  24. /*!
  25. * Speedy Vision version 0.9.1
  26. * GPU-accelerated Computer Vision for JavaScript
  27. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  28. * https://github.com/alemart/speedy-vision
  29. *
  30. * @license Apache-2.0
  31. * Date: 2024-07-02T20:26:01.993Z
  32. */
  33. (function webpackUniversalModuleDefinition(root, factory) {
  34. if(true)
  35. module.exports = factory();
  36. else {}
  37. })(self, () => {
  38. return /******/ (() => { // webpackBootstrap
  39. /******/ var __webpack_modules__ = ({
  40. /***/ 2199:
  41. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
  42. "use strict";
  43. /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
  44. /* harmony export */ w: () => (/* binding */ Settings)
  45. /* harmony export */ });
  46. /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(6634);
  47. /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1001);
  48. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(9037);
  49. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(8581);
  50. /*
  51. * speedy-vision.js
  52. * GPU-accelerated Computer Vision for JavaScript
  53. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  54. *
  55. * Licensed under the Apache License, Version 2.0 (the "License");
  56. * you may not use this file except in compliance with the License.
  57. * You may obtain a copy of the License at
  58. *
  59. * http://www.apache.org/licenses/LICENSE-2.0
  60. *
  61. * Unless required by applicable law or agreed to in writing, software
  62. * distributed under the License is distributed on an "AS IS" BASIS,
  63. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  64. * See the License for the specific language governing permissions and
  65. * limitations under the License.
  66. *
  67. * settings.js
  68. * Global settings
  69. */
  70. /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
  71. /** @typedef {"raf" | "asap"} GPUPollingMode */
  72. /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
  73. /** @type {GPUPollingMode} Default GPU polling mode */
  74. const DEFAULT_GPU_POLLING_MODE = 'raf';
  75. /** @type {GPUPollingMode} GPU polling mode */
  76. let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
  77. /** @type {LoggingMode} logging mode */
  78. let loggingMode = 'default';
  79. /**
  80. * Global settings
  81. */
  82. class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q {
  83. /**
  84. * Power preference of the WebGL context
  85. * @returns {PowerPreference}
  86. */
  87. static get powerPreference() {
  88. return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
  89. }
  90. /**
  91. * Power preference of the WebGL context
  92. * @param {PowerPreference} value
  93. */
  94. static set powerPreference(value) {
  95. _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
  96. }
  97. /**
  98. * GPU polling mode
  99. * @returns {GPUPollingMode}
  100. */
  101. static get gpuPollingMode() {
  102. return gpuPollingMode;
  103. }
  104. /**
  105. * GPU polling mode
  106. * @param {GPUPollingMode} value
  107. */
  108. static set gpuPollingMode(value) {
  109. if (value !== 'raf' && value !== 'asap') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
  110. gpuPollingMode = value;
  111. }
  112. /**
  113. * Logging mode
  114. * @returns {LoggingMode}
  115. */
  116. static get logging() {
  117. return loggingMode;
  118. }
  119. /**
  120. * Logging mode
  121. * @param {LoggingMode} mode
  122. */
  123. static set logging(mode) {
  124. if (mode !== 'default' && mode !== 'none' && mode !== 'diagnostic') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);else if (mode === 'diagnostic') _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
  125. loggingMode = mode;
  126. }
  127. }
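/*
 * Usage sketch (added for illustration; the helper name is hypothetical and
 * not part of the library build): the Settings accessors above are static,
 * so callers configure Speedy Vision globally. Only values accepted by the
 * setters ('raf'/'asap' for polling, 'default'/'none'/'diagnostic' for
 * logging) are used here.
 */
function exampleConfigureSpeedySettings() {
    Settings.gpuPollingMode = 'asap'; // alternative to the default 'raf' mode
    Settings.logging = 'none';        // silence library logging
    return Settings.gpuPollingMode;   // 'asap'
}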
  128. /***/ }),
  129. /***/ 6306:
  130. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4248__) => {
  131. "use strict";
  132. /* harmony export */ __nested_webpack_require_4248__.d(__nested_webpack_exports__, {
  133. /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
  134. /* harmony export */ });
  135. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4248__(6465);
  136. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4248__(9037);
  137. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4248__(8581);
  138. /*
  139. * speedy-vision.js
  140. * GPU-accelerated Computer Vision for JavaScript
  141. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  142. *
  143. * Licensed under the Apache License, Version 2.0 (the "License");
  144. * you may not use this file except in compliance with the License.
  145. * You may obtain a copy of the License at
  146. *
  147. * http://www.apache.org/licenses/LICENSE-2.0
  148. *
  149. * Unless required by applicable law or agreed to in writing, software
  150. * distributed under the License is distributed on an "AS IS" BASIS,
  151. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  152. * See the License for the specific language governing permissions and
  153. * limitations under the License.
  154. *
  155. * speedy-matrix-expr.js
  156. * Symbolic matrix expressions
  157. */
  158. /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
  159. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
  160. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
  161. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  162. /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
  163. /** @const {Dtype2BufferType} */
  164. const DTYPE_TO_BUFFER_TYPE = Object.freeze({
  165. 'float32': Float32Array
  166. });
  167. /**
  168. * @abstract Matrix expression
  169. * It's an opaque object representing an algebraic
  170. * expression. It has no data attached to it.
  171. */
  172. class SpeedyMatrixExpr {
  173. /**
  174. * Constructor
  175. * @param {number} rows
  176. * @param {number} columns
  177. * @param {SpeedyMatrixDtype} dtype
  178. */
  179. constructor(rows, columns, dtype) {
  180. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
  181. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
  182. /** @type {number} number of rows */
  183. this._rows = rows | 0;
  184. /** @type {number} number of columns */
  185. this._columns = columns | 0;
  186. /** @type {SpeedyMatrixDtype} data type */
  187. this._dtype = dtype;
  188. }
  189. /**
  190. * Number of rows
  191. * @returns {number}
  192. */
  193. get rows() {
  194. return this._rows;
  195. }
  196. /**
  197. * Number of columns
  198. * @returns {number}
  199. */
  200. get columns() {
  201. return this._columns;
  202. }
  203. /**
  204. * Data type
  205. * @returns {SpeedyMatrixDtype}
  206. */
  207. get dtype() {
  208. return this._dtype;
  209. }
  210. /**
  211. * Default data type
  212. * @returns {SpeedyMatrixDtype}
  213. */
  214. static get DEFAULT_DTYPE() {
  215. return 'float32';
  216. }
  217. /**
  218. * Buffer types
  219. * @returns {Dtype2BufferType}
  220. */
  221. static get BUFFER_TYPE() {
  222. return DTYPE_TO_BUFFER_TYPE;
  223. }
  224. /**
  225. * Matrix addition
  226. * @param {SpeedyMatrixExpr} expr
  227. * @returns {SpeedyMatrixExpr}
  228. */
  229. plus(expr) {
  230. return new SpeedyMatrixAddExpr(this, expr);
  231. }
  232. /**
  233. * Matrix subtraction
  234. * @param {SpeedyMatrixExpr} expr
  235. * @returns {SpeedyMatrixExpr}
  236. */
  237. minus(expr) {
  238. return new SpeedyMatrixSubtractExpr(this, expr);
  239. }
  240. /**
  241. * Matrix multiplication
  242. * @param {SpeedyMatrixExpr|number} expr
  243. * @returns {SpeedyMatrixExpr}
  244. */
  245. times(expr) {
  246. if (typeof expr === 'number') return new SpeedyMatrixScaleExpr(this, expr);else return new SpeedyMatrixMultiplyExpr(this, expr);
  247. }
  248. /**
  249. * Matrix transposition
  250. * @returns {SpeedyMatrixExpr}
  251. */
  252. transpose() {
  253. return new SpeedyMatrixTransposeExpr(this);
  254. }
  255. /**
  256. * Matrix inversion
  257. * @returns {SpeedyMatrixExpr}
  258. */
  259. inverse() {
  260. return new SpeedyMatrixInvertExpr(this);
  261. }
  262. /**
  263. * Component-wise multiplication
  264. * @param {SpeedyMatrixExpr} expr
  265. * @returns {SpeedyMatrixExpr}
  266. */
  267. compMult(expr) {
  268. return new SpeedyMatrixCompMultExpr(this, expr);
  269. }
  270. /**
  271. * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
  272. * @param {SpeedyMatrixExpr} expr
  273. * @returns {SpeedyMatrixExpr}
  274. */
  275. ldiv(expr) {
  276. return new SpeedyMatrixLdivExpr(this, expr);
  277. }
  278. /**
  279. * Returns a human-readable string representation of the matrix expression
  280. * @returns {string}
  281. */
  282. toString() {
  283. return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
  284. }
  285. /**
  286. * Evaluate this expression
  287. * @abstract
  288. * @param {WebAssembly.Instance} wasm
  289. * @param {SpeedyMatrixWASMMemory} memory
  290. * @returns {SpeedyMatrix}
  291. */
  292. _evaluate(wasm, memory) {
  293. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  294. }
  295. }
  296. const {
  297. SpeedyMatrix
  298. } = __nested_webpack_require_4248__(4188);
  299. /**
  300. * @abstract operation storing a temporary matrix
  301. */
  302. class SpeedyMatrixTempExpr extends SpeedyMatrixExpr {
  303. /**
  304. * Constructor
  305. * @param {number} rows
  306. * @param {number} columns
  307. * @param {SpeedyMatrixDtype} dtype
  308. */
  309. constructor(rows, columns, dtype) {
  310. super(rows, columns, dtype);
  311. /** @type {SpeedyMatrix} holds the results of a computation */
  312. this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
  313. }
  314. }
  315. /**
  316. * @abstract unary operation
  317. */
  318. class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr {
  319. /**
  320. * Constructor
  321. * @param {number} rows rows of the output matrix
  322. * @param {number} columns columns of the output matrix
  323. * @param {SpeedyMatrixExpr} operand
  324. */
  325. constructor(rows, columns, operand) {
  326. super(rows, columns, operand.dtype);
  327. /** @type {SpeedyMatrixExpr} operand */
  328. this._operand = operand;
  329. }
  330. /**
  331. * Evaluate this expression
  332. * @param {WebAssembly.Instance} wasm
  333. * @param {SpeedyMatrixWASMMemory} memory
  334. * @returns {SpeedyMatrix}
  335. */
  336. _evaluate(wasm, memory) {
  337. const operand = this._operand._evaluate(wasm, memory);
  338. const result = this._tempMatrix;
  339. // allocate matrices
  340. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  341. const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
  342. // copy operand to WASM memory
  343. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
  344. // run the WASM routine
  345. this._compute(wasm, memory, resultptr, operandptr);
  346. // copy result from WASM memory
  347. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  348. // deallocate matrices
  349. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
  350. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  351. // done!
  352. return result;
  353. }
  354. /**
  355. * Compute the result of this operation
  356. * @abstract
  357. * @param {WebAssembly.Instance} wasm
  358. * @param {SpeedyMatrixWASMMemory} memory
  359. * @param {number} resultptr pointer to Mat32
  360. * @param {number} operandptr pointer to Mat32
  361. */
  362. _compute(wasm, memory, resultptr, operandptr) {
  363. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  364. }
  365. }
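/*
 * Note (added for clarity): SpeedyMatrixUnaryOperationExpr and the binary
 * variant below follow a template-method pattern. _evaluate() handles the
 * bookkeeping against the WASM heap (allocate, copy operands in, copy the
 * result out, deallocate), and each concrete subclass only overrides
 * _compute() with a single call into the corresponding WASM export.
 */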
  366. /**
  367. * @abstract binary operation
  368. */
  369. class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr {
  370. /**
  371. * Constructor
  372. * @param {number} rows rows of the output matrix
  373. * @param {number} columns columns of the output matrix
  374. * @param {SpeedyMatrixExpr} left left operand
  375. * @param {SpeedyMatrixExpr} right right operand
  376. */
  377. constructor(rows, columns, left, right) {
  378. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
  379. super(rows, columns, left.dtype);
  380. /** @type {SpeedyMatrixExpr} left operand */
  381. this._left = left;
  382. /** @type {SpeedyMatrixExpr} right operand */
  383. this._right = right;
  384. }
  385. /**
  386. * Evaluate this expression
  387. * @param {WebAssembly.Instance} wasm
  388. * @param {SpeedyMatrixWASMMemory} memory
  389. * @returns {SpeedyMatrix}
  390. */
  391. _evaluate(wasm, memory) {
  392. const left = this._left._evaluate(wasm, memory);
  393. const right = this._right._evaluate(wasm, memory);
  394. const result = this._tempMatrix;
  395. // allocate matrices
  396. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  397. const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
  398. const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
  399. // copy input matrices to WASM memory
  400. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
  401. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
  402. // run the WASM routine
  403. this._compute(wasm, memory, resultptr, leftptr, rightptr);
  404. // copy output matrix from WASM memory
  405. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  406. // deallocate matrices
  407. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
  408. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
  409. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  410. // done!
  411. return result;
  412. }
  413. /**
  414. * Compute the result of this operation
  415. * @abstract
  416. * @param {WebAssembly.Instance} wasm
  417. * @param {SpeedyMatrixWASMMemory} memory
  418. * @param {number} resultptr pointer to Mat32
  419. * @param {number} leftptr pointer to Mat32
  420. * @param {number} rightptr pointer to Mat32
  421. */
  422. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  423. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  424. }
  425. }
  426. /**
  427. * Transpose matrix
  428. */
  429. class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr {
  430. /**
  431. * Constructor
  432. * @param {SpeedyMatrixExpr} operand
  433. */
  434. constructor(operand) {
  435. super(operand.columns, operand.rows, operand);
  436. }
  437. /**
  438. * Compute result = operand^T
  439. * @param {WebAssembly.Instance} wasm
  440. * @param {SpeedyMatrixWASMMemory} memory
  441. * @param {number} resultptr pointer to Mat32
  442. * @param {number} operandptr pointer to Mat32
  443. */
  444. _compute(wasm, memory, resultptr, operandptr) {
  445. wasm.exports.Mat32_transpose(resultptr, operandptr);
  446. }
  447. }
  448. /**
  449. * Invert square matrix
  450. */
  451. class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr {
  452. /**
  453. * Constructor
  454. * @param {SpeedyMatrixExpr} operand
  455. */
  456. constructor(operand) {
  457. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
  458. super(operand.rows, operand.columns, operand);
  459. /** @type {number} size of the matrix */
  460. this._size = operand.rows;
  461. }
  462. /**
  463. * Compute result = operand ^ (-1)
  464. * @param {WebAssembly.Instance} wasm
  465. * @param {SpeedyMatrixWASMMemory} memory
  466. * @param {number} resultptr pointer to Mat32
  467. * @param {number} operandptr pointer to Mat32
  468. */
  469. _compute(wasm, memory, resultptr, operandptr) {
  470. switch (this._size) {
  471. case 0:
  472. break;
  473. case 1:
  474. wasm.exports.Mat32_inverse1(resultptr, operandptr);
  475. break;
  476. case 2:
  477. wasm.exports.Mat32_inverse2(resultptr, operandptr);
  478. break;
  479. case 3:
  480. wasm.exports.Mat32_inverse3(resultptr, operandptr);
  481. break;
  482. default:
  483. wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
  484. break;
  485. }
  486. }
  487. }
  488. /**
  489. * Multiply matrix by a scalar value
  490. */
  491. class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr {
  492. /**
  493. * Constructor
  494. * @param {SpeedyMatrixExpr} operand
  495. * @param {number} scalar
  496. */
  497. constructor(operand, scalar) {
  498. super(operand.rows, operand.columns, operand);
  499. /** @type {number} scalar value */
  500. this._scalar = +scalar;
  501. }
  502. /**
  503. * Compute result = scalar * operand
  504. * @param {WebAssembly.Instance} wasm
  505. * @param {SpeedyMatrixWASMMemory} memory
  506. * @param {number} resultptr pointer to Mat32
  507. * @param {number} operandptr pointer to Mat32
  508. */
  509. _compute(wasm, memory, resultptr, operandptr) {
  510. wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
  511. }
  512. }
  513. /**
  514. * Matrix addition
  515. */
  516. class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr {
  517. /**
  518. * Constructor
  519. * @param {SpeedyMatrixExpr} left left operand
  520. * @param {SpeedyMatrixExpr} right right operand
  521. */
  522. constructor(left, right) {
  523. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  524. super(left.rows, left.columns, left, right);
  525. }
  526. /**
  527. * Compute result = left + right
  528. * @param {WebAssembly.Instance} wasm
  529. * @param {SpeedyMatrixWASMMemory} memory
  530. * @param {number} resultptr pointer to Mat32
  531. * @param {number} leftptr pointer to Mat32
  532. * @param {number} rightptr pointer to Mat32
  533. */
  534. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  535. wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
  536. }
  537. }
  538. /**
  539. * Matrix subtraction
  540. */
  541. class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr {
  542. /**
  543. * Constructor
  544. * @param {SpeedyMatrixExpr} left left operand
  545. * @param {SpeedyMatrixExpr} right right operand
  546. */
  547. constructor(left, right) {
  548. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  549. super(left.rows, left.columns, left, right);
  550. }
  551. /**
  552. * Compute result = left - right
  553. * @param {WebAssembly.Instance} wasm
  554. * @param {SpeedyMatrixWASMMemory} memory
  555. * @param {number} resultptr pointer to Mat32
  556. * @param {number} leftptr pointer to Mat32
  557. * @param {number} rightptr pointer to Mat32
  558. */
  559. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  560. wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
  561. }
  562. }
  563. /**
  564. * Matrix multiplication
  565. */
  566. class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr {
  567. /**
  568. * Constructor
  569. * @param {SpeedyMatrixExpr} left left operand
  570. * @param {SpeedyMatrixExpr} right right operand
  571. */
  572. constructor(left, right) {
  573. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
  574. super(left.rows, right.columns, left, right);
  575. }
  576. /**
  577. * Compute result = left * right
  578. * @param {WebAssembly.Instance} wasm
  579. * @param {SpeedyMatrixWASMMemory} memory
  580. * @param {number} resultptr pointer to Mat32
  581. * @param {number} leftptr pointer to Mat32
  582. * @param {number} rightptr pointer to Mat32
  583. */
  584. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  585. wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
  586. }
  587. }
  588. /**
  589. * Component-wise multiplication
  590. */
  591. class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr {
  592. /**
  593. * Constructor
  594. * @param {SpeedyMatrixExpr} left left operand
  595. * @param {SpeedyMatrixExpr} right right operand
  596. */
  597. constructor(left, right) {
  598. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  599. super(right.rows, right.columns, left, right);
  600. }
  601. /**
  602. * Compute result = left <compMult> right
  603. * @param {WebAssembly.Instance} wasm
  604. * @param {SpeedyMatrixWASMMemory} memory
  605. * @param {number} resultptr pointer to Mat32
  606. * @param {number} leftptr pointer to Mat32
  607. * @param {number} rightptr pointer to Mat32
  608. */
  609. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  610. wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
  611. }
  612. }
  613. /**
  614. * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
  615. */
  616. class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr {
  617. /**
  618. * Constructor
  619. * @param {SpeedyMatrixExpr} left left operand
  620. * @param {SpeedyMatrixExpr} right right operand
  621. */
  622. constructor(left, right) {
  623. const m = left.rows,
  624. n = left.columns;
  625. // TODO right doesn't need to be a column vector
  626. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
  627. super(n, 1, left, right);
  628. }
  629. /**
  630. * Compute result = left \ right
  631. * @param {WebAssembly.Instance} wasm
  632. * @param {SpeedyMatrixWASMMemory} memory
  633. * @param {number} resultptr pointer to Mat32
  634. * @param {number} leftptr pointer to Mat32
  635. * @param {number} rightptr pointer to Mat32
  636. */
  637. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  638. wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
  639. }
  640. }
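/*
 * Usage sketch (illustrative; the helper name is hypothetical and not part of
 * the library build): matrix expressions are built fluently from the methods
 * of SpeedyMatrixExpr (plus, minus, times, transpose, inverse, compMult,
 * ldiv) and are only evaluated when a SpeedyMatrix consumes them, e.g. via
 * setToSync(). A and B are assumed to be 3x3 SpeedyMatrix instances.
 */
function exampleBuildExpression(A, B) {
    const expr = A.times(B).plus(A.transpose()).times(0.5); // no computation happens here
    const C = SpeedyMatrix.Zeros(3, 3);
    return C.setToSync(expr); // requires the WASM routines to be loaded (see SpeedyMatrix.ready())
}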
  641. /***/ }),
  642. /***/ 6465:
  643. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_21592__) => {
  644. "use strict";
  645. /* harmony export */ __nested_webpack_require_21592__.d(__nested_webpack_exports__, {
  646. /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
  647. /* harmony export */ });
  648. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_21592__(9192);
  649. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_21592__(8581);
  650. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_21592__(9037);
  651. /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_21592__(3816);
  652. /*
  653. * speedy-vision.js
  654. * GPU-accelerated Computer Vision for JavaScript
  655. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  656. *
  657. * Licensed under the Apache License, Version 2.0 (the "License");
  658. * you may not use this file except in compliance with the License.
  659. * You may obtain a copy of the License at
  660. *
  661. * http://www.apache.org/licenses/LICENSE-2.0
  662. *
  663. * Unless required by applicable law or agreed to in writing, software
  664. * distributed under the License is distributed on an "AS IS" BASIS,
  665. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  666. * See the License for the specific language governing permissions and
  667. * limitations under the License.
  668. *
  669. * speedy-matrix-wasm.js
  670. * WebAssembly bridge
  671. */
  672. /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  673. /**
  674. * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
  675. * @property {object} as
  676. * @property {WebAssembly.Memory} as.object
  677. * @property {Uint8Array} as.uint8
  678. * @property {Int32Array} as.int32
  679. * @property {Uint32Array} as.uint32
  680. * @property {Float32Array} as.float32
  681. * @property {Float64Array} as.float64
  682. */
  683. /**
  684. * @typedef {object} SpeedyMatrixWASMHandle
  685. * @property {WebAssembly.Instance} wasm
  686. * @property {SpeedyMatrixWASMMemory} memory
  687. * @property {WebAssembly.Module} module
  688. */
  689. /** @type {Uint8Array} WebAssembly binary */
  690. const WASM_BINARY = __nested_webpack_require_21592__(3575);
  691. /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
  692. let _instance = null;
  693. /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
  694. let _module = null;
  695. /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
  696. const _memory = (mem => ({
  697. as: {
  698. object: mem,
  699. uint8: new Uint8Array(mem.buffer),
  700. int32: new Int32Array(mem.buffer),
  701. uint32: new Uint32Array(mem.buffer),
  702. float32: new Float32Array(mem.buffer),
  703. float64: new Float64Array(mem.buffer)
  704. }
  705. }))(typeof WebAssembly === 'undefined' ? new Uint8Array(1024) :
  706. // use a filler
  707. new WebAssembly.Memory({
  708. initial: 16,
  709. // 1 MB
  710. maximum: 256
  711. }));
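/*
 * Note (added for clarity): the "augmented memory" above exposes the same
 * underlying ArrayBuffer through several typed-array views, so the WASM heap
 * can be read as bytes, 32-bit integers or floats without extra copies. A
 * WebAssembly.Memory page is 64 KiB, so initial: 16 corresponds to the 1 MB
 * mentioned in the comment above.
 */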
  712. /**
  713. * WebAssembly utilities
  714. */
  715. class SpeedyMatrixWASM {
  716. /**
  717. * Gets you the WASM instance, augmented memory & module
  718. * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
  719. */
  720. static ready() {
  721. // Check if WebAssembly is supported
  722. if (typeof WebAssembly === 'undefined') return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM('This application requires WebAssembly. Please update your system.'));
  723. // Endianness check
  724. if (!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN) return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`));
  725. // Get the WASM instance
  726. return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
  727. SpeedyMatrixWASM._ready(resolve, reject);
  728. });
  729. }
  730. /**
  731. * Synchronously gets you the WASM instance, augmented memory & module
  732. * @returns {SpeedyMatrixWASMHandle}
  733. */
  734. static get handle() {
  735. if (!_instance || !_module) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
  736. return {
  737. wasm: _instance,
  738. memory: _memory,
  739. module: _module
  740. };
  741. }
  742. /**
  743. * Gets you the WASM imports bound to a memory object
  744. * @param {SpeedyMatrixWASMMemory} memory
  745. * @returns {Object<string,Function>}
  746. */
  747. static imports(memory) {
  748. const obj = new SpeedyMatrixWASMImports(memory);
  749. return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype).filter(property => typeof obj[property] === 'function' && property !== 'constructor').reduce((imports, methodName) => (imports[methodName] = obj[methodName], imports), Object.create(null));
  750. }
  751. /**
  752. * Allocate a Mat32 in WebAssembly memory without copying any data
  753. * @param {WebAssembly.Instance} wasm
  754. * @param {SpeedyMatrixWASMMemory} memory
  755. * @param {SpeedyMatrix} matrix
  756. * @returns {number} pointer to the new Mat32
  757. */
  758. static allocateMat32(wasm, memory, matrix) {
  759. const dataptr = wasm.exports.malloc(matrix.data.byteLength);
  760. const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
  761. return matptr;
  762. }
  763. /**
  764. * Deallocate a Mat32 in WebAssembly
  765. * @param {WebAssembly.Instance} wasm
  766. * @param {SpeedyMatrixWASMMemory} memory
  767. * @param {number} matptr pointer to the allocated Mat32
  768. * @returns {number} NULL
  769. */
  770. static deallocateMat32(wasm, memory, matptr) {
  771. const dataptr = wasm.exports.Mat32_data(matptr);
  772. wasm.exports.free(matptr);
  773. wasm.exports.free(dataptr);
  774. return 0;
  775. }
  776. /**
  777. * Copy the data of a matrix to a WebAssembly Mat32
  778. * @param {WebAssembly.Instance} wasm
  779. * @param {SpeedyMatrixWASMMemory} memory
  780. * @param {number} matptr pointer to a Mat32
  781. * @param {SpeedyMatrix} matrix
  782. * @returns {number} matptr
  783. */
  784. static copyToMat32(wasm, memory, matptr, matrix) {
  785. // We assume the following:
  786. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  787. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  788. // 3. the data type is float32
  789. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  790. //matrix.dtype === 'float32' &&
  791. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  792. const dataptr = wasm.exports.Mat32_data(matptr);
  793. memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
  794. return matptr;
  795. }
  796. /**
  797. * Copy the data of a WebAssembly Mat32 to a matrix
  798. * @param {WebAssembly.Instance} wasm
  799. * @param {SpeedyMatrixWASMMemory} memory
  800. * @param {number} matptr pointer to a Mat32
  801. * @param {SpeedyMatrix} matrix
  802. * @returns {number} matptr
  803. */
  804. static copyFromMat32(wasm, memory, matptr, matrix) {
  805. // We assume the following:
  806. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  807. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  808. // 3. the data type is float32
  809. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  810. //matrix.dtype === 'float32' &&
  811. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  812. const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
  813. for (let offset = matrix.data.length - 1; offset >= 0; offset--) matrix.data[offset] = memory.as.float32[base + offset];
  814. return matptr;
  815. }
  816. /**
  817. * Polls the WebAssembly instance until it's ready
  818. * @param {function(SpeedyMatrixWASMHandle): void} resolve
  819. * @param {function(Error): void} reject
  820. * @param {number} [counter]
  821. */
  822. static _ready(resolve, reject, counter = 1000) {
  823. if (_instance !== null && _module !== null) resolve({
  824. wasm: _instance,
  825. memory: _memory,
  826. module: _module
  827. });else if (counter <= 0) reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));else setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
  828. }
  829. }
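/*
 * Usage sketch (illustrative; the helper name is hypothetical and not part of
 * the library build): the typical round trip with the helpers above,
 * mirroring what the expression classes in speedy-matrix-expr.js do.
 * `input` and `output` are assumed to be SpeedyMatrix instances whose shapes
 * are compatible with a transposition.
 */
function exampleTransposeViaWASM(input, output) {
    const { wasm, memory } = SpeedyMatrixWASM.handle; // throws if the routines aren't loaded yet
    const inputptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, input);
    const outputptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, output);
    SpeedyMatrixWASM.copyToMat32(wasm, memory, inputptr, input);     // JS -> WASM heap
    wasm.exports.Mat32_transpose(outputptr, inputptr);               // run the WASM routine
    SpeedyMatrixWASM.copyFromMat32(wasm, memory, outputptr, output); // WASM heap -> JS
    SpeedyMatrixWASM.deallocateMat32(wasm, memory, outputptr);
    SpeedyMatrixWASM.deallocateMat32(wasm, memory, inputptr);
    return output;
}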
  830. /**
  831. * Methods called from WASM
  832. */
  833. class SpeedyMatrixWASMImports {
  834. /**
  835. * Constructor
  836. * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
  837. */
  838. constructor(memory) {
  839. // find all methods of this object
  840. const methodNames = Object.getOwnPropertyNames(this.constructor.prototype).filter(property => typeof this[property] === 'function').filter(property => property !== 'constructor');
  841. // bind all methods to this object
  842. methodNames.forEach(methodName => {
  843. this[methodName] = this[methodName].bind(this);
  844. });
  845. /** @type {SpeedyMatrixWASMMemory} WASM memory */
  846. this.memory = memory;
  847. /** @type {CStringUtils} utilities related to C strings */
  848. this.cstring = new CStringUtils(memory);
  849. // done!
  850. return Object.freeze(this);
  851. }
  852. /**
  853. * Prints a message
  854. * @param {number} ptr pointer to char
  855. */
  856. print(ptr) {
  857. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
  858. }
  859. /**
  860. * Throws an error
  861. * @param {number} ptr pointer to char
  862. */
  863. fatal(ptr) {
  864. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
  865. }
  866. /**
  867. * Fills a memory segment with a byte
  868. * @param {number} value byte
  869. * @param {number} start memory address, inclusive
  870. * @param {number} end memory address greater than start, exclusive
  871. */
  872. bytefill(value, start, end) {
  873. this.memory.as.uint8.fill(value, start, end);
  874. }
  875. /**
  876. * Copy a memory segment to another segment
  877. * @param {number} target memory address, where we'll start writing
  878. * @param {number} start memory address, where we'll start copying (inclusive)
  879. * @param {number} end memory address, where we'll end the copy (exclusive)
  880. */
  881. copyWithin(target, start, end) {
  882. this.memory.as.uint8.copyWithin(target, start, end);
  883. }
  884. }
  885. /**
  886. * Utilities related to C strings
  887. */
  888. class CStringUtils {
  889. /**
  890. * Constructor
  891. * @param {SpeedyMatrixWASMMemory} memory
  892. */
  893. constructor(memory) {
  894. /** @type {TextDecoder} */
  895. this._decoder = new TextDecoder('utf-8');
  896. /** @type {SpeedyMatrixWASMMemory} */
  897. this._memory = memory;
  898. }
  899. /**
  900. * Convert a C string to a JavaScript string
  901. * @param {number} ptr pointer to char
  902. * @returns {string}
  903. */
  904. get(ptr) {
  905. const byte = this._memory.as.uint8;
  906. const size = this._memory.as.uint8.byteLength;
  907. let p = ptr;
  908. while (p < size && 0 !== byte[p]) ++p;
  909. return this._decoder.decode(byte.subarray(ptr, p));
  910. }
  911. }
  912. /**
  913. * WebAssembly loader
  914. * @param {SpeedyMatrixWASMMemory} memory
  915. */
  916. (function loadWASM(memory) {
  917. const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
  918. // Skip if WebAssembly is unsupported
  919. if (typeof WebAssembly === 'undefined') return;
  920. // Load the WASM binary
  921. _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY).then(data => base64decode(data)).then(bytes => WebAssembly.instantiate(bytes, {
  922. env: Object.assign({
  923. memory: memory.as.object
  924. }, SpeedyMatrixWASM.imports(memory))
  925. })).then(wasm => {
  926. _instance = wasm.instance;
  927. _module = wasm.module;
  928. wasm.instance.exports.srand(Date.now() * 0.001 & 0xffffffff); // srand(time(NULL))
  929. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
  930. }).catch(err => {
  931. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
  932. });
  933. })(_memory);
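/*
 * Note (added for clarity): loadWASM() above runs at module load time. It
 * base64-decodes WASM_BINARY, instantiates it with the augmented memory plus
 * the SpeedyMatrixWASMImports methods as the `env` imports, stores the result
 * in _instance / _module (which SpeedyMatrixWASM._ready() polls for), and
 * seeds the module's PRNG via srand().
 */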
  934. /***/ }),
  935. /***/ 4188:
  936. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_33268__) => {
  937. "use strict";
  938. __nested_webpack_require_33268__.r(__nested_webpack_exports__);
  939. /* harmony export */ __nested_webpack_require_33268__.d(__nested_webpack_exports__, {
  940. /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
  941. /* harmony export */ });
  942. /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_33268__(6306);
  943. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_33268__(6465);
  944. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_33268__(9192);
  945. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_33268__(9037);
  946. /*
  947. * speedy-vision.js
  948. * GPU-accelerated Computer Vision for JavaScript
  949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  950. *
  951. * Licensed under the Apache License, Version 2.0 (the "License");
  952. * you may not use this file except in compliance with the License.
  953. * You may obtain a copy of the License at
  954. *
  955. * http://www.apache.org/licenses/LICENSE-2.0
  956. *
  957. * Unless required by applicable law or agreed to in writing, software
  958. * distributed under the License is distributed on an "AS IS" BASIS,
  959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  960. * See the License for the specific language governing permissions and
  961. * limitations under the License.
  962. *
  963. * speedy-matrix.js
  964. * Matrix class
  965. */
  966. /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
  967. /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
  968. /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
  969. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  970. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
  971. /**
  972. * Matrix class
  973. */
  974. class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r {
  975. /**
  976. * @private
  977. *
  978. * Low-level constructor
  979. * @param {number} rows number of rows
  980. * @param {number} columns number of columns
  981. * @param {number} step0 step size between two consecutive elements (e.g., 1)
  982. * @param {number} step1 step size between two consecutive columns (e.g., rows)
  983. * @param {SpeedyMatrixBufferType} data entries in column-major format
  984. */
  985. constructor(rows, columns, step0, step1, data) {
  986. super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
  987. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
  988. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
  989. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.length + rows * columns === 0 ||
  990. // empty matrix and empty buffer, or
  991. data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
  992. );
  993. /** @type {number} step size between two consecutive elements */
  994. this._step0 = step0 | 0;
  995. /** @type {number} step size between two consecutive columns */
  996. this._step1 = step1 | 0;
  997. /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
  998. this._data = data;
  999. }
  1000. /**
  1001. * Create a new matrix with the specified size and entries
  1002. * @param {number} rows number of rows
  1003. * @param {number} columns number of columns
  1004. * @param {number[]} entries in column-major format
  1005. * @param {SpeedyMatrixDtype} [dtype] data type
  1006. * @returns {SpeedyMatrix}
  1007. */
  1008. static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1009. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1010. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
  1011. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1012. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
  1013. }
  1014. /**
  1015. * Create a new matrix filled with zeros with the specified size
  1016. * @param {number} rows number of rows
  1017. * @param {number} [columns] number of columns
  1018. * @param {SpeedyMatrixDtype} [dtype] data type
  1019. * @returns {SpeedyMatrix}
  1020. */
  1021. static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1022. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1023. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1024. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
  1025. }
  1026. /**
  1027. * Create a new matrix filled with ones with the specified size
  1028. * @param {number} rows number of rows
  1029. * @param {number} [columns] number of columns
  1030. * @param {SpeedyMatrixDtype} [dtype] data type
  1031. * @returns {SpeedyMatrix}
  1032. */
  1033. static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1034. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1035. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1036. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
  1037. }
  1038. /**
  1039. * Create a new identity matrix with the specified size
  1040. * @param {number} rows number of rows
  1041. * @param {number} [columns] number of columns
  1042. * @param {SpeedyMatrixDtype} [dtype] data type
  1043. * @returns {SpeedyMatrix}
  1044. */
  1045. static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1046. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1047. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1048. const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
  1049. for (let j = Math.min(rows, columns) - 1; j >= 0; j--) data[j * rows + j] = 1;
  1050. return new SpeedyMatrix(rows, columns, 1, rows, data);
  1051. }
  1052. /**
  1053. * Evaluate an expression synchronously and store the result in a new matrix
  1054. * @param {SpeedyMatrixExpr} expr matrix expression
  1055. * @returns {SpeedyMatrix}
  1056. */
  1057. static From(expr) {
  1058. return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
  1059. }
  1060. /**
  1061. * Returns a promise that resolves immediately if the WebAssembly routines
  1062. * are ready to be used, or as soon as they do become ready
  1063. * @returns {SpeedyPromise<void>}
  1064. */
  1065. static ready() {
  1066. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void 0);
  1067. }
  1068. /**
  1069. * Get the underlying buffer
  1070. * @returns {SpeedyMatrixBufferType}
  1071. */
  1072. get data() {
  1073. return this._data;
  1074. }
  1075. /**
  1076. * Row-step
  1077. * @returns {number} defaults to 1
  1078. */
  1079. get step0() {
  1080. return this._step0;
  1081. }
  1082. /**
  1083. * Column-step
  1084. * @returns {number} defaults to this.rows
  1085. */
  1086. get step1() {
  1087. return this._step1;
  1088. }
  1089. /**
  1090. * Extract a block from this matrix. Use a shared underlying buffer
  1091. * @param {number} firstRow
  1092. * @param {number} lastRow
  1093. * @param {number} firstColumn
  1094. * @param {number} lastColumn
  1095. * @returns {SpeedyMatrix}
  1096. */
  1097. block(firstRow, lastRow, firstColumn, lastColumn) {
  1098. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(firstRow <= lastRow && firstColumn <= lastColumn, `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`);
  1099. // ensure that the indices are within bounds
  1100. firstRow = Math.max(firstRow, 0);
  1101. lastRow = Math.min(lastRow, this._rows - 1);
  1102. firstColumn = Math.max(firstColumn, 0);
  1103. lastColumn = Math.min(lastColumn, this._columns - 1);
  1104. // compute the dimensions of the new submatrix
  1105. const rows = lastRow - firstRow + 1;
  1106. const columns = lastColumn - firstColumn + 1;
  1107. // obtain the relevant portion of the data
  1108. const step0 = this._step0,
  1109. step1 = this._step1;
  1110. const begin = firstRow * step0 + firstColumn * step1; // inclusive
  1111. const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
  1112. // create new matrix
  1113. return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
  1114. }
  1115. /**
  1116. * Extract a row from this matrix
  1117. * @param {number} index 0-based
  1118. * @returns {SpeedyMatrix}
  1119. */
  1120. row(index) {
  1121. return this.block(index, index, 0, this._columns - 1);
  1122. }
  1123. /**
  1124. * Extract a column from this matrix
  1125. * @param {number} index 0-based
  1126. * @returns {SpeedyMatrix}
  1127. */
  1128. column(index) {
  1129. return this.block(0, this._rows - 1, index, index);
  1130. }
  1131. /**
  1132. * Extract the main diagonal from this matrix
  1133. * @returns {SpeedyMatrix} as a column-vector
  1134. */
  1135. diagonal() {
  1136. const diagsize = Math.min(this._rows, this._columns);
  1137. // compute the dimensions of the new submatrix
  1138. const rows = diagsize; // make it a column vector
  1139. const columns = 1;
  1140. // obtain the relevant portion of the data
  1141. const diagstep = this._step0 + this._step1; // jump a row and a column
  1142. const begin = 0; // inclusive
  1143. const end = 1 + (diagsize - 1) * diagstep; // exclusive
  1144. // create new matrix
  1145. return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
  1146. }
  1147. /**
  1148. * Read a single entry of this matrix
  1149. * @param {number} row 0-based index
  1150. * @param {number} column 0-based index
  1151. * @returns {number}
  1152. */
  1153. at(row, column) {
  1154. if (row >= 0 && row < this._rows && column >= 0 && column < this._columns) return this._data[this._step0 * row + this._step1 * column];else return Number.NaN;
  1155. }
  1156. /**
  1157. * Read the entries of the matrix in column-major format
  1158. * @returns {number[]}
  1159. */
  1160. read() {
  1161. const entries = new Array(this._rows * this._columns);
  1162. const step0 = this._step0,
  1163. step1 = this._step1;
  1164. let i = 0;
  1165. for (let column = 0; column < this._columns; column++) {
  1166. for (let row = 0; row < this._rows; row++) entries[i++] = this._data[row * step0 + column * step1];
  1167. }
  1168. return entries;
  1169. }
  1170. /**
  1171. * Returns a human-readable string representation of the matrix
  1172. * @returns {string}
  1173. */
  1174. toString() {
  1175. const DECIMALS = 5;
  1176. const rows = this.rows,
  1177. columns = this.columns;
  1178. const entries = this.read();
  1179. const mat = /** @type {number[][]} */new Array(rows);
  1180. for (let i = 0; i < rows; i++) {
  1181. mat[i] = new Array(columns);
  1182. for (let j = 0; j < columns; j++) mat[i][j] = entries[j * rows + i];
  1183. }
  1184. const fix = x => x.toFixed(DECIMALS);
  1185. const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
  1186. const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
  1187. return str;
  1188. }
  1189. /**
  1190. * Set the contents of this matrix to the result of an expression
  1191. * @param {SpeedyMatrixExpr} expr matrix expression
  1192. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1193. */
  1194. setTo(expr) {
  1195. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
  1196. // TODO: add support for WebWorkers
  1197. return this.setToSync(expr);
  1198. });
  1199. }
  1200. /**
  1201. * Synchronously set the contents of this matrix to the result of an expression
  1202. * @param {SpeedyMatrixExpr} expr matrix expression
  1203. * @returns {SpeedyMatrix} this
  1204. */
  1205. setToSync(expr) {
  1206. const {
  1207. wasm,
  1208. memory
  1209. } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
  1210. // evaluate the expression
  1211. const result = expr._evaluate(wasm, memory);
  1212. /*
  1213. // shallow copy the results to this matrix
  1214. // limitation: can't handle blocks properly
  1215. // (a tree-like structure could be useful)
  1216. this._rows = result.rows;
  1217. this._columns = result.columns;
  1218. //this._dtype = result.dtype;
  1219. this._data = result.data;
  1220. this._step0 = result.step0;
  1221. this._step1 = result.step1;
  1222. */
  1223. // validate shape
  1224. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype, `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`);
  1225. // deep copy
  1226. const step0 = this._step0,
  1227. step1 = this._step1,
  1228. rstep0 = result._step0,
  1229. rstep1 = result._step1;
  1230. if (step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
  1231. // fast copy
  1232. this._data.set(result._data);
  1233. } else {
  1234. // copy each element
  1235. for (let column = this._columns - 1; column >= 0; column--) {
  1236. for (let row = this._rows - 1; row >= 0; row--) this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
  1237. }
  1238. }
  1239. // done!
  1240. return this;
  1241. }
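/*
 * Usage sketch (hedged: assumes the Speedy.Matrix factory and matrix
 * expression methods such as times(), defined elsewhere in this bundle):
 *
 *   const A = Speedy.Matrix(2, 2, [1, 3, 2, 4]); // column-major entries
 *   const B = Speedy.Matrix(2, 2, [5, 7, 6, 8]);
 *   const C = Speedy.Matrix.Zeros(2, 2);
 *   await C.setTo(A.times(B)); // evaluate A * B and copy the result into C
 *   C.setToSync(A.times(B));   // synchronous variant; requires the WASM routines to be ready
 */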
  1242. /**
  1243. * Fill this matrix with a scalar value
  1244. * @param {number} value
  1245. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1246. */
  1247. fill(value) {
  1248. this.fillSync(value);
  1249. return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
  1250. }
  1251. /**
  1252. * Synchronously fill this matrix with a scalar value
  1253. * @param {number} value
  1254. * @returns {SpeedyMatrix} this
  1255. */
  1256. fillSync(value) {
  1257. value = +value;
  1258. if (this._rows * this._columns === this._data.length) {
  1259. this._data.fill(value);
  1260. return this;
  1261. }
  1262. for (let column = 0; column < this._columns; column++) {
  1263. for (let row = 0; row < this._rows; row++) {
  1264. this._data[row * this._step0 + column * this._step1] = value;
  1265. }
  1266. }
  1267. return this;
  1268. }
  1269. /**
  1270. * Evaluate this expression
  1271. * @param {WebAssembly.Instance} wasm
  1272. * @param {SpeedyMatrixWASMMemory} memory
  1273. * @returns {SpeedyMatrix}
  1274. */
  1275. _evaluate(wasm, memory) {
  1276. return this;
  1277. }
  1278. }
  1279. /***/ }),
  1280. /***/ 6634:
  1281. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_48547__) => {
  1282. "use strict";
  1283. /* harmony export */ __nested_webpack_require_48547__.d(__nested_webpack_exports__, {
  1284. /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
  1285. /* harmony export */ });
  1286. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_48547__(8581);
  1287. /*
  1288. * speedy-vision.js
  1289. * GPU-accelerated Computer Vision for JavaScript
  1290. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1291. *
  1292. * Licensed under the Apache License, Version 2.0 (the "License");
  1293. * you may not use this file except in compliance with the License.
  1294. * You may obtain a copy of the License at
  1295. *
  1296. * http://www.apache.org/licenses/LICENSE-2.0
  1297. *
  1298. * Unless required by applicable law or agreed to in writing, software
  1299. * distributed under the License is distributed on an "AS IS" BASIS,
  1300. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1301. * See the License for the specific language governing permissions and
  1302. * limitations under the License.
  1303. *
  1304. * speedy-namespace.js
  1305. * Symbolizes a namespace
  1306. */
  1307. /**
  1308. * An abstract namespace
  1309. * @abstract
  1310. */
  1311. class SpeedyNamespace {
  1312. /**
  1313. * Namespaces can't be instantiated.
  1314. * Only static methods are allowed.
  1315. * @abstract
  1316. * @throws SpeedyError
  1317. */
  1318. constructor() {
  1319. // only static methods are allowed
  1320. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
  1321. }
  1322. }
  1323. /***/ }),
  1324. /***/ 9192:
  1325. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_50059__) => {
  1326. "use strict";
  1327. /* harmony export */ __nested_webpack_require_50059__.d(__nested_webpack_exports__, {
  1328. /* harmony export */ i: () => (/* binding */ SpeedyPromise)
  1329. /* harmony export */ });
  1330. /*
  1331. * speedy-vision.js
  1332. * GPU-accelerated Computer Vision for JavaScript
  1333. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1334. *
  1335. * Licensed under the Apache License, Version 2.0 (the "License");
  1336. * you may not use this file except in compliance with the License.
  1337. * You may obtain a copy of the License at
  1338. *
  1339. * http://www.apache.org/licenses/LICENSE-2.0
  1340. *
  1341. * Unless required by applicable law or agreed to in writing, software
  1342. * distributed under the License is distributed on an "AS IS" BASIS,
  1343. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1344. * See the License for the specific language governing permissions and
  1345. * limitations under the License.
  1346. *
  1347. * speedy-promise.js
  1348. * Speedy Promises: a fast implementation of Promises
  1349. */
  1350. const PENDING = 0;
  1351. const FULFILLED = 1;
  1352. const REJECTED = 2;
  1353. const SUSPEND_ASYNC = 1;
  1354. const asap = typeof queueMicrotask !== 'undefined' && queueMicrotask ||
  1355. // browsers
  1356. typeof process !== 'undefined' && process.nextTick || (
  1357. // node.js
  1358. f => Promise.resolve().then(() => f())); // most compatible
  1359. /**
  1360. * SpeedyPromise: Super Fast Promises. SpeedyPromises can
  1361. * interoperate with ES6 Promises. This implementation is
  1362. * based on the Promises/A+ specification.
  1363. * @template T
  1364. */
  1365. class SpeedyPromise {
  1366. /**
  1367. * Constructor
  1368. * @param {function(function(T=): void, function(Error): void): void} callback
  1369. */
  1370. constructor(callback) {
  1371. this._state = PENDING;
  1372. this._value = undefined;
  1373. this._onFulfillment = null;
  1374. this._onRejection = null;
  1375. this._children = 0;
  1376. this[0] = this;
  1377. this._parent = undefined;
  1378. this._flags = 0;
  1379. this._fulfill = this._fulfill.bind(this);
  1380. this._reject = this._reject.bind(this);
  1381. this._resolve = this._resolve.bind(this);
  1382. this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
  1383. callback(this._fulfill, this._reject);
  1384. }
  1385. /**
  1386. * Setup handlers
  1387. * @template U, V=never
  1388. * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
  1389. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1390. * @returns {SpeedyPromise<U>}
  1391. */
  1392. then(onFulfillment, onRejection = null) {
  1393. const child = new SpeedyPromise(this._nop);
  1394. child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
  1395. child._onRejection = typeof onRejection === 'function' && onRejection;
  1396. child._parent = this;
  1397. this[this._children++] = child; // attach child
  1398. this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
  1399. this._notify();
  1400. return child;
  1401. }
  1402. /**
  1403. * Setup rejection handler
  1404. * @template U, V=never
  1405. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1406. * @returns {SpeedyPromise<V>}
  1407. */
  1408. catch(onRejection) {
  1409. return this.then(null, onRejection);
  1410. }
  1411. /**
  1412. * Execute a callback when the promise is settled
  1413. * (i.e., fulfilled or rejected)
  1414. * @param {function(): void} onFinally
  1415. * @returns {SpeedyPromise<T>}
  1416. */
  1417. finally(onFinally) {
  1418. const fn = val => {
  1419. onFinally();
  1420. return val;
  1421. };
  1422. return this.then(fn, fn);
  1423. }
  1424. /**
  1425. * Start the computation immediately, synchronously.
  1426. * Can't afford to spend any time at all waiting for micro-tasks, etc.
  1427. * @returns {SpeedyPromise<T>} this
  1428. */
  1429. turbocharge() {
  1430. let my = this;
  1431. // suspend the async behavior
  1432. this._flags |= SUSPEND_ASYNC;
  1433. while (my._parent !== undefined) {
  1434. my = my._parent;
  1435. my._flags |= SUSPEND_ASYNC;
  1436. }
  1437. // notify the children of the root
  1438. my._notify(); // will be synchronous
  1439. // return this SpeedyPromise
  1440. return this;
  1441. }
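/*
 * Sketch of the intended effect (hypothetical snippet): turbocharge()
 * flushes the chain synchronously instead of waiting for a microtask.
 *
 *   let settled = false;
 *   SpeedyPromise.resolve(42).then(x => { settled = true; }).turbocharge();
 *   // settled === true at this point; without turbocharge(), the handler
 *   // would only run after the current task yields to the microtask queue
 */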
  1442. /**
  1443. * Convert to string
  1444. * @returns {string}
  1445. */
  1446. toString() {
  1447. switch (this._state) {
  1448. case PENDING:
  1449. return `SpeedyPromise { <pending> }`;
  1450. case FULFILLED:
  1451. return `SpeedyPromise { <fulfilled> ${this._value} }`;
  1452. case REJECTED:
  1453. return `SpeedyPromise { <rejected> ${this._value} }`;
  1454. default:
  1455. return '';
  1456. }
  1457. }
  1458. /**
  1459. * Symbol.toStringTag
  1460. * @returns {string}
  1461. */
  1462. get [Symbol.toStringTag]() {
  1463. return 'SpeedyPromise';
  1464. }
  1465. /**
  1466. * Creates a resolved SpeedyPromise
  1467. * @template U
  1468. * @param {U} [value]
  1469. * @returns {SpeedyPromise<U>}
  1470. */
  1471. static resolve(value) {
  1472. const promise = new SpeedyPromise(this._snop);
  1473. if (typeof value === 'object' && value !== null && 'then' in value || typeof value === 'function' && 'then' in value) {
  1474. // resolve asynchronously
  1475. promise._resolve(value);
  1476. } else {
  1477. // fulfill synchronously
  1478. promise._value = value;
  1479. promise._state = FULFILLED;
  1480. }
  1481. return promise;
  1482. }
  1483. /**
  1484. * Creates a rejected SpeedyPromise
  1485. * @template U
  1486. * @param {Error} reason
  1487. * @returns {SpeedyPromise<U>}
  1488. */
  1489. static reject(reason) {
  1490. const promise = new SpeedyPromise(this._snop);
  1491. promise._value = reason;
  1492. promise._state = REJECTED;
  1493. return promise;
  1494. }
  1495. /**
  1496. * Returns a SpeedyPromise that resolves to an array
  1497. * containing the results of the input promises/values,
  1498. * in their given order. The returned SpeedyPromise will
  1499. * resolve if all input promises resolve, or reject if
  1500. * any input promise rejects.
  1501. * @template U
  1502. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1503. * @returns {SpeedyPromise<U[]>}
  1504. *
  1505. * FIXME iterables need not be all <U>
  1506. */
  1507. static all(iterable) {
  1508. return new SpeedyPromise((resolve, reject) => {
  1509. const input = [];
  1510. // get elements
  1511. for (const element of iterable) input.push(element);
  1512. // resolve synchronously if there are no elements
  1513. const length = input.length;
  1514. if (length == 0) {
  1515. resolve([]);
  1516. return;
  1517. }
  1518. // resolve asynchronously
  1519. let counter = length;
  1520. const output = new Array(length);
  1521. const partialResolve = i => val => {
  1522. output[i] = val;
  1523. if (0 == --counter) resolve(output);
  1524. };
  1525. for (let i = 0; i < length; i++) {
  1526. const element = input[i];
  1527. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(partialResolve(i), reject);else SpeedyPromise.resolve(element).then(partialResolve(i), reject);
  1528. }
  1529. });
  1530. }
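/*
 * Usage sketch: thenables and plain values may be mixed freely
 * (hypothetical values):
 *
 *   SpeedyPromise.all([ SpeedyPromise.resolve(1), Promise.resolve(2), 3 ])
 *       .then(values => console.log(values)); // [1, 2, 3]
 */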
  1531. /**
  1532. * Returns a promise that gets fulfilled or rejected as soon
  1533. * as the first promise in the iterable gets fulfilled or
  1534. * rejected (with its value/reason).
  1535. * @template U
  1536. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1537. * @returns {SpeedyPromise<U>}
  1538. */
  1539. static race(iterable) {
  1540. return new SpeedyPromise((resolve, reject) => {
  1541. const input = [];
  1542. // get elements
  1543. for (const element of iterable) input.push(element);
  1544. // if the iterable is empty, the promise
  1545. // will be pending forever...
  1546. // resolve asynchronously
  1547. const length = input.length;
  1548. for (let i = 0; i < length; i++) {
  1549. const element = input[i];
  1550. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(resolve, reject);else SpeedyPromise.resolve(element).then(resolve, reject);
  1551. }
  1552. });
  1553. }
  1554. /**
  1555. * Fulfill this promise with a value
  1556. * @param {T} value
  1557. */
  1558. _fulfill(value) {
  1559. this._setState(FULFILLED, value);
  1560. }
  1561. /**
  1562. * Reject this promise with a reason
  1563. * @param {Error} reason
  1564. */
  1565. _reject(reason) {
  1566. this._setState(REJECTED, reason);
  1567. }
  1568. /**
  1569. * Set the state and the value of this promise
  1570. * @param {number} state
  1571. * @param {T|Error} value
  1572. */
  1573. _setState(state, value) {
  1574. // the promise is already fulfilled or rejected
  1575. if (this._state != PENDING) return;
  1576. // set the new state
  1577. this._state = state;
  1578. this._value = value;
  1579. this._notify();
  1580. }
  1581. /**
  1582. * Notify my children that this promise is no
  1583. * longer pending. This is an async operation:
1584. my children will be notified "as soon
  1585. * as possible" (it will be scheduled).
  1586. * We may force this to be synchronous, though
  1587. */
  1588. _notify() {
  1589. // nothing to do
  1590. if (this._state == PENDING) return;
  1591. // have we turbocharged this promise?
  1592. if (this._flags & SUSPEND_ASYNC) {
  1593. this._broadcast(); // execute synchronously
  1594. return;
  1595. }
  1596. // install a timer (default behavior)
  1597. asap(this._broadcastIfAsync);
  1598. }
  1599. /**
  1600. * Helper method
  1601. */
  1602. _broadcastIfAsync() {
  1603. // we may have installed a timer at some
  1604. // point, but turbocharged the promise later
  1605. if (!(this._flags & SUSPEND_ASYNC)) this._broadcast();
  1606. }
  1607. /**
  1608. * Tell my children that this promise
  1609. * is either fulfilled or rejected.
  1610. * This is a synchronous operation
  1611. */
  1612. _broadcast() {
  1613. const children = this._children;
  1614. const state = this._state;
  1615. if (state === FULFILLED) {
  1616. for (let i = 0; i < children; i++) {
  1617. const child = this[i];
  1618. const callback = child._onFulfillment;
  1619. try {
  1620. if (callback) {
  1621. if (callback !== child._nop) {
  1622. child._resolve(callback(this._value)); // promise resolution procedure
  1623. child._onFulfillment = child._nop; // will not be called again
  1624. }
  1625. } else child._fulfill(this._value);
  1626. } catch (e) {
  1627. child._reject(e);
  1628. }
  1629. }
  1630. } else if (state === REJECTED) {
  1631. for (let i = 0; i < children; i++) {
  1632. const child = this[i];
  1633. const callback = child._onRejection;
  1634. try {
  1635. if (callback) {
  1636. if (callback !== child._nop) {
  1637. child._resolve(callback(this._value)); // promise resolution procedure
  1638. child._onRejection = child._nop; // will not be called again
  1639. }
  1640. } else child._reject(this._value);
  1641. } catch (e) {
  1642. child._reject(e);
  1643. }
  1644. }
  1645. }
  1646. }
  1647. /**
  1648. * Promise Resolution Procedure
  1649. * based on the Promises/A+ spec
  1650. * @param {T} x
  1651. */
  1652. _resolve(x) {
  1653. if (typeof x !== 'object' && typeof x !== 'function' || x === null) {
  1654. // if(x !== Object(x))
  1655. this._fulfill(x);
  1656. return;
  1657. }
  1658. if (x === this) throw new TypeError(); // Circular reference
  1659. if (x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
  1660. x.then(this._resolve, this._reject);
  1661. return;
  1662. }
  1663. try {
  1664. const then = x.then;
  1665. if (typeof then === 'function') {
  1666. let resolve = this._resolve,
  1667. reject = this._reject;
  1668. try {
  1669. then.call(x, y => {
  1670. resolve(y);
  1671. resolve = reject = this._nop;
  1672. }, r => {
  1673. reject(r);
  1674. resolve = reject = this._nop;
  1675. });
  1676. } catch (e) {
  1677. if (resolve !== this._nop && reject !== this._nop) this._reject(e);
  1678. }
  1679. } else {
  1680. this._fulfill(x);
  1681. }
  1682. } catch (e) {
  1683. this._reject(e);
  1684. }
  1685. }
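/*
 * Sketch: per the resolution procedure above, resolving with any thenable
 * adopts its eventual state (hypothetical object):
 *
 *   const thenable = { then(onOk, onErr) { onOk('ok'); } };
 *   SpeedyPromise.resolve(thenable).then(v => console.log(v)); // logs 'ok'
 */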
  1686. /**
  1687. * No-operation
  1688. */
  1689. _nop() {}
  1690. /**
  1691. * Static no-operation
  1692. */
  1693. static _snop() {}
  1694. }
  1695. //module.exports = { SpeedyPromise };
  1696. /*
  1697. // Uncomment to test performance with regular Promises
  1698. module.exports = { SpeedyPromise: Promise };
  1699. Promise.prototype.turbocharge = function() { return this };
  1700. */
  1701. /***/ }),
  1702. /***/ 9420:
  1703. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_61794__) => {
  1704. "use strict";
  1705. // EXPORTS
  1706. __nested_webpack_require_61794__.d(__nested_webpack_exports__, {
  1707. gx: () => (/* binding */ createShader),
  1708. bf: () => (/* binding */ importShader)
  1709. });
  1710. // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
  1711. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  1712. var speedy_gl = __nested_webpack_require_61794__(1001);
  1713. // EXTERNAL MODULE: ./src/utils/utils.js
  1714. var utils = __nested_webpack_require_61794__(9037);
  1715. // EXTERNAL MODULE: ./src/utils/types.js
  1716. var types = __nested_webpack_require_61794__(6049);
  1717. // EXTERNAL MODULE: ./src/utils/errors.js
  1718. var errors = __nested_webpack_require_61794__(8581);
  1719. ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
  1720. function _wrapRegExp() { _wrapRegExp = function (e, r) { return new BabelRegExp(e, void 0, r); }; var e = RegExp.prototype, r = new WeakMap(); function BabelRegExp(e, t, p) { var o = RegExp(e, t); return r.set(o, p || r.get(e)), _setPrototypeOf(o, BabelRegExp.prototype); } function buildGroups(e, t) { var p = r.get(t); return Object.keys(p).reduce(function (r, t) { var o = p[t]; if ("number" == typeof o) r[t] = e[o];else { for (var i = 0; void 0 === e[o[i]] && i + 1 < o.length;) i++; r[t] = e[o[i]]; } return r; }, Object.create(null)); } return _inherits(BabelRegExp, RegExp), BabelRegExp.prototype.exec = function (r) { var t = e.exec.call(this, r); if (t) { t.groups = buildGroups(t, this); var p = t.indices; p && (p.groups = buildGroups(p, this)); } return t; }, BabelRegExp.prototype[Symbol.replace] = function (t, p) { if ("string" == typeof p) { var o = r.get(this); return e[Symbol.replace].call(this, t, p.replace(/\$<([^>]+)>/g, function (e, r) { var t = o[r]; return "$" + (Array.isArray(t) ? t.join("$") : t); })); } if ("function" == typeof p) { var i = this; return e[Symbol.replace].call(this, t, function () { var e = arguments; return "object" != typeof e[e.length - 1] && (e = [].slice.call(e)).push(buildGroups(e, i)), p.apply(this, e); }); } return e[Symbol.replace].call(this, t, p); }, _wrapRegExp.apply(this, arguments); }
  1721. function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
  1722. function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
  1723. /*
  1724. * speedy-vision.js
  1725. * GPU-accelerated Computer Vision for JavaScript
  1726. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1727. *
  1728. * Licensed under the Apache License, Version 2.0 (the "License");
  1729. * you may not use this file except in compliance with the License.
  1730. * You may obtain a copy of the License at
  1731. *
  1732. * http://www.apache.org/licenses/LICENSE-2.0
  1733. *
  1734. * Unless required by applicable law or agreed to in writing, software
  1735. * distributed under the License is distributed on an "AS IS" BASIS,
  1736. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1737. * See the License for the specific language governing permissions and
  1738. * limitations under the License.
  1739. *
  1740. * shader-preprocessor.js
  1741. * Custom preprocessor for shaders
  1742. */
  1743. /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
  1744. /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
  1745. // Import numeric globals
  1746. const globals = __nested_webpack_require_61794__(3816);
  1747. const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */
  1748. Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce((obj, key) => (obj[key] = globals[key], obj), {});
  1749. /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
  1750. const basicConstants = Object.freeze(Object.assign(Object.assign({}, numericGlobals), {}, {
  1751. // fragment shader
  1752. 'FS_USE_CUSTOM_PRECISION': 0,
  1753. // use default precision settings
  1754. 'FS_OUTPUT_TYPE': 0,
  1755. // normalized RGBA
  1756. // colors
  1757. 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
  1758. 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
  1759. 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
  1760. 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA
  1761. }));
  1762. /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
  1763. const platformConstants = (platform, glRenderer) => Object.freeze({
  1764. 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0,
  1765. // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
  1766. 'APPLE_GPU': /Apple/.test(glRenderer) | 0,
  1767. // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
  1768. 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0 // Intel[(R)] ... [HD] Graphics xyz ...
  1769. });
  1770. // Regular Expressions
  1771. const commentsRegex = [/\/\*(.|\s)*?\*\//g, /\/\/.*$/gm];
  1772. const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
  1773. const constantRegex = /@(\w+)@/g;
  1774. const unrollRegex = [/*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1775. counter: 2
  1776. }), /*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+=\s*(\x2D?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1777. counter: 2
  1778. })];
  1779. /**
  1780. * Custom preprocessor for the shaders
  1781. */
  1782. class ShaderPreprocessor {
  1783. /**
  1784. * Runs the preprocessor and generates GLSL code
  1785. * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
  1786. * @param {string} infix annotated GLSL code
  1787. * @param {string} [prefix]
  1788. * @param {string} [suffix]
  1789. * @returns {string} preprocessed GLSL code
  1790. */
  1791. static generateGLSL(defines, infix, prefix = null, suffix = null) {
  1792. //
  1793. // The preprocessor will remove comments from GLSL code,
  1794. // include requested GLSL files and import global constants
  1795. // defined for all shaders (see above)
  1796. //
  1797. const errors = []; // compile-time errors
  1798. const constants = generateConstants(defines);
  1799. const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
  1800. return unrollLoops(annotatedGLSL.replace(commentsRegex[0], '').replace(commentsRegex[1], '').replace(constantRegex, (_, name) => String(
  1801. // Replace preprocessor @CONSTANTS@ by their numeric values
  1802. constants.has(name) ? Number(constants.get(name)) : (errors.push(`Undefined constant ${name}`), 0))).replace(includeRegex, (_, filename) =>
  1803. // Included files may include other files.
  1804. // XXX no cycle detection!
  1805. ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))), defines) + errors.map(msg => `\n#error ${msg}\n`).join('');
  1806. }
  1807. }
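/*
 * Sketch of a typical invocation (hypothetical annotated GLSL): the
 * user-provided defines are prepended as #defines, @include directives pull
 * files from shaders/include/, comments are stripped, and @NAME@ tokens are
 * replaced by their numeric values:
 *
 *   const defines = new Map([ ['KERNEL_SIZE', 5] ]);
 *   const glsl = ShaderPreprocessor.generateGLSL(defines, `
 *       @include "global.glsl"
 *       void main() { color = vec4(float(@PIXELCOMPONENT_RED@)); }
 *   `);
 */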
  1808. /**
  1809. * Generate GLSL code based on the input arguments
  1810. * @param {ShaderPreprocessorConstants} defines
  1811. * @param {string} infix
  1812. * @param {string} [prefix]
  1813. * @param {string} [suffix]
  1814. * @returns {string} GLSL code
  1815. */
  1816. function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null) {
  1817. const parts = [];
  1818. if (prefix !== null) parts.push(prefix);
  1819. for (const [key, value] of defines) parts.push(`#define ${key} ${Number(value)}`);
  1820. parts.push(infix);
  1821. if (suffix !== null) parts.push(suffix);
  1822. return parts.join('\n');
  1823. }
  1824. /**
  1825. * Generate pre-processor constants. Constants provided by the
  1826. * user have higher priority than globally available constants.
  1827. * @param {ShaderPreprocessorConstants} defines user-provided
  1828. * @returns {ShaderPreprocessorConstants}
  1829. */
  1830. function generateConstants(defines) {
  1831. utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
  1832. const myConstants = /** @type {ShaderPreprocessorConstants} */new Map();
  1833. const globalConstants = Object.assign(Object.create(null), basicConstants, platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer));
  1834. // globally available constants have lower priority
  1835. for (const key in globalConstants) {
  1836. //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
  1837. myConstants.set(key, globalConstants[key]);
  1838. }
  1839. // user-defined constants have higher priority
  1840. for (const [key, value] of defines) myConstants.set(key, value);
  1841. // done!
  1842. return myConstants;
  1843. }
  1844. /**
  1845. * Reads a shader from the shaders/include/ folder
  1846. * @param {string} filename
  1847. * @returns {string}
  1848. */
  1849. function readfileSync(filename) {
  1850. if (String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/)) return __nested_webpack_require_61794__(5235)("./" + filename);
  1851. throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
  1852. }
  1853. /**
  1854. * Unroll for loops in our own preprocessor
  1855. * @param {string} code
  1856. * @param {ShaderPreprocessorConstants} defines
  1857. * @returns {string}
  1858. */
  1859. function unrollLoops(code, defines) {
  1860. //
  1861. // Currently, only integer for loops with positive step values
  1862. // can be unrolled. (TODO: negative step values?)
  1863. //
  1864. // The current implementation does not support curly braces
  1865. // inside unrolled loops. You may define macros to get around
  1866. // this, but do you actually need to unroll such loops?
  1867. //
  1868. // Loops that don't fit the supported pattern will crash
  1869. // the preprocessor if you try to unroll them.
  1870. //
  1871. const fn = unroll.bind(defines); // CRAZY!
  1872. const n = unrollRegex.length;
  1873. for (let i = 0; i < n; i++) code = code.replace(unrollRegex[i], fn);
  1874. return code;
  1875. }
  1876. /**
  1877. * Unroll a loop pattern (regexp)
  1878. * @param {string} match the matched for loop
  1879. * @param {string} type
  1880. * @param {string} counter
  1881. * @param {string} start
  1882. * @param {string} cmp
  1883. * @param {string} end
  1884. * @param {string} step
  1885. * @param {string} loopcode
  1886. * @returns {string} unrolled loop
  1887. */
  1888. function unroll(match, type, counter, start, cmp, end, step, loopcode) {
  1889. const defines = /** @type {ShaderPreprocessorConstants} */this;
  1890. // check if the loop limits are numeric constants or #defined numbers from the outside
  1891. const hasStart = Number.isFinite(+start) || defines.has(start);
  1892. const hasEnd = Number.isFinite(+end) || defines.has(end);
  1893. if (!hasStart || !hasEnd) {
  1894. if (defines.size > 0) throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);else return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
  1895. }
  1896. // parse and validate limits & step
  1897. let istart = defines.has(start) ? defines.get(start) : parseInt(start);
  1898. let iend = defines.has(end) ? defines.get(end) : parseInt(end);
  1899. let istep = step.length == 0 ? 1 : parseInt(step);
  1900. utils/* Utils */.A.assert(istart <= iend && istep > 0);
  1901. /*
  1902. // debug
1903. console.log(`Found "${match}"`);
  1904. console.log(`type="${type}"`);
  1905. console.log(`counter="${counter}"`);
  1906. console.log(`start="${start}"`);
  1907. console.log(`cmp="${cmp}"`);
  1908. console.log(`end="${end}"`);
  1909. console.log(`step="${step}"`);
  1910. console.log(`loopcode="${loopcode}"`)
  1911. console.log('Defines:', defines);
  1912. */
  1913. // continue statements are not supported inside unrolled loops
  1914. // and will generate a compiler error. Using break is ok.
  1915. const hasBreak = loopcode.match(/\bbreak\s*;/) !== null;
  1916. // create a new scope
  1917. let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
  1918. // declare counter
  1919. unrolledCode += `${type} ${counter};\n`;
  1920. // unroll loop
  1921. iend += cmp == '<=' ? 1 : 0;
  1922. for (let i = istart; i < iend; i += istep) unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
  1923. // close scope
  1924. unrolledCode += '}\n';
  1925. //console.log('Unrolled code:\n\n' + unrolledCode);
  1926. // done!
  1927. return unrolledCode;
  1928. }
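/*
 * Sketch of the transformation performed above (hypothetical loop):
 *
 *   @unroll
 *   for(int i = 0; i < 3; i++) {
 *       sum += pixelAtShortOffset(image, ivec2(i, 0));
 *   }
 *
 * expands to a new scope with one copy of the body per iteration:
 *
 *   {
 *   int i;
 *   { i = 0; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *   { i = 1; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *   { i = 2; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *   }
 */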
  1929. ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
  1930. /*
  1931. * speedy-vision.js
  1932. * GPU-accelerated Computer Vision for JavaScript
  1933. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1934. *
  1935. * Licensed under the Apache License, Version 2.0 (the "License");
  1936. * you may not use this file except in compliance with the License.
  1937. * You may obtain a copy of the License at
  1938. *
  1939. * http://www.apache.org/licenses/LICENSE-2.0
  1940. *
  1941. * Unless required by applicable law or agreed to in writing, software
  1942. * distributed under the License is distributed on an "AS IS" BASIS,
  1943. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1944. * See the License for the specific language governing permissions and
  1945. * limitations under the License.
  1946. *
  1947. * shader-declaration.js
  1948. * Encapsulates a shader declaration
  1949. */
  1950. const DEFAULT_ATTRIBUTES = Object.freeze({
  1951. position: 'a_position',
  1952. texCoord: 'a_texCoord'
  1953. });
  1954. const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
  1955. position: 0,
  1956. // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  1957. texCoord: 1
  1958. });
  1959. const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
  1960. precision highp float;
  1961. precision highp int;
  1962. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
  1963. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
  1964. out highp vec2 texCoord;
  1965. uniform highp vec2 texSize;
  1966. #define vsinit() \
  1967. gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
  1968. texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
  1969. \n\n`;
  1970. const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
  1971. const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
  1972. const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
  1973. #if @FS_USE_CUSTOM_PRECISION@ == 0
  1974. precision mediump float; // ~float16
  1975. precision mediump sampler2D;
  1976. precision highp int; // int32
  1977. #endif
  1978. #if @FS_OUTPUT_TYPE@ == 0
  1979. #define OUT_TYPE mediump vec4
  1980. #elif @FS_OUTPUT_TYPE@ == 1
  1981. #define OUT_TYPE mediump ivec4
  1982. #elif @FS_OUTPUT_TYPE@ == 2
  1983. #define OUT_TYPE mediump uvec4
  1984. #else
  1985. #error Unknown FS_OUTPUT_TYPE
  1986. #endif
  1987. out OUT_TYPE color;
  1988. in highp vec2 texCoord;
  1989. uniform highp vec2 texSize;
  1990. @include "global.glsl"\n\n`;
  1991. const PRIVATE_TOKEN = Symbol();
  1992. /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
  1993. /** @typedef {string[]} ShaderDeclarationArgumentList */
  1994. /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
  1995. /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
  1996. /**
  1997. * Shader Declaration
  1998. * @abstract
  1999. */
  2000. class ShaderDeclaration {
  2001. /**
  2002. * @private Constructor
  2003. * @param {Symbol} privateToken
  2004. * @param {ShaderDeclarationArgumentList} argumentList
  2005. * @param {ShaderDeclarationPreprocessorConstants} defines
  2006. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2007. * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
  2008. */
  2009. constructor(privateToken, argumentList, defines, fsSource, vsSource) {
  2010. // private constructor!
  2011. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er();
  2012. /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
  2013. this._arguments = [...argumentList];
  2014. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2015. this._defines = new Map(defines);
  2016. /** @type {string} preprocessed source code of the fragment shader */
  2017. this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
  2018. /** @type {string} preprocessed source code of the vertex shader */
  2019. this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
  2020. /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
  2021. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2022. // validate arguments
  2023. this._validateArguments(this._arguments, this._uniforms);
  2024. }
  2025. /**
  2026. * Return the preprocessed GLSL source code of the fragment shader
  2027. * @returns {string}
  2028. */
  2029. get fragmentSource() {
  2030. return this._fragmentSource;
  2031. }
  2032. /**
  2033. * Return the preprocessed GLSL source code of the vertex shader
  2034. * @returns {string}
  2035. */
  2036. get vertexSource() {
  2037. return this._vertexSource;
  2038. }
  2039. /**
  2040. * Get the names of the vertex shader attributes
  2041. * @returns {typeof DEFAULT_ATTRIBUTES}
  2042. */
  2043. get attributes() {
  2044. return DEFAULT_ATTRIBUTES;
  2045. }
  2046. /**
  2047. * Get the pre-defined locations of the vertex shader attributes
  2048. * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
  2049. */
  2050. get locationOfAttributes() {
  2051. return DEFAULT_ATTRIBUTES_LOCATION;
  2052. }
  2053. /**
  2054. * Names of the arguments that will be passed to the Shader,
  2055. * corresponding to GLSL uniforms, in the order they will be passed
  2056. * @returns {string[]}
  2057. */
  2058. get arguments() {
  2059. return [].concat(this._arguments);
  2060. }
  2061. /**
  2062. * Names of the uniforms declared in the shader
  2063. * @returns {string[]}
  2064. */
  2065. get uniforms() {
  2066. return Array.from(this._uniforms.keys());
  2067. }
  2068. /**
  2069. * The GLSL type of a uniform variable declared in the shader
  2070. * @param {string} name
  2071. * @returns {string}
  2072. */
  2073. uniformType(name) {
  2074. if (!this._uniforms.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
  2075. return this._uniforms.get(name);
  2076. }
  2077. /**
  2078. * The value of an externally defined constant, i.e., via withDefines()
  2079. * @param {string} name
  2080. * @returns {number}
  2081. */
  2082. definedConstant(name) {
  2083. if (!this._defines.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
  2084. return this._defines.get(name);
  2085. }
  2086. /**
  2087. * Parses a GLSL source and detects the uniform variables,
  2088. * as well as their types
  2089. * @param {string} preprocessedSource
  2090. * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
  2091. */
  2092. _autodetectUniforms(preprocessedSource) {
  2093. const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
  2094. const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
  2095. const uniforms = /** @type {ShaderDeclarationUniformTypes} */new Map();
  2096. let match;
  2097. while ((match = regex.exec(sourceWithoutComments)) !== null) {
  2098. const type = match[2];
  2099. const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
  2100. for (const name of names) {
  2101. if (name.endsWith(']')) {
  2102. // is it an array?
  2103. if (!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/))) throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
  2104. // read array name & size
  2105. const [array, size] = [match[1], Number(match[2])];
  2106. // register uniforms
  2107. for (let i = 0; i < size; i++) uniforms.set(`${array}[${i}]`, type);
  2108. } else {
  2109. // register a regular uniform
  2110. if (!uniforms.has(name) || uniforms.get(name) === type) uniforms.set(name, type);else throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
  2111. }
  2112. }
  2113. }
  2114. return uniforms;
  2115. }
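/*
 * Sketch: array uniforms are expanded into one entry per element
 * (hypothetical declaration). For instance,
 *
 *   uniform float kernel[3];
 *
 * registers 'kernel[0]', 'kernel[1]' and 'kernel[2]', each of type 'float'.
 */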
  2116. /**
  2117. * Checks if all the arguments of the shader declaration are backed by a
  2118. * uniform variable in GLSL code
  2119. * @param {ShaderDeclarationArgumentList} argumentList
  2120. * @param {ShaderDeclarationUniformTypes} uniforms
  2121. * @throws {IllegalArgumentError}
  2122. */
  2123. _validateArguments(argumentList, uniforms) {
  2124. for (const argname of argumentList) {
  2125. if (!uniforms.has(argname)) {
  2126. if (!uniforms.has(argname + '[0]')) throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
  2127. }
  2128. }
  2129. }
  2130. }
  2131. /**
  2132. * A ShaderDeclaration that has its GLSL code stored in-memory
  2133. */
  2134. class MemoryShaderDeclaration extends ShaderDeclaration {
  2135. /**
  2136. * @private Constructor
  2137. * @param {Symbol} privateToken
  2138. * @param {ShaderDeclarationArgumentList} argumentList
  2139. * @param {ShaderDeclarationPreprocessorConstants} defines
  2140. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2141. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
  2142. */
  2143. constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER) {
  2144. super(privateToken, argumentList, defines, fsSource, vsSource);
  2145. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
  2146. this._fsUnprocessedSource = String(fsSource);
  2147. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
  2148. this._vsUnprocessedSource = String(vsSource);
  2149. }
  2150. }
  2151. /**
  2152. * A ShaderDeclaration that has its GLSL code stored in a file
  2153. */
  2154. class FileShaderDeclaration extends ShaderDeclaration {
  2155. /**
  2156. * @private Constructor
  2157. * @param {Symbol} privateToken
  2158. * @param {ShaderDeclarationArgumentList} argumentList
  2159. * @param {ShaderDeclarationPreprocessorConstants} defines
  2160. * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
  2161. * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
  2162. */
  2163. constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '') {
  2164. // validate paths
  2165. if (!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);else if (vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
  2166. // import files
  2167. const fsSource = __nested_webpack_require_61794__(4606)("./" + String(fsFilepath));
  2168. const vsSource = vsFilepath != '' ? __nested_webpack_require_61794__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
  2169. // super class
  2170. super(privateToken, argumentList, defines, fsSource, vsSource);
  2171. /** @type {string} filepath of the fragment shader */
  2172. this._fsFilepath = String(fsFilepath);
  2173. /** @type {string} filepath of the vertex shader */
  2174. this._vsFilepath = String(vsFilepath);
  2175. }
  2176. /**
  2177. * Return the preprocessed GLSL source code of the fragment shader
  2178. * @returns {string}
  2179. */
  2180. get fragmentSource() {
  2181. // we override this method to include the filepath. The motivation
2182. // is to easily identify the file when debugging compilation errors.
  2183. return this._addHeader('// File: ' + this._fsFilepath, super.fragmentSource);
  2184. }
  2185. /**
  2186. * Return the preprocessed GLSL source code of the vertex shader
  2187. * @returns {string}
  2188. */
  2189. get vertexSource() {
  2190. // we override this method to include the filepath. The motivation
2191. // is to easily identify the file when debugging compilation errors.
  2192. return this._addHeader('// File: ' + (this._vsFilepath != '' ? this._vsFilepath : '(default-vs) ' + this._fsFilepath), super.vertexSource);
  2193. }
  2194. /**
  2195. * Add a header to a GLSL code
  2196. * @param {string} header code to be added
  2197. * @param {string} src pre-processed GLSL code
  2198. * @returns {string} src with an added header
  2199. */
  2200. _addHeader(header, src) {
  2201. utils/* Utils */.A.assert(header.startsWith('//') && !header.includes('\n'));
  2202. const j = src.indexOf('\n');
  2203. const versionDirective = src.substr(0, j);
  2204. const body = src.substr(j);
  2205. utils/* Utils */.A.assert(versionDirective.startsWith('#version '));
  2206. const head = versionDirective + '\n' + header;
  2207. return head + body;
  2208. }
  2209. }
  2210. /**
  2211. * A builder of a ShaderDeclaration
  2212. * @abstract
  2213. */
  2214. class ShaderDeclarationBuilder {
  2215. /**
  2216. * @private Constructor
  2217. * @param {Symbol} privateToken
  2218. */
  2219. constructor(privateToken) {
  2220. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er(); // private constructor!
  2221. /** @type {string[]} ordered list of uniform names */
  2222. this._arguments = [];
  2223. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2224. this._defines = new Map();
  2225. }
  2226. /**
  2227. * Specify the list & order of arguments to be
  2228. * passed to the shader
  2229. * @param {string[]} args argument names
  2230. * @returns {this}
  2231. */
  2232. withArguments(...args) {
  2233. // the list of arguments may be declared only once
  2234. if (this._arguments.length > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
  2235. // get arguments
  2236. for (let j = 0; j < args.length; j++) this._arguments.push(String(args[j]));
  2237. // done!
  2238. return this;
  2239. }
  2240. /**
  2241. * Specify a set of #defines to be prepended to the shader
  2242. * @param {Object<string,number>} defines key-value pairs
  2243. * @returns {this}
  2244. */
  2245. withDefines(defines) {
  2246. // the list of #defines may be defined only once
  2247. if (this._defines.size > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
  2248. // store and write the #defines
  2249. const keys = Object.keys(defines);
  2250. for (const key of keys) {
  2251. const value = Number(defines[key]); // force numeric values (just in case)
  2252. this._defines.set(key, value);
  2253. }
  2254. // done!
  2255. return this;
  2256. }
  2257. /**
  2258. * Build a ShaderDeclaration
  2259. * @returns {ShaderDeclaration}
  2260. */
  2261. build() {
  2262. throw new errors/* AbstractMethodError */.aQ();
  2263. }
  2264. }
  2265. /**
  2266. * A builder of a MemoryShaderDeclaration
  2267. */
  2268. class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2269. /**
  2270. * @private Constructor
  2271. * @param {Symbol} privateToken
  2272. * @param {ShaderDeclarationUnprocessedGLSL} fsSource
  2273. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
  2274. */
  2275. constructor(privateToken, fsSource, vsSource) {
  2276. super(privateToken);
  2277. /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
  2278. this._fsSource = String(fsSource);
  2279. /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
  2280. this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
  2281. }
  2282. /**
  2283. * Build a MemoryShaderDeclaration
  2284. * @returns {ShaderDeclaration}
  2285. */
  2286. build() {
  2287. return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
  2288. }
  2289. }
  2290. /**
  2291. * A builder of a FileShaderDeclaration
  2292. */
  2293. class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2294. /**
  2295. * @private Constructor
  2296. * @param {Symbol} privateToken
  2297. * @param {string} fsFilepath
  2298. * @param {string} [vsFilepath]
  2299. */
  2300. constructor(privateToken, fsFilepath, vsFilepath) {
  2301. super(privateToken);
  2302. /** @type {string} path to the unprocessed GLSL code of the fragment shader */
  2303. this._fsFilepath = String(fsFilepath);
  2304. /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
  2305. this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
  2306. }
  2307. /**
  2308. * Build a FileShaderDeclaration
  2309. * @returns {ShaderDeclaration}
  2310. */
  2311. build() {
  2312. return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
  2313. }
  2314. }
  2315. /**
  2316. * Import a ShaderDeclaration from a GLSL file
  2317. * @param {string} filepath relative to the shaders/ folder (a .glsl file)
  2318. * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
  2319. * @returns {ShaderDeclaration}
  2320. */
  2321. function importShader(filepath, vsfilepath = undefined) {
  2322. return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
  2323. }
  2324. /**
  2325. * Create a ShaderDeclaration from a GLSL source code
  2326. * @param {string} source fragment shader
  2327. * @param {string} [vssource] optional vertex shader
  2328. * @returns {ShaderDeclaration}
  2329. */
  2330. function createShader(source, vssource = undefined) {
  2331. return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
  2332. }
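/*
 * Usage sketch (hypothetical fragment shader; threadPixel() is assumed to
 * come from the global include added by the default prefix):
 *
 *   const decl = createShader(`
 *       uniform sampler2D image;
 *       uniform float scale;
 *       void main() { color = threadPixel(image) * scale; }
 *   `).withArguments('image', 'scale').withDefines({ 'KERNEL_SIZE': 3 }).build();
 *
 *   decl.uniformType('scale');           // 'float'
 *   decl.definedConstant('KERNEL_SIZE'); // 3
 */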
  2333. /***/ }),
  2334. /***/ 1672:
  2335. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_89681__) => {
  2336. "use strict";
  2337. __nested_webpack_require_89681__.r(__nested_webpack_exports__);
  2338. /* harmony export */ __nested_webpack_require_89681__.d(__nested_webpack_exports__, {
  2339. /* harmony export */ conv2D: () => (/* binding */ conv2D),
  2340. /* harmony export */ convX: () => (/* binding */ convX),
  2341. /* harmony export */ convY: () => (/* binding */ convY)
  2342. /* harmony export */ });
  2343. /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_89681__(9420);
  2344. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_89681__(9037);
  2345. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_89681__(8581);
  2346. /*
  2347. * speedy-vision.js
  2348. * GPU-accelerated Computer Vision for JavaScript
  2349. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2350. *
  2351. * Licensed under the Apache License, Version 2.0 (the "License");
  2352. * you may not use this file except in compliance with the License.
  2353. * You may obtain a copy of the License at
  2354. *
  2355. * http://www.apache.org/licenses/LICENSE-2.0
  2356. *
  2357. * Unless required by applicable law or agreed to in writing, software
  2358. * distributed under the License is distributed on an "AS IS" BASIS,
  2359. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2360. * See the License for the specific language governing permissions and
  2361. * limitations under the License.
  2362. *
  2363. * convolution.js
  2364. * Convolution shader generators
  2365. */
  2366. /**
  2367. * Generate a 2D convolution with a square kernel
  2368. * @param {number[]} kernel convolution kernel
  2369. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2370. * @returns {ShaderDeclarationBuilder}
  2371. */
  2372. function conv2D(kernel, normalizationConstant = 1.0) {
  2373. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2374. const kSize = Math.sqrt(kernel32.length) | 0;
  2375. const N = kSize >> 1; // idiv 2
  2376. // validate input
  2377. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);else if (kSize * kSize != kernel32.length) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
  2378. // select the appropriate pixel function
  2379. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2380. // code generator
  2381. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(cur => fn(kernel32[(cur[0] + N) * kSize + (cur[1] + N)], cur[0], cur[1])).join('\n');
  2382. const generateCode = (k, dy, dx) => `
  2383. result += ${pixelAtOffset}(image, ivec2(${-dx | 0}, ${-dy | 0})) * float(${+k});
  2384. `;
  2385. // shader
  2386. const source = `
  2387. uniform sampler2D image;
  2388. void main()
  2389. {
  2390. float alpha = threadPixel(image).a;
  2391. vec4 result = vec4(0.0f);
  2392. ${foreachKernelElement(generateCode)}
  2393. color = vec4(result.rgb, alpha);
  2394. }
  2395. `;
  2396. // done!
  2397. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2398. }
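/*
 * Usage sketch: a 3x3 box blur (hypothetical kernel), normalized so that
 * the weights sum to 1:
 *
 *   const boxBlur = conv2D([
 *       1, 1, 1,
 *       1, 1, 1,
 *       1, 1, 1
 *   ], 1 / 9);
 *
 * The generated shader performs 9 weighted texture reads around the current
 * pixel and preserves the original alpha channel.
 */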
  2399. /**
  2400. * Generate a 1D convolution function on the x-axis
  2401. * @param {number[]} kernel convolution kernel
  2402. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2403. * @returns {ShaderDeclarationBuilder}
  2404. */
  2405. function convX(kernel, normalizationConstant = 1.0) {
  2406. return conv1D('x', kernel, normalizationConstant);
  2407. }
  2408. /**
  2409. * Generate a 1D convolution function on the y-axis
  2410. * @param {number[]} kernel convolution kernel
  2411. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2412. * @returns {ShaderDeclarationBuilder}
  2413. */
  2414. function convY(kernel, normalizationConstant = 1.0) {
  2415. return conv1D('y', kernel, normalizationConstant);
  2416. }
  2417. /**
  2418. * 1D convolution function generator
  2419. * @param {string} axis either "x" or "y"
  2420. * @param {number[]} kernel convolution kernel
  2421. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2422. * @returns {ShaderDeclarationBuilder}
  2423. */
  2424. function conv1D(axis, kernel, normalizationConstant = 1.0) {
  2425. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2426. const kSize = kernel32.length;
  2427. const N = kSize >> 1; // idiv 2
  2428. // validate input
  2429. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);else if (axis != 'x' && axis != 'y') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
  2430. // select the appropriate pixel function
  2431. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2432. // code generator
  2433. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce((acc, cur) => acc + fn(kernel32[cur + N], cur), '');
  2434. const generateCode = (k, i) => axis == 'x' ? `
  2435. pixel += ${pixelAtOffset}(image, ivec2(${-i | 0}, 0)) * float(${+k});
  2436. ` : `
  2437. pixel += ${pixelAtOffset}(image, ivec2(0, ${-i | 0})) * float(${+k});
  2438. `;
  2439. // shader
  2440. const source = `
  2441. uniform sampler2D image;
  2442. void main()
  2443. {
  2444. float alpha = threadPixel(image).a;
  2445. vec4 pixel = vec4(0.0f);
  2446. ${foreachKernelElement(generateCode)}
  2447. color = vec4(pixel.rgb, alpha);
  2448. }
  2449. `;
  2450. // done!
  2451. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2452. }
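/*
 * Usage sketch: a separable 5-tap blur applied as two 1D passes
 * (hypothetical kernel weights):
 *
 *   const blurX = convX([1, 4, 6, 4, 1], 1 / 16);
 *   const blurY = convY([1, 4, 6, 4, 1], 1 / 16);
 *
 * Running blurX followed by blurY is equivalent to the corresponding 5x5
 * 2D convolution, with far fewer texture reads per pixel.
 */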
  2453. /***/ }),
  2454. /***/ 1001:
  2455. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_95546__) => {
  2456. "use strict";
  2457. /* harmony export */ __nested_webpack_require_95546__.d(__nested_webpack_exports__, {
  2458. /* harmony export */ c: () => (/* binding */ SpeedyGL)
  2459. /* harmony export */ });
  2460. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_95546__(9037);
  2461. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_95546__(2199);
  2462. /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_95546__(3211);
  2463. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_95546__(9192);
  2464. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_95546__(8581);
  2465. /*
  2466. * speedy-vision.js
  2467. * GPU-accelerated Computer Vision for JavaScript
  2468. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2469. *
  2470. * Licensed under the Apache License, Version 2.0 (the "License");
  2471. * you may not use this file except in compliance with the License.
  2472. * You may obtain a copy of the License at
  2473. *
  2474. * http://www.apache.org/licenses/LICENSE-2.0
  2475. *
  2476. * Unless required by applicable law or agreed to in writing, software
  2477. * distributed under the License is distributed on an "AS IS" BASIS,
  2478. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2479. * See the License for the specific language governing permissions and
  2480. * limitations under the License.
  2481. *
  2482. * speedy-gl.js
  2483. * A wrapper around the WebGL Rendering Context
  2484. */
  2485. /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
  2486. // Constants
  2487. const SINGLETON_KEY = Symbol();
  2488. const DEFAULT_POWER_PREFERENCE = 'default';
  2489. //
  2490. // We use a small canvas to improve the performance
  2491. // of createImageBitmap() on Firefox.
  2492. //
  2493. // A large canvas (2048x2048) causes a FPS drop, even
  2494. // if we only extract a small region of it (this is
  2495. // unlike Chrome, which is fast).
  2496. //
  2497. // Note: we automatically increase the size of the
  2498. // canvas (as needed) when rendering to it.
  2499. //
  2500. const CANVAS_WIDTH = 16,
  2501. CANVAS_HEIGHT = 16;
  2502. /** @type {SpeedyGL} Singleton */
  2503. let instance = null;
  2504. /** @type {PowerPreference} power preference */
  2505. let powerPreference = DEFAULT_POWER_PREFERENCE;
  2506. /**
  2507. * A wrapper around a WebGL Rendering Context
  2508. */
  2509. class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c {
  2510. /**
  2511. * Constructor
  2512. * @param {Symbol} key
  2513. * @private
  2514. */
  2515. constructor(key) {
  2516. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
  2517. super();
  2518. /** @type {boolean} internal flag */
  2519. this._reinitializeOnContextLoss = true;
  2520. /** @type {HTMLCanvasElement} internal canvas */
  2521. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2522. /** @type {WebGL2RenderingContext} WebGL rendering context */
  2523. this._gl = this._createContext(this._canvas);
  2524. /** @type {string} vendor string of the video driver */
  2525. this._vendor = '';
  2526. /** @type {string} renderer string of the video driver */
  2527. this._renderer = '';
  2528. // read driver info
  2529. this._readDriverInfo();
  2530. // log driver info
  2531. if (_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic') this._logDriverInfo();
  2532. }
  2533. /**
  2534. * Get Singleton
  2535. * @returns {SpeedyGL}
  2536. */
  2537. static get instance() {
  2538. return instance || (instance = new SpeedyGL(SINGLETON_KEY));
  2539. }
  2540. /**
  2541. * The WebGL Rendering Context
  2542. * Be careful not to cache this rendering context, as it may be lost!
  2543. * @returns {WebGL2RenderingContext}
  2544. */
  2545. get gl() {
  2546. return this._gl;
  2547. }
  2548. /**
  2549. * The internal canvas
  2550. * @returns {HTMLCanvasElement}
  2551. */
  2552. get canvas() {
  2553. return this._canvas;
  2554. }
  2555. /**
  2556. * Renderer string of the video driver
  2557. * @returns {string}
  2558. */
  2559. get renderer() {
  2560. return this._renderer;
  2561. }
  2562. /**
  2563. * Vendor string of the video driver
  2564. * @returns {string}
  2565. */
  2566. get vendor() {
  2567. return this._vendor;
  2568. }
  2569. /**
  2570. * Create a WebGL-capable canvas
  2571. * @param {Function} reinitialize to be called if we get a WebGL context loss event
  2572. * @returns {HTMLCanvasElement}
  2573. */
  2574. _createCanvas(reinitialize) {
  2575. const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
  2576. canvas.addEventListener('webglcontextlost', ev => {
  2577. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
  2578. setTimeout(reinitialize, 0);
  2579. ev.preventDefault();
  2580. }, false);
  2581. /*canvas.addEventListener('webglcontextrestored', ev => {
  2582. Utils.warning(`Restored WebGL2 context`);
  2583. ev.preventDefault();
  2584. }, false);*/
  2585. return canvas;
  2586. }
  2587. /**
  2588. * Create a WebGL2 Rendering Context
  2589. * @param {HTMLCanvasElement} canvas
  2590. * @returns {WebGL2RenderingContext}
  2591. */
  2592. _createContext(canvas) {
  2593. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
  2594. // does the browser support WebGL2?
  2595. if (typeof WebGL2RenderingContext === 'undefined') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please update your system.`);
  2596. const gl = canvas.getContext('webgl2', {
  2597. premultipliedAlpha: false,
  2598. preserveDrawingBuffer: false,
  2599. powerPreference: powerPreference,
  2600. alpha: true,
  2601. // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  2602. antialias: false,
  2603. depth: false,
  2604. stencil: false,
  2605. desynchronized: true
  2606. });
  2607. if (!gl) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
  2608. return gl;
  2609. }
  2610. /**
  2611. * Reinitialize WebGL
  2612. */
  2613. _reinitialize() {
  2614. // disable reinitialization?
  2615. if (!this._reinitializeOnContextLoss) return;
  2616. // warning
  2617. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
  2618. // create new canvas
  2619. this._canvas.remove();
  2620. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2621. // create new context
  2622. this._gl = this._createContext(this._canvas);
  2623. // is this needed?
  2624. this._readDriverInfo();
  2625. // notify observers: we have a new context!
  2626. // we need to recreate all textures...
  2627. this._notify();
  2628. }
  2629. /**
2630. * Read debugging information about the user's video driver
placeholder
  2631. */
  2632. _readDriverInfo() {
  2633. // Depending on the privacy settings of the browser, this information
  2634. // may be unavailable. When available, it may not be entirely correct.
  2635. // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
  2636. const gl = this._gl;
  2637. let debugInfo = null;
  2638. if (navigator.userAgent.includes('Firefox')) {
  2639. this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
  2640. this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
  2641. } else if (null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
  2642. this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
  2643. this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
  2644. } else {
  2645. this._vendor = ''; // unavailable information
  2646. this._renderer = '';
  2647. }
  2648. }
  2649. /**
  2650. * Log debugging information about the video driver and the platform
  2651. */
  2652. _logDriverInfo() {
  2653. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
  2654. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
  2655. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
  2656. }
  2657. /**
  2658. * Lose the WebGL context. This is used to manually
2659. * free resources, and also for testing purposes
  2660. * @returns {WEBGL_lose_context}
  2661. */
  2662. loseContext() {
  2663. const gl = this._gl;
  2664. // find the appropriate extension
  2665. const ext = gl.getExtension('WEBGL_lose_context');
  2666. if (!ext) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
  2667. // nothing to do?
  2668. if (gl.isContextLost()) return ext;
  2669. // disable reinitialization
  2670. this._reinitializeOnContextLoss = false;
  2671. // lose context
  2672. ext.loseContext();
  2673. // done!
  2674. return ext;
  2675. }
  2676. /**
  2677. * Lose & restore the WebGL context
  2678. * @param {number} [secondsToRestore]
  2679. * @return {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
  2680. */
  2681. loseAndRestoreContext(secondsToRestore = 1) {
  2682. const ms = Math.max(secondsToRestore, 0) * 1000;
  2683. const ext = this.loseContext();
  2684. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
  2685. setTimeout(() => {
  2686. //ext.restoreContext();
  2687. this._reinitializeOnContextLoss = true;
  2688. this._reinitialize();
  2689. setTimeout(() => resolve(ext), 0); // next frame
  2690. }, ms);
  2691. });
  2692. }
  2693. /**
  2694. * Power preference for the WebGL context
  2695. * @returns {PowerPreference}
  2696. */
  2697. static get powerPreference() {
  2698. return powerPreference;
  2699. }
  2700. /**
  2701. * Power preference for the WebGL context
  2702. * @param {PowerPreference} value
  2703. */
  2704. static set powerPreference(value) {
  2705. // validate
  2706. if (!(value === 'default' || value === 'low-power' || value === 'high-performance')) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
  2707. // the power preference should be set before we create the WebGL context
  2708. if (instance == null || powerPreference !== value) {
  2709. powerPreference = value;
  2710. // recreate the context if it already exists. Experimental.
  2711. if (instance != null) instance.loseAndRestoreContext();
  2712. }
  2713. }
  2714. /**
  2715. * Check if an instance of SpeedyGL has already been created
  2716. * @returns {boolean}
  2717. */
  2718. static isInitialized() {
  2719. return instance != null;
  2720. }
  2721. }
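// Note (added): a minimal usage sketch of the SpeedyGL singleton, assuming only
// the internal API shown above (this is not part of the public speedy-vision API):
//
//   SpeedyGL.powerPreference = 'high-performance'; // set before the context is created
//   const speedyGL = SpeedyGL.instance;            // lazily creates the canvas + WebGL2 context
//   speedyGL.subscribe(() => {
//       // the context was lost & reinitialized: observers must recreate
//       // their GPU resources (textures, programs, framebuffers)
//   });
//   const gl = speedyGL.gl; // do not cache this reference, it may be lost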
  2722. /***/ }),
  2723. /***/ 8581:
  2724. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_105678__) => {
  2725. "use strict";
  2726. /* harmony export */ __nested_webpack_require_105678__.d(__nested_webpack_exports__, {
  2727. /* harmony export */ EM: () => (/* binding */ NotSupportedError),
  2728. /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
  2729. /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
  2730. /* harmony export */ MU: () => (/* binding */ TimeoutError),
  2731. /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
  2732. /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
  2733. /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
  2734. /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
  2735. /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
  2736. /* harmony export */ mB: () => (/* binding */ ParseError),
  2737. /* harmony export */ pf: () => (/* binding */ AssertionError),
  2738. /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
  2739. /* harmony export */ wB: () => (/* binding */ GLError),
  2740. /* harmony export */ xB: () => (/* binding */ SpeedyError)
  2741. /* harmony export */ });
  2742. /* unused harmony export NotImplementedError */
  2743. /*
  2744. * speedy-vision.js
  2745. * GPU-accelerated Computer Vision for JavaScript
  2746. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2747. *
  2748. * Licensed under the Apache License, Version 2.0 (the "License");
  2749. * you may not use this file except in compliance with the License.
  2750. * You may obtain a copy of the License at
  2751. *
  2752. * http://www.apache.org/licenses/LICENSE-2.0
  2753. *
  2754. * Unless required by applicable law or agreed to in writing, software
  2755. * distributed under the License is distributed on an "AS IS" BASIS,
  2756. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2757. * See the License for the specific language governing permissions and
  2758. * limitations under the License.
  2759. *
  2760. * errors.js
  2761. * Error classes
  2762. */
  2763. /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
  2764. /**
  2765. * Generic error class for Speedy
  2766. */
  2767. class SpeedyError extends Error {
  2768. /**
  2769. * Class constructor
  2770. * @param {string} message message text
  2771. * @param {SpeedyErrorCause} [cause] cause of the error
  2772. */
  2773. constructor(message, cause = null) {
  2774. super([message, cause ? cause.toString() : '[speedy-vision.js]'].join('\n-> '));
  2775. /** @type {SpeedyErrorCause} cause of the error */
  2776. this._cause = cause;
  2777. }
  2778. /**
  2779. * Error name
  2780. * @returns {string}
  2781. */
  2782. get name() {
  2783. return this.constructor.name;
  2784. }
  2785. /**
  2786. * Set error name (ignored)
  2787. * @param {string} _ ignored
  2788. */
  2789. set name(_) {
  2790. void 0;
  2791. }
  2792. /**
  2793. * Get the cause of the error. Available if
  2794. * it has been specified in the constructor
  2795. * @returns {SpeedyErrorCause}
  2796. */
  2797. get cause() {
  2798. return this._cause;
  2799. }
  2800. }
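// Note (added): SpeedyError chains messages through the optional cause. A small
// illustration of the resulting message format (values are hypothetical):
//
//   const inner = new SpeedyError('texture upload failed');
//   const outer = new SpeedyError('pipeline aborted', inner);
//   // outer.message reads:
//   // "pipeline aborted
//   //  -> SpeedyError: texture upload failed
//   //  -> [speedy-vision.js]"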
  2801. /**
  2802. * Unsupported operation error
  2803. * The requested operation is not supported
  2804. */
  2805. class NotSupportedError extends SpeedyError {
  2806. /**
  2807. * Class constructor
  2808. * @param {string} [message] additional text
  2809. * @param {SpeedyErrorCause} [cause] cause of the error
  2810. */
  2811. constructor(message = '', cause = null) {
  2812. super(`Unsupported operation. ${message}`, cause);
  2813. }
  2814. }
  2815. /**
  2816. * Not implemented error
  2817. * The called method is not implemented
  2818. */
  2819. class NotImplementedError extends SpeedyError {
  2820. /**
  2821. * Class constructor
  2822. * @param {string} [message] additional text
  2823. * @param {SpeedyErrorCause} [cause] cause of the error
  2824. */
  2825. constructor(message = '', cause = null) {
  2826. super(`Method not implemented. ${message}`, cause);
  2827. }
  2828. }
  2829. /**
  2830. * WebGL error
  2831. */
  2832. class GLError extends SpeedyError {
  2833. /**
  2834. * Class constructor
  2835. * @param {string} [message] additional text
  2836. * @param {SpeedyErrorCause} [cause] cause of the error
  2837. */
  2838. constructor(message = '', cause = null) {
  2839. super(`WebGL error. ${message}`, cause);
  2840. }
  2841. /**
  2842. * Get an error object describing the latest WebGL error
  2843. * @param {WebGL2RenderingContext} gl
  2844. * @returns {GLError}
  2845. */
  2846. static from(gl) {
  2847. const recognizedErrors = ['NO_ERROR', 'INVALID_ENUM', 'INVALID_VALUE', 'INVALID_OPERATION', 'INVALID_FRAMEBUFFER_OPERATION', 'OUT_OF_MEMORY', 'CONTEXT_LOST_WEBGL'];
  2848. const glError = gl.getError();
  2849. const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
  2850. return new GLError(message);
  2851. }
  2852. }
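// Note (added): GLError.from() maps the result of gl.getError() back to the name
// of the matching WebGL constant. A hedged usage sketch:
//
//   gl.texImage2D(/* ... possibly invalid arguments ... */);
//   const err = GLError.from(gl);
//   // err.message is e.g. "WebGL error. INVALID_VALUE"
//   // (or "WebGL error. NO_ERROR" if nothing failed)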
  2853. /**
  2854. * AbstractMethodError
  2855. * Thrown when one tries to call an abstract method
  2856. */
  2857. class AbstractMethodError extends SpeedyError {
  2858. /**
  2859. * Class constructor
  2860. * @param {string} [message] additional text
  2861. * @param {SpeedyErrorCause} [cause] cause of the error
  2862. */
  2863. constructor(message = '', cause = null) {
  2864. super(`Can't call abstract method. ${message}`, cause);
  2865. }
  2866. }
  2867. /**
  2868. * Illegal argument error
  2869. * A method has received one or more illegal arguments
  2870. */
  2871. class IllegalArgumentError extends SpeedyError {
  2872. /**
  2873. * Class constructor
  2874. * @param {string} [message] additional text
  2875. * @param {SpeedyErrorCause} [cause] cause of the error
  2876. */
  2877. constructor(message = '', cause = null) {
  2878. super(`Illegal argument. ${message}`, cause);
  2879. }
  2880. }
  2881. /**
  2882. * Illegal operation error
  2883. * The method arguments are valid, but the method can't
2884. * be called due to the current state of the object
  2885. */
  2886. class IllegalOperationError extends SpeedyError {
  2887. /**
  2888. * Class constructor
  2889. * @param {string} [message] additional text
  2890. * @param {SpeedyErrorCause} [cause] cause of the error
  2891. */
  2892. constructor(message = '', cause = null) {
  2893. super(`Illegal operation. ${message}`, cause);
  2894. }
  2895. }
  2896. /**
  2897. * Out of memory
  2898. */
  2899. class OutOfMemoryError extends SpeedyError {
  2900. /**
  2901. * Class constructor
  2902. * @param {string} [message] additional text
  2903. * @param {SpeedyErrorCause} [cause] cause of the error
  2904. */
  2905. constructor(message = '', cause = null) {
  2906. super(`Out of memory. ${message}`, cause);
  2907. }
  2908. }
  2909. /**
  2910. * File not found error
  2911. */
  2912. class FileNotFoundError extends SpeedyError {
  2913. /**
  2914. * Class constructor
  2915. * @param {string} [message] additional text
  2916. * @param {SpeedyErrorCause} [cause] cause of the error
  2917. */
  2918. constructor(message = '', cause = null) {
  2919. super(`File not found. ${message}`, cause);
  2920. }
  2921. }
  2922. /**
  2923. * Resource not loaded error
  2924. */
  2925. class ResourceNotLoadedError extends SpeedyError {
  2926. /**
  2927. * Class constructor
  2928. * @param {string} [message] additional text
  2929. * @param {SpeedyErrorCause} [cause] cause of the error
  2930. */
  2931. constructor(message = '', cause = null) {
  2932. super(`Resource not loaded. ${message}`, cause);
  2933. }
  2934. }
  2935. /**
  2936. * Timeout error
  2937. */
  2938. class TimeoutError extends SpeedyError {
  2939. /**
  2940. * Class constructor
  2941. * @param {string} [message] additional text
  2942. * @param {SpeedyErrorCause} [cause] cause of the error
  2943. */
  2944. constructor(message = '', cause = null) {
  2945. super(`Timeout error. ${message}`, cause);
  2946. }
  2947. }
  2948. /**
  2949. * Parse error
  2950. */
  2951. class ParseError extends SpeedyError {
  2952. /**
  2953. * Class constructor
  2954. * @param {string} [message] additional text
  2955. * @param {SpeedyErrorCause} [cause] cause of the error
  2956. */
  2957. constructor(message = '', cause = null) {
  2958. super(`Parse error. ${message}`, cause);
  2959. }
  2960. }
  2961. /**
  2962. * Assertion error
  2963. */
  2964. class AssertionError extends SpeedyError {
  2965. /**
  2966. * Class constructor
  2967. * @param {string} [message] additional text
  2968. * @param {SpeedyErrorCause} [cause] cause of the error
  2969. */
  2970. constructor(message = '', cause = null) {
  2971. super(`Assertion failed. ${message}`, cause);
  2972. }
  2973. }
  2974. /**
  2975. * Access denied
  2976. */
  2977. class AccessDeniedError extends SpeedyError {
  2978. /**
  2979. * Class constructor
  2980. * @param {string} [message] additional text
  2981. * @param {SpeedyErrorCause} [cause] cause of the error
  2982. */
  2983. constructor(message = '', cause = null) {
  2984. super(`Access denied. ${message}`, cause);
  2985. }
  2986. }
  2987. /**
  2988. * WebAssembly error
  2989. */
  2990. class WebAssemblyError extends SpeedyError {
  2991. /**
  2992. * Class constructor
  2993. * @param {string} [message] additional text
  2994. * @param {SpeedyErrorCause} [cause] cause of the error
  2995. */
  2996. constructor(message = '', cause = null) {
  2997. super(`WebAssembly error. ${message}`, cause);
  2998. }
  2999. }
  3000. /***/ }),
  3001. /***/ 3816:
  3002. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_113692__) => {
  3003. "use strict";
  3004. __nested_webpack_require_113692__.r(__nested_webpack_exports__);
  3005. /* harmony export */ __nested_webpack_require_113692__.d(__nested_webpack_exports__, {
  3006. /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
  3007. /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
  3008. /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
  3009. /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
  3010. /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
  3011. /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
  3012. /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
  3013. /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
  3014. /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
  3015. /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
  3016. /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
  3017. /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
  3018. /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
  3019. /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
  3020. /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
  3021. /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
  3022. /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
  3023. /* harmony export */ });
  3024. /*
  3025. * speedy-vision.js
  3026. * GPU-accelerated Computer Vision for JavaScript
  3027. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3028. *
  3029. * Licensed under the Apache License, Version 2.0 (the "License");
  3030. * you may not use this file except in compliance with the License.
  3031. * You may obtain a copy of the License at
  3032. *
  3033. * http://www.apache.org/licenses/LICENSE-2.0
  3034. *
  3035. * Unless required by applicable law or agreed to in writing, software
  3036. * distributed under the License is distributed on an "AS IS" BASIS,
  3037. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3038. * See the License for the specific language governing permissions and
  3039. * limitations under the License.
  3040. *
  3041. * globals.js
  3042. * Global constants
  3043. */
  3044. // -----------------------------------------------------------------
  3045. // IMAGE PYRAMIDS & SCALE-SPACE
  3046. // -----------------------------------------------------------------
  3047. /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
  3048. const PYRAMID_MAX_LEVELS = 8;
  3049. /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
  3050. const LOG2_PYRAMID_MAX_SCALE = 0;
  3051. /** @type {number} The maximum supported scale for a pyramid level */
  3052. const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
  3053. // -----------------------------------------------------------------
  3054. // FIXED-POINT MATH
  3055. // -----------------------------------------------------------------
  3056. /** @type {number} How many bits do we use to store fractional data? */
  3057. const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
  3058. /** @type {number} Fixed-point resolution */
  3059. const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
  3060. // -----------------------------------------------------------------
  3061. // TEXTURE LIMITS
  3062. // -----------------------------------------------------------------
  3063. /** @type {number} Maximum texture length (width, height) */
  3064. const MAX_TEXTURE_LENGTH = (1 << 16 - FIX_BITS) - 1; // must be 2^n - 1 due to keypoint encoding
  3065. // -----------------------------------------------------------------
  3066. // KEYPOINTS
  3067. // -----------------------------------------------------------------
  3068. /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
  3069. const MIN_KEYPOINT_SIZE = 8;
  3070. /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
  3071. const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
  3072. /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
  3073. const MAX_ENCODER_CAPACITY = 8192;
  3074. /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
  3075. const DEFAULT_ENCODER_CAPACITY = 2048;
  3076. /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
  3077. const LOG2_MAX_DESCRIPTOR_SIZE = 6;
  3078. /** @type {number} maximum size of a keypoint descriptor, in bytes */
  3079. const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
  3080. /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
  3081. const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
  3082. /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
  3083. const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
  3084. /** @type {number} Maximum size of the database of keypoints for matching */
  3085. const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
  3086. /** @type {number} The maximum distance that can be stored in a match */
  3087. const MATCH_MAX_DISTANCE = (1 << 32 - MATCH_INDEX_BITS) - 1;
  3088. // -----------------------------------------------------------------
  3089. // MISC
  3090. // -----------------------------------------------------------------
  3091. /** @type {boolean} Are we in a little-endian machine? */
  3092. const LITTLE_ENDIAN = function () {
  3093. return 0xCAFE === new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer)[0];
  3094. }();
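// Note (added): a small arithmetic illustration of the derived constants above:
//
//   FIX_RESOLUTION     = 1 << 3 = 8           (keypoint positions use 1/8-pixel steps)
//   MAX_TEXTURE_LENGTH = (1 << 13) - 1 = 8191
//   MATCH_INDEX_BITS   = 32 - (6 + 3) = 23
//   MATCH_MAX_INDEX    = 2^23 - 1 = 8388607
//   MATCH_MAX_DISTANCE = 2^9 - 1  = 511
//
// A plausible packing of a keypoint match, consistent with MATCH_INDEX_MASK
// (the actual GPU-side encoding lives in the GLSL code):
//
//   const packed = (distance << MATCH_INDEX_BITS) | (index & MATCH_INDEX_MASK);
//   const idx    = packed & MATCH_INDEX_MASK;
//   const dist   = packed >>> MATCH_INDEX_BITS;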
  3095. /***/ }),
  3096. /***/ 3211:
  3097. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_119275__) => {
  3098. "use strict";
  3099. /* harmony export */ __nested_webpack_require_119275__.d(__nested_webpack_exports__, {
  3100. /* harmony export */ c: () => (/* binding */ Observable)
  3101. /* harmony export */ });
  3102. /*
  3103. * speedy-vision.js
  3104. * GPU-accelerated Computer Vision for JavaScript
  3105. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3106. *
  3107. * Licensed under the Apache License, Version 2.0 (the "License");
  3108. * you may not use this file except in compliance with the License.
  3109. * You may obtain a copy of the License at
  3110. *
  3111. * http://www.apache.org/licenses/LICENSE-2.0
  3112. *
  3113. * Unless required by applicable law or agreed to in writing, software
  3114. * distributed under the License is distributed on an "AS IS" BASIS,
  3115. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3116. * See the License for the specific language governing permissions and
  3117. * limitations under the License.
  3118. *
  3119. * observable.js
  3120. * Observer design pattern
  3121. */
  3122. /**
  3123. * Implementation of the Observer design pattern
  3124. * @abstract
  3125. */
  3126. class Observable {
  3127. /**
  3128. * Constructor
  3129. */
  3130. constructor() {
  3131. /** @type {Function[]} subscribers / callbacks */
  3132. this._subscribers = [];
  3133. /** @type {object[]} "this" pointers */
  3134. this._thisptr = [];
  3135. /** @type {Array<any[]>} function arguments */
  3136. this._args = [];
  3137. }
  3138. /**
  3139. * Add subscriber
  3140. * @param {Function} fn callback
  3141. * @param {object} [thisptr] "this" pointer to be used when invoking the callback
  3142. * @param {...any} args arguments to be passed to the callback
  3143. */
  3144. subscribe(fn, thisptr, ...args) {
  3145. this._subscribers.push(fn);
  3146. this._thisptr.push(thisptr);
  3147. this._args.push(args);
  3148. }
  3149. /**
  3150. * Remove subscriber
  3151. * @param {Function} fn previously added callback
  3152. * @param {object} [thisptr] "this" pointer
  3153. */
  3154. unsubscribe(fn, thisptr) {
  3155. for (let j = this._subscribers.length - 1; j >= 0; j--) {
  3156. if (this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
  3157. this._subscribers.splice(j, 1);
  3158. this._thisptr.splice(j, 1);
  3159. this._args.splice(j, 1);
  3160. break;
  3161. }
  3162. }
  3163. }
  3164. /**
  3165. * Notify all subscribers about a state change
  3166. * @protected
  3167. */
  3168. _notify() {
  3169. for (let i = 0; i < this._subscribers.length; i++) this._subscribers[i].apply(this._thisptr[i], this._args[i]);
  3170. }
  3171. }
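// Note (added): minimal usage sketch of the Observable base class (internal):
//
//   class Counter extends Observable {
//       increment() { this._count = (this._count || 0) + 1; this._notify(); }
//   }
//   const counter = new Counter();
//   const onChange = label => console.log(label, 'changed');
//   counter.subscribe(onChange, null, 'counter'); // extra args are forwarded to the callback
//   counter.increment();                          // logs "counter changed"
//   counter.unsubscribe(onChange, null);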
  3172. /***/ }),
  3173. /***/ 6049:
  3174. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_121659__) => {
  3175. "use strict";
  3176. /* harmony export */ __nested_webpack_require_121659__.d(__nested_webpack_exports__, {
  3177. /* harmony export */ f5: () => (/* binding */ ImageFormat),
  3178. /* harmony export */ kQ: () => (/* binding */ PixelComponent),
  3179. /* harmony export */ kg: () => (/* binding */ ColorComponentId),
  3180. /* harmony export */ zu: () => (/* binding */ MediaType)
  3181. /* harmony export */ });
  3182. /*
  3183. * speedy-vision.js
  3184. * GPU-accelerated Computer Vision for JavaScript
  3185. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3186. *
  3187. * Licensed under the Apache License, Version 2.0 (the "License");
  3188. * you may not use this file except in compliance with the License.
  3189. * You may obtain a copy of the License at
  3190. *
  3191. * http://www.apache.org/licenses/LICENSE-2.0
  3192. *
  3193. * Unless required by applicable law or agreed to in writing, software
  3194. * distributed under the License is distributed on an "AS IS" BASIS,
  3195. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3196. * See the License for the specific language governing permissions and
  3197. * limitations under the License.
  3198. *
  3199. * types.js
  3200. * Types & formats
  3201. */
  3202. /**
  3203. * Media types
  3204. * @enum {Symbol}
  3205. */
  3206. const MediaType = Object.freeze({
  3207. Image: Symbol('Image'),
  3208. Video: Symbol('Video'),
  3209. Canvas: Symbol('Canvas'),
  3210. OffscreenCanvas: Symbol('OffscreenCanvas'),
  3211. Bitmap: Symbol('Bitmap'),
  3212. Data: Symbol('Data')
  3213. });
  3214. /**
  3215. * Image formats
  3216. * @enum {Symbol}
  3217. */
  3218. const ImageFormat = Object.freeze({
  3219. RGBA: Symbol('RGBA'),
  3220. GREY: Symbol('GREY')
  3221. });
  3222. /**
  3223. * Pixel component (bitwise flags)
  3224. * @typedef {number} PixelComponent
  3225. */
  3226. const PixelComponent = Object.freeze({
  3227. RED: 1,
  3228. GREEN: 2,
  3229. BLUE: 4,
  3230. ALPHA: 8,
  3231. ALL: 15 // = RED | GREEN | BLUE | ALPHA
  3232. });
  3233. /**
  3234. * Component ID utility
  3235. */
  3236. const ColorComponentId = Object.freeze({
  3237. [PixelComponent.RED]: 0,
  3238. [PixelComponent.GREEN]: 1,
  3239. [PixelComponent.BLUE]: 2,
  3240. [PixelComponent.ALPHA]: 3
  3241. });
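// Note (added): PixelComponent values are bitwise flags, and ColorComponentId maps
// a single flag to its channel index. A short illustration:
//
//   const components = PixelComponent.RED | PixelComponent.ALPHA; // = 9
//   const hasGreen = (components & PixelComponent.GREEN) != 0;    // false
//   const alphaChannel = ColorComponentId[PixelComponent.ALPHA];  // 3 (RGBA index)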
  3242. /***/ }),
  3243. /***/ 9037:
  3244. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_123644__) => {
  3245. "use strict";
  3246. /* harmony export */ __nested_webpack_require_123644__.d(__nested_webpack_exports__, {
  3247. /* harmony export */ A: () => (/* binding */ Utils)
  3248. /* harmony export */ });
  3249. /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_123644__(8581);
  3250. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_123644__(9192);
  3251. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_123644__(2199);
  3252. /*
  3253. * speedy-vision.js
  3254. * GPU-accelerated Computer Vision for JavaScript
  3255. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3256. *
  3257. * Licensed under the Apache License, Version 2.0 (the "License");
  3258. * you may not use this file except in compliance with the License.
  3259. * You may obtain a copy of the License at
  3260. *
  3261. * http://www.apache.org/licenses/LICENSE-2.0
  3262. *
  3263. * Unless required by applicable law or agreed to in writing, software
  3264. * distributed under the License is distributed on an "AS IS" BASIS,
  3265. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3266. * See the License for the specific language governing permissions and
  3267. * limitations under the License.
  3268. *
  3269. * utils.js
  3270. * Generic utilities
  3271. */
  3272. /**
  3273. * Generic utilities
  3274. */
  3275. class Utils {
  3276. /**
  3277. * Generates a warning
  3278. * @param {string} text message text
  3279. * @param {...string} args optional text
  3280. */
  3281. static warning(text, ...args) {
  3282. //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
  3283. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.warn('[speedy-vision] ' + text, ...args);
  3284. }
  3285. /**
  3286. * Logs a message
  3287. * @param {string} text message text
  3288. * @param {...string} args optional text
  3289. */
  3290. static log(text, ...args) {
  3291. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.log('[speedy-vision] ' + text, ...args);
  3292. }
  3293. /**
  3294. * Assertion
  3295. * @param {boolean} expr expression
  3296. * @param {string} [text] error message
  3297. * @throws {AssertionError}
  3298. */
  3299. static assert(expr, text = '') {
  3300. if (!expr) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
  3301. }
  3302. /**
  3303. * Gets the names of the arguments of the specified function
  3304. * @param {Function} fun
  3305. * @returns {string[]}
  3306. */
  3307. static functionArguments(fun) {
  3308. const code = fun.toString();
  3309. const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' : code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>';
  3310. const match = new RegExp(regex).exec(code);
  3311. if (match !== null) {
  3312. const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
  3313. return args.split(',').map(argname => argname.replace(/=.*$/, '').trim() // remove default params & trim
  3314. ).filter(argname => argname // handle trailing commas
  3315. );
  3316. } else throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
  3317. }
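// Note (added): functionArguments() parses parameter names out of Function.toString().
// Illustrative calls (default values and inline comments are stripped):
//
//   Utils.functionArguments(function foo(a, b = 1) { });  // [ 'a', 'b' ]
//   Utils.functionArguments((x, /* unused */ y) => x);    // [ 'x', 'y' ]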
  3318. /**
  3319. * Get all property descriptors from an object,
  3320. * traversing its entire prototype chain
  3321. * @param {object} obj
  3322. * @returns {object}
  3323. */
  3324. static getAllPropertyDescriptors(obj) {
  3325. if (obj) {
  3326. const proto = Object.getPrototypeOf(obj);
  3327. return Object.assign(Object.assign({}, Utils.getAllPropertyDescriptors(proto)), Object.getOwnPropertyDescriptors(obj));
  3328. } else return Object.create(null);
  3329. }
  3330. /**
  3331. * Creates a HTMLCanvasElement with the given dimensions
  3332. * @param {number} width in pixels
  3333. * @param {number} height in pixels
  3334. * @returns {HTMLCanvasElement}
  3335. */
  3336. static createCanvas(width, height) {
  3337. const canvas = document.createElement('canvas');
  3338. canvas.width = width;
  3339. canvas.height = height;
  3340. return canvas;
  3341. }
  3342. /**
  3343. * Generate a 1D gaussian kernel with custom sigma
  3344. * Tip: use kernelSize >= (5 * sigma), kernelSize odd
  3345. * @param {number} sigma gaussian sigma
  3346. * @param {number} [kernelSize] kernel size, odd number
  3347. * @param {boolean} [normalized] normalize entries so that their sum is 1
  3348. * @returns {number[]}
  3349. */
  3350. static gaussianKernel(sigma, kernelSize = 0, normalized = true) {
  3351. /*
  3352. * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
  3353. *
  3354. * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
  3355. *
  3356. * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
  3357. *
  3358. * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
  3359. * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
  3360. *
  3361. * Setting a constant c := sqrt(2) * sigma, it follows that:
  3362. *
  3363. * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
  3364. */
  3365. // default kernel size
  3366. if (kernelSize == 0) {
  3367. kernelSize = Math.ceil(5.0 * sigma) | 0;
  3368. kernelSize += 1 - kernelSize % 2;
  3369. }
  3370. // validate input
  3371. kernelSize |= 0;
  3372. if (kernelSize < 1 || kernelSize % 2 == 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);else if (sigma <= 0.0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
3373. // the odd function erf(x) = -erf(-x) can be approximated numerically. See:
  3374. // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
  3375. const kernel = new Array(kernelSize);
  3376. // set constants
  3377. const N = kernelSize >> 1; // integer (floor, div 2)
  3378. const c = +sigma * 1.4142135623730951; // sigma * sqrt(2)
  3379. const m = 0.3275911;
  3380. const a1 = 0.254829592;
  3381. const a2 = -0.284496736;
  3382. const a3 = 1.421413741;
  3383. const a4 = -1.453152027;
  3384. const a5 = 1.061405429;
  3385. // compute the kernel
  3386. let sum = 0.0;
  3387. for (let j = 0; j < kernelSize; j++) {
  3388. let xa = (j - N + 0.5) / c;
  3389. let xb = (j - N - 0.5) / c;
  3390. let sa = 1.0,
  3391. sb = 1.0;
  3392. if (xa < 0.0) {
  3393. sa = -1.0;
  3394. xa = -xa;
  3395. }
  3396. if (xb < 0.0) {
  3397. sb = -1.0;
  3398. xb = -xb;
  3399. }
  3400. const ta = 1.0 / (1.0 + m * xa);
  3401. const tb = 1.0 / (1.0 + m * xb);
  3402. const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
  3403. const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
  3404. const ya = 1.0 - pa * Math.exp(-xa * xa);
  3405. const yb = 1.0 - pb * Math.exp(-xb * xb);
  3406. const erfa = sa * ya;
  3407. const erfb = sb * yb;
  3408. const fp = (erfa - erfb) / (2.0 * c);
  3409. kernel[j] = fp;
  3410. sum += fp;
  3411. }
  3412. // normalize the kernel
  3413. if (normalized) {
  3414. for (let j = 0; j < kernelSize; j++) kernel[j] /= sum;
  3415. }
  3416. // done!
  3417. return kernel;
  3418. }
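// Note (added): gaussianKernel() integrates the Gaussian over each pixel via an
// erf() approximation. For example, Utils.gaussianKernel(1.0, 5) yields a normalized
// 5-tap kernel whose entries are approximately:
//
//   [ 0.061, 0.245, 0.388, 0.245, 0.061 ]   // symmetric, sums to 1
//
// which is the usual separable blur kernel for sigma = 1.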
  3419. /**
  3420. * Generate a 2D kernel in column-major format using two separable 1D kernels
  3421. * @param {number[]} ka 1D kernel
  3422. * @param {number[]} [kb]
  3423. * @returns {number[]}
  3424. */
  3425. static kernel2d(ka, kb = ka) {
  3426. const ksize = ka.length;
3427. Utils.assert(ka.length == kb.length);
  3428. Utils.assert(ksize >= 1 && ksize % 2 == 1);
  3429. // compute the outer product ka x kb
  3430. let kernel2d = new Array(ksize * ksize),
  3431. k = 0;
  3432. for (let col = 0; col < ksize; col++) {
  3433. for (let row = 0; row < ksize; row++) kernel2d[k++] = ka[row] * kb[col];
  3434. }
  3435. return kernel2d;
  3436. }
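// Note (added): kernel2d() builds the outer product of two 1D kernels in
// column-major order. For instance, with the 3-tap kernel [1, 2, 1]:
//
//   Utils.kernel2d([1, 2, 1]);
//   // -> [ 1, 2, 1,   2, 4, 2,   1, 2, 1 ]   (each group of 3 is one column)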
  3437. /**
  3438. * Cartesian product a x b: [ [ai, bj] for all i, j ]
  3439. * @param {number[]} a
  3440. * @param {number[]} b
  3441. * @returns {Array<[number,number]>}
  3442. */
  3443. static cartesian(a, b) {
  3444. return [].concat(...a.map(a => b.map(b => [a, b])));
  3445. }
  3446. /**
  3447. * Symmetric range
  3448. * @param {number} n non-negative integer
  3449. * @returns {number[]} [ -n, ..., n ]
  3450. */
  3451. static symmetricRange(n) {
  3452. if ((n |= 0) < 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
  3453. return [...Array(2 * n + 1).keys()].map(x => x - n);
  3454. }
  3455. /**
  3456. * Compute the [0, n) range of integers
  3457. * @param {number} n positive integer
  3458. * @returns {number[]} [ 0, 1, ..., n-1 ]
  3459. */
  3460. static range(n) {
  3461. if ((n |= 0) <= 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
  3462. return [...Array(n).keys()];
  3463. }
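// Note (added): small illustrations of the range helpers above:
//
//   Utils.symmetricRange(2);  // [ -2, -1, 0, 1, 2 ]
//   Utils.range(3);           // [ 0, 1, 2 ]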
  3464. /**
  3465. * Shuffle in-place
  3466. * @template T
  3467. * @param {T[]} arr
  3468. * @returns {T[]} arr
  3469. */
  3470. static shuffle(arr) {
  3471. const len = arr.length;
  3472. const m = len - 1;
3473. // Fisher-Yates shuffle
  3474. for (let i = 0; i < m; i++) {
  3475. const j = i + (Math.random() * (len - i) | 0); // i <= j < arr.length
  3476. if (i !== j) {
  3477. const t = arr[i];
  3478. arr[i] = arr[j];
  3479. arr[j] = t;
  3480. }
  3481. }
  3482. return arr;
  3483. }
  3484. /**
  3485. * Flatten an array (1 level only)
  3486. * @template U
  3487. * @param {U[]} array
  3488. * @returns {U[]}
  3489. */
  3490. static flatten(array) {
  3491. //return array.flat();
  3492. //return array.reduce((arr, val) => arr.concat(val), []);
  3493. const flat = [];
  3494. for (let i = 0, n = array.length; i < n; i++) {
  3495. const entry = array[i];
  3496. if (Array.isArray(entry)) {
  3497. for (let j = 0, m = entry.length; j < m; j++) flat.push(entry[j]);
  3498. } else flat.push(entry);
  3499. }
  3500. return flat;
  3501. }
  3502. /**
  3503. * Decode a 16-bit float from a
  3504. * unsigned 16-bit integer
  3505. * @param {number} uint16
  3506. * @returns {number}
  3507. */
  3508. static decodeFloat16(uint16) {
  3509. // decode according to sec 2.1.2
  3510. // 16-Bit Floating Point Numbers
  3511. // of the OpenGL ES 3 spec
  3512. const s = (uint16 & 0xFFFF) >> 15; // sign bit
  3513. const e = (uint16 & 0x7FFF) >> 10; // exponent
  3514. const m = uint16 & 0x3FF; // mantissa
  3515. const sign = 1 - 2 * s; // (-1)^s
  3516. if (e == 0) return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
  3517. else if (e == 31) return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
  3518. const f = e >= 15 ? 1 << e - 15 : 1.0 / (1 << 15 - e); // 2^(e-15)
  3519. return sign * f * (1.0 + m * 0.0009765625); // normal
  3520. }
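// Note (added): decodeFloat16() follows the half-float layout (1 sign bit,
// 5 exponent bits, 10 mantissa bits). A few standard bit patterns as a sanity check:
//
//   Utils.decodeFloat16(0x3C00);  //  1.0
//   Utils.decodeFloat16(0xC000);  // -2.0
//   Utils.decodeFloat16(0x3800);  //  0.5
//   Utils.decodeFloat16(0x7C00);  //  Infinity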
  3521. /**
  3522. * Wrapper around getUserMedia()
  3523. * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
  3524. * @returns {SpeedyPromise<HTMLVideoElement>}
  3525. */
  3526. static requestCameraStream(constraints = {
  3527. audio: false,
  3528. video: true
  3529. }) {
  3530. Utils.log('Accessing the webcam...');
  3531. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
  3532. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
  3533. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  3534. const video = document.createElement('video');
  3535. video.onloadedmetadata = () => {
  3536. video.play();
  3537. Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
  3538. resolve(video);
  3539. };
  3540. video.setAttribute('playsinline', '');
  3541. video.setAttribute('autoplay', '');
  3542. if (constraints.audio === false || constraints.audio === undefined) video.setAttribute('muted', '');
  3543. video.srcObject = stream;
  3544. }).catch(err => {
  3545. if (err.name === 'NotAllowedError') {
  3546. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(`Please give access to the camera and reload the page.`, err));
  3547. } else if (err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
  3548. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(`Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`, err));
  3549. } else {
  3550. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(`Can't access the webcam.`, err));
  3551. }
  3552. });
  3553. });
  3554. }
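// Note (added): requestCameraStream() resolves with a playing HTMLVideoElement.
// A hedged usage sketch (the constraint values are illustrative only):
//
//   Utils.requestCameraStream({ video: { width: 640, height: 480 }, audio: false })
//       .then(video => { /* use the video element, e.g. as a source for a SpeedyMedia */ })
//       .catch(err => console.error(err.message));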
  3555. /**
  3556. * Format binary data as a string with hex values
  3557. * @param {ArrayBuffer} bytes
  3558. * @returns {string}
  3559. */
  3560. static formatBinaryData(bytes) {
  3561. const uint8 = new Uint8Array(bytes);
  3562. const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
  3563. return array.join(' ');
  3564. }
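// Note (added): formatBinaryData() is a small hex-dump helper:
//
//   Utils.formatBinaryData(new Uint8Array([0xde, 0xad, 0xbe, 0xef]).buffer);
//   // -> "de ad be ef"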
  3565. /**
  3566. * Returns a string containing platform brand information
  3567. * @returns {string}
  3568. */
  3569. static platformString() {
  3570. // navigator.userAgent is easily and often spoofed, and thus is unreliable
  3571. // use the NavigatorUAData interface if available
  3572. if (typeof navigator.userAgentData === 'object') {
  3573. // use only low entropy data, so we don't need to ask the permission
  3574. // of the user to read this string
  3575. return navigator.userAgentData.platform;
  3576. }
  3577. // navigator.platform is deprecated. It can be spoofed on Firefox, but,
3578. // at the time of this writing, there is apparently no alternative.
  3579. return navigator.platform;
  3580. }
  3581. }
  3582. /***/ }),
  3583. /***/ 5235:
  3584. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_136472__) => {
  3585. var map = {
  3586. "./colors.glsl": 8609,
  3587. "./filters.glsl": 4672,
  3588. "./fixed-point.glsl": 9778,
  3589. "./float16.glsl": 8710,
  3590. "./global.glsl": 2434,
  3591. "./int32.glsl": 439,
  3592. "./keypoint-descriptors.glsl": 8545,
  3593. "./keypoint-matches.glsl": 6762,
  3594. "./keypoints.glsl": 7639,
  3595. "./math.glsl": 431,
  3596. "./platform.glsl": 6822,
  3597. "./pyramids.glsl": 2728,
  3598. "./subpixel.glsl": 6823
  3599. };
  3600. function webpackContext(req) {
  3601. var id = webpackContextResolve(req);
  3602. return __nested_webpack_require_136472__(id);
  3603. }
  3604. function webpackContextResolve(req) {
  3605. if(!__nested_webpack_require_136472__.o(map, req)) {
  3606. var e = new Error("Cannot find module '" + req + "'");
  3607. e.code = 'MODULE_NOT_FOUND';
  3608. throw e;
  3609. }
  3610. return map[req];
  3611. }
  3612. webpackContext.keys = function webpackContextKeys() {
  3613. return Object.keys(map);
  3614. };
  3615. webpackContext.resolve = webpackContextResolve;
  3616. module.exports = webpackContext;
  3617. webpackContext.id = 5235;
  3618. /***/ }),
  3619. /***/ 4606:
  3620. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_137422__) => {
  3621. var map = {
  3622. "./filters/convolution": 1672,
  3623. "./filters/convolution.js": 1672,
  3624. "./filters/convolution1d.glsl": 8211,
  3625. "./filters/convolution2d.glsl": 7360,
  3626. "./filters/fast-median.glsl": 8191,
  3627. "./filters/nightvision.glsl": 4438,
  3628. "./filters/normalize-image.glsl": 5867,
  3629. "./filters/rgb2grey.glsl": 9252,
  3630. "./include/colors.glsl": 8609,
  3631. "./include/filters.glsl": 4672,
  3632. "./include/fixed-point.glsl": 9778,
  3633. "./include/float16.glsl": 8710,
  3634. "./include/global.glsl": 2434,
  3635. "./include/int32.glsl": 439,
  3636. "./include/keypoint-descriptors.glsl": 8545,
  3637. "./include/keypoint-matches.glsl": 6762,
  3638. "./include/keypoints.glsl": 7639,
  3639. "./include/math.glsl": 431,
  3640. "./include/platform.glsl": 6822,
  3641. "./include/pyramids.glsl": 2728,
  3642. "./include/subpixel.glsl": 6823,
  3643. "./keypoints/allocate-descriptors.glsl": 1341,
  3644. "./keypoints/allocate-extra.glsl": 7833,
  3645. "./keypoints/apply-homography.glsl": 2352,
  3646. "./keypoints/bf-knn.glsl": 7541,
  3647. "./keypoints/clip-border.glsl": 4868,
  3648. "./keypoints/clip.glsl": 5591,
  3649. "./keypoints/distance-filter.glsl": 191,
  3650. "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
  3651. "./keypoints/encode-keypoint-offsets.glsl": 336,
  3652. "./keypoints/encode-keypoint-positions.glsl": 8968,
  3653. "./keypoints/encode-keypoint-properties.glsl": 1733,
  3654. "./keypoints/encode-keypoints.glsl": 9674,
  3655. "./keypoints/encode-null-keypoints.glsl": 2090,
  3656. "./keypoints/fast.glsl": 1855,
  3657. "./keypoints/fast.vs.glsl": 4824,
  3658. "./keypoints/hamming-distance-filter.glsl": 2381,
  3659. "./keypoints/harris-cutoff.glsl": 6060,
  3660. "./keypoints/harris.glsl": 9974,
  3661. "./keypoints/knn-init.glsl": 3047,
  3662. "./keypoints/knn-transfer.glsl": 3266,
  3663. "./keypoints/laplacian.glsl": 8018,
  3664. "./keypoints/lk.glsl": 3168,
  3665. "./keypoints/lookup-of-locations.glsl": 3890,
  3666. "./keypoints/lookup-of-locations.vs.glsl": 8647,
  3667. "./keypoints/lsh-knn.glsl": 4776,
  3668. "./keypoints/mix-keypoints.glsl": 2648,
  3669. "./keypoints/nonmax-scale.glsl": 8825,
  3670. "./keypoints/nonmax-space.glsl": 5693,
  3671. "./keypoints/nonmax-suppression.glsl": 9280,
  3672. "./keypoints/orb-descriptor.glsl": 9108,
  3673. "./keypoints/orb-orientation.glsl": 7137,
  3674. "./keypoints/refine-scale.glsl": 9739,
  3675. "./keypoints/score-findmax.glsl": 8231,
  3676. "./keypoints/shuffle.glsl": 2518,
  3677. "./keypoints/sort-keypoints.glsl": 8096,
  3678. "./keypoints/subpixel-refinement.glsl": 5795,
  3679. "./keypoints/transfer-flow.glsl": 3169,
  3680. "./keypoints/transfer-orientation.glsl": 1337,
  3681. "./keypoints/transfer-to-extra.glsl": 6187,
  3682. "./keypoints/upload-keypoints.glsl": 477,
  3683. "./pyramids/downsample2.glsl": 4050,
  3684. "./pyramids/upsample2.glsl": 5545,
  3685. "./transforms/additive-mix.glsl": 7113,
  3686. "./transforms/resize.glsl": 1202,
  3687. "./transforms/warp-perspective.glsl": 7971,
  3688. "./utils/copy-components.glsl": 6122,
  3689. "./utils/copy-raster.glsl": 371,
  3690. "./utils/copy.glsl": 7307,
  3691. "./utils/fill-components.glsl": 8614,
  3692. "./utils/fill.glsl": 6271,
  3693. "./utils/flip-y.vs.glsl": 3016,
  3694. "./utils/scan-minmax2d.glsl": 3630,
  3695. "./utils/sobel-derivatives.glsl": 8508,
  3696. "./utils/sobel-derivatives.vs.glsl": 8073
  3697. };
  3698. function webpackContext(req) {
  3699. var id = webpackContextResolve(req);
  3700. return __nested_webpack_require_137422__(id);
  3701. }
  3702. function webpackContextResolve(req) {
  3703. if(!__nested_webpack_require_137422__.o(map, req)) {
  3704. var e = new Error("Cannot find module '" + req + "'");
  3705. e.code = 'MODULE_NOT_FOUND';
  3706. throw e;
  3707. }
  3708. return map[req];
  3709. }
  3710. webpackContext.keys = function webpackContextKeys() {
  3711. return Object.keys(map);
  3712. };
  3713. webpackContext.resolve = webpackContextResolve;
  3714. module.exports = webpackContext;
  3715. webpackContext.id = 4606;
  3716. /***/ }),
  3717. /***/ 8211:
  3718. /***/ ((module) => {
  3719. module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3720. /***/ }),
  3721. /***/ 7360:
  3722. /***/ ((module) => {
  3723. module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3724. /***/ }),
  3725. /***/ 8191:
  3726. /***/ ((module) => {
  3727. module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
  3728. /***/ }),
  3729. /***/ 4438:
  3730. /***/ ((module) => {
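// GLSL fragment shader: "nightvision"-style enhancement; applies a sigmoid contrast curve around an illumination map, with the gain attenuated radially from the image center (gain, offset, decay uniforms); the greyscale path works on luma directly, the color path goes through YUV.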
  3731. module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
  3732. /***/ }),
  3733. /***/ 5867:
  3734. /***/ ((module) => {
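// GLSL fragment shader: intensity normalization; remaps each channel from the (min, range) values stored in the min-max texture(s) to the requested [minValue, maxValue] interval.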
  3735. module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
  3736. /***/ }),
  3737. /***/ 9252:
  3738. /***/ ((module) => {
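// GLSL fragment shader: RGB to greyscale using Rec. 601-style luma weights (0.299, 0.587, 0.114).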
  3739. module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
  3740. /***/ }),
  3741. /***/ 8609:
  3742. /***/ ((module) => {
  3743. module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
  3744. /***/ }),
  3745. /***/ 4672:
  3746. /***/ ((module) => {
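// GLSL include: cross-shaped 3x3 (5-point) Laplacian response sampled from an image pyramid at a given LOD, scaled by (1.0 + lod).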
  3747. module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
  3748. /***/ }),
  3749. /***/ 9778:
  3750. /***/ ((module) => {
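// GLSL include: fixed-point arithmetic helpers; integers carry @FIX_BITS@ fractional bits (resolution @FIX_RESOLUTION@), with conversions between int/float and ivec2/vec2 representations.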
  3751. module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((f) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
  3752. /***/ }),
  3753. /***/ 8710:
  3754. /***/ ((module) => {
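// GLSL include: half-float packing; a 16-bit float is split into two bytes stored in a pair of normalized color channels (via packHalf2x16/unpackHalf2x16), plus special encodings for null, discarded and NaN pairs.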
  3755. module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
  3756. /***/ }),
  3757. /***/ 2434:
  3758. /***/ ((module) => {
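// GLSL include: addressing macros for the output texture (threadLocation, outputSize) and for sampling an input texture at the current texel or at short/long pixel offsets.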
  3759. module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
  3760. /***/ }),
  3761. /***/ 439:
  3762. /***/ ((module) => {
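// GLSL include: encodes/decodes a 32-bit unsigned integer as the four bytes of an RGBA8 pixel; a division-based variant replaces bit shifts on Apple/Intel GPU platforms.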
  3763. module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
  3764. /***/ }),
  3765. /***/ 8545:
  3766. /***/ ((module) => {
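// GLSL include: reads DESCRIPTOR_SIZE-byte binary keypoint descriptors from encoded keypoint textures or a descriptor database, and computes their Hamming distance with a 256-entry popcount lookup table.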
  3767. module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
  3768. /***/ }),
  3769. /***/ 6762:
  3770. /***/ ((module) => {
  3771. module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
  3772. /***/ }),
  3773. /***/ 7639:
  3774. /***/ ((module) => {
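// GLSL include: keypoint encoding/decoding; each keypoint occupies a run of pixels (header + extra + descriptor) holding a fixed-point position, encoded LOD, orientation, half-float score and null/discarded flags.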
  3775. module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
  3776. /***/ }),
  3777. /***/ 431:
  3778. /***/ ((module) => {
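// GLSL include: math constants (PI and friends) and fast polynomial approximations of atan and atan2.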
  3779. module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
  3780. /***/ }),
  3781. /***/ 6822:
  3782. /***/ ((module) => {
  3783. module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
  3784. /***/ }),
  3785. /***/ 2728:
  3786. /***/ ((module) => {
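// GLSL include: pyramid sampling macros plus encodeLod/decodeLod, which map a level-of-detail value to and from a normalized [0,1] encoding (with an epsilon-based equality test).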
  3787. module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
  3788. /***/ }),
  3789. /***/ 6823:
  3790. /***/ ((module) => {
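// GLSL include: subpixel sampling; subpixelAtBI fetches the four neighboring texels and blends them bilinearly according to the fractional coordinates.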
  3791. module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
  3792. /***/ }),
  3793. /***/ 1341:
  3794. /***/ ((module) => {
  3795. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3796. /***/ }),
  3797. /***/ 7833:
  3798. /***/ ((module) => {
  3799. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3800. /***/ }),
  3801. /***/ 2352:
  3802. /***/ ((module) => {
  3803. module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
  3804. /***/ }),
  3805. /***/ 7541:
  3806. /***/ ((module) => {
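// GLSL fragment shader: one pass of brute-force descriptor matching; each keypoint scans NUMBER_OF_KEYPOINTS_PER_PASS database keypoints and keeps the closest Hamming-distance match that is still farther than the filter match, so repeated passes can retrieve successive nearest neighbors.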
  3807. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
  3808. /***/ }),
  3809. /***/ 4868:
  3810. /***/ ((module) => {
  3811. module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
  3812. /***/ }),
  3813. /***/ 5591:
  3814. /***/ ((module) => {
  3815. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
  3816. /***/ }),
  3817. /***/ 191:
  3818. /***/ ((module) => {
  3819. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3820. /***/ }),
  3821. /***/ 5467:
  3822. /***/ ((module) => {
  3823. module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
  3824. /***/ }),
  3825. /***/ 336:
  3826. /***/ ((module) => {
  3827. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
  3828. /***/ }),
  3829. /***/ 8968:
  3830. /***/ ((module) => {
  3831. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
  3832. /***/ }),
  3833. /***/ 1733:
  3834. /***/ ((module) => {
  3835. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
  3836. /***/ }),
  3837. /***/ 9674:
  3838. /***/ ((module) => {
  3839. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
  3840. /***/ }),
  3841. /***/ 2090:
  3842. /***/ ((module) => {
  3843. module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
  3844. /***/ }),
  3845. /***/ 1855:
  3846. /***/ ((module) => {
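// GLSL fragment shader: FAST corner detection (FAST_TYPE 916 = 16-pixel circle, 9 contiguous pixels); rejects non-corners early using the four compass pixels, evaluates the full contiguity predicate, then computes a corner score and keeps the strongest score/LOD seen across pyramid levels.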
  3847. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
  3848. /***/ }),
  3849. /***/ 4824:
  3850. /***/ ((module) => {
  3851. module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
  3852. /***/ }),
  3853. /***/ 2381:
  3854. /***/ ((module) => {
  3855. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3856. /***/ }),
  3857. /***/ 6060:
  3858. /***/ ((module) => {
  3859. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
  3860. /***/ }),
  3861. /***/ 9974:
  3862. /***/ ((module) => {
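// GLSL fragment shader: corner response from the derivatives texture; accumulates a Gaussian-weighted structure tensor over a WINDOW_SIZE window, scores the pixel by its minimum eigenvalue (Shi-Tomasi-style), and keeps the scale with the stronger Laplacian response.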
  3863. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
  3864. /***/ }),
  3865. /***/ 3047:
  3866. /***/ ((module) => {
  3867. module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
  3868. /***/ }),
  3869. /***/ 3266:
  3870. /***/ ((module) => {
  3871. module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
  3872. /***/ }),
  3873. /***/ 8018:
  3874. /***/ ((module) => {
  3875. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
  3876. /***/ }),
  3877. /***/ 3168:
  3878. /***/ ((module) => {
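// GLSL fragment shader: one pyramid level of sparse optical flow in the style of pyramidal Lucas-Kanade; builds Scharr derivatives and a structure tensor over a WINDOW_SIZE patch, iteratively refines the flow vector, doubles the guess coming from the coarser level, and discards keypoints that leave the image or are poorly conditioned.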
  3879. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
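/*
  The shader above runs one level of pyramidal Lucas-Kanade: for each keypoint it
  reads a window from the previous and next pyramids, accumulates the 2x2 spatial
  gradient matrix, and iteratively refines the flow with delta = H^-1 * b.
  A rough CPU-side sketch of that refinement loop (plain JS; refineFlow and
  mismatchAt are illustrative names, not part of this bundle):

    function refineFlow(invH, mismatchAt, numberOfIterations, epsilon) {
        let d = [0, 0];                                   // local flow guess
        for (let k = 0; k < numberOfIterations; k++) {
            const b = mismatchAt(d);                      // image mismatch vector for the current guess
            const dx = invH[0][0] * b[0] + invH[0][1] * b[1];
            const dy = invH[1][0] * b[0] + invH[1][1] * b[1];
            if (dx * dx + dy * dy < epsilon * epsilon) break;   // converged
            d = [d[0] + dx, d[1] + dy];
        }
        return d;
    }
*/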
/***/ }),
/***/ 3890:
/***/ ((module) => {
  3883. module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
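/*
  The shader above builds a lookup table of corner locations: STAGE 1 seeds one
  entry per pixel with a nonzero corner score, the stages above 1 merge blocks of
  the table so that the filled entries are packed together, and a non-positive
  stage renders a debug view of empty versus filled entries. A plain sequential
  equivalent of the end result (illustrative names, not part of this bundle):

    // collect the (x, y) positions of pixels with a nonzero corner score
    function compactCorners(scores, width, height) {
        const locations = [];
        for (let y = 0; y < height; y++)
            for (let x = 0; x < width; x++)
                if (scores[y * width + x] > 0) locations.push([x, y]);
        return locations;
    }
*/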
/***/ }),
/***/ 8647:
/***/ ((module) => {
  3887. module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
/***/ }),
/***/ 4776:
/***/ ((module) => {
  3891. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
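/*
  The shader above matches binary descriptors with locality-sensitive hashing:
  it hashes the query descriptor, probes the buckets of every hash within
  Hamming distance 0, 1 or 2 of it (depending on LEVEL; the SWAP tables are the
  XOR masks of those neighboring hashes), and keeps the best candidate by
  descriptor distance. A small JS sketch of the hashing step, mirroring
  descriptorHash() above (names are illustrative, not part of this bundle):

    // descriptor: Uint8Array holding the binary descriptor
    // bitPositions: the bit indices of one LSH sequence (HASH_SIZE of them)
    function descriptorHash(descriptor, bitPositions) {
        let hash = 0;
        for (const bit of bitPositions) {
            const byte = bit >> 3, mask = 1 << (bit & 7);
            hash = (hash << 1) | ((descriptor[byte] & mask) !== 0 ? 1 : 0);
        }
        return hash;
    }
*/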
/***/ }),
/***/ 2648:
/***/ ((module) => {
  3895. module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
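/*
  The shader above merges two encoded keypoint streams: STAGE 1 concatenates
  sets A and B, STAGE 2 tags each keypoint with a validity flag, STAGES 3 and 4
  compact the valid keypoints to the front with a block-wise merge, and STAGE 5
  is a debug view. A plain sequential equivalent of the overall effect
  (illustrative names, not part of this bundle):

    function mixKeypoints(listA, listB, maxKeypoints) {
        return listA.concat(listB)
            .filter(keypoint => keypoint !== null)   // drop null/invalid entries
            .slice(0, maxKeypoints);
    }
*/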
/***/ }),
/***/ 8825:
/***/ ((module) => {
  3899. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
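/*
  The shader above suppresses non-maxima across scales: a keypoint's strength
  (its absolute Laplacian response when USE_LAPLACIAN is set, otherwise its
  score) must be at least as large as every neighboring strength sampled one
  lodStep below and one lodStep above its own level, or its score is zeroed.
  A tiny sketch of that test (illustrative names, not part of this bundle):

    // keep the score only if no neighbor at the adjacent scales is stronger
    function suppressAcrossScales(score, strength, neighborStrengths) {
        const strongest = Math.max(0, ...neighborStrengths);
        return strength >= strongest ? score : 0;
    }
*/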
/***/ }),
/***/ 5693:
/***/ ((module) => {
  3903. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
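/*
  The shader above is a 3x3 non-maximum suppression performed per scale: a
  corner keeps its score only if it is at least as strong as its eight
  neighbors detected at the same lod, sampled at that scale's pixel spacing.
  A sequential sketch (illustrative names, not part of this bundle):

    function nonmax3x3(score, sameLodNeighborScores) {
        return sameLodNeighborScores.every(s => score >= s) ? score : 0;
    }
*/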
/***/ }),
/***/ 9280:
/***/ ((module) => {
  3907. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
/***/ }),
/***/ 9108:
/***/ ((module) => {
  3911. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-
8,-13),\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
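/*
  The shader above computes a 256-bit ORB-style descriptor: the 256 point pairs
  of pat31 are rotated by the keypoint orientation (quantized to 12-degree
  steps), scaled by the keypoint's level of detail, and each pair contributes
  one intensity-comparison bit; every output pixel packs 32 of those bits.
  A sketch of how one byte is built (intensityAt and the other names below are
  illustrative, not part of this bundle):

    // pairs: up to 8 point pairs [px, py, qx, qy]; returns the packed comparison bits
    function descriptorByte(intensityAt, pairs, cos, sin, cx, cy, scale) {
        let bits = 0;
        pairs.forEach(([px, py, qx, qy], j) => {
            const ax = cx + scale * (cos * px - sin * py);
            const ay = cy + scale * (sin * px + cos * py);
            const bx = cx + scale * (cos * qx - sin * qy);
            const by = cy + scale * (sin * qx + cos * qy);
            if (intensityAt(ax, ay) < intensityAt(bx, by)) bits |= 1 << j;
        });
        return bits;
    }
*/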
/***/ }),
/***/ 7137:
/***/ ((module) => {
  3915. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(
10,-5),P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
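/*
  The shader above assigns an orientation to each keypoint with the intensity
  centroid method: it accumulates the intensity-weighted offsets over a disk
  around the keypoint (scaled by the keypoint's level of detail) and takes the
  angle of the resulting vector. A sequential sketch with illustrative names,
  not part of this bundle:

    function intensityCentroidAngle(intensityAt, cx, cy, diskOffsets, scale) {
        let mx = 0, my = 0;
        for (const [dx, dy] of diskOffsets) {
            const w = intensityAt(cx + Math.round(scale * dx), cy + Math.round(scale * dy));
            mx += dx * w;
            my += dy * w;
        }
        return Math.atan2(my, mx);   // orientation of the centroid vector
    }
*/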
/***/ }),
/***/ 9739:
/***/ ((module) => {
  3919. module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
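/*
  The shader above refines a keypoint's scale: it samples the corner strength
  (a Laplacian or a FAST-like response, depending on METHOD) at lod - lodStep,
  lod and lod + lodStep, fits a parabola through the three samples and moves
  the lod toward the fitted maximum. A sketch of that interpolation
  (illustrative names, not part of this bundle):

    // s0, s1, s2: strengths at lod - step, lod, lod + step
    function refineLod(lod, step, s0, s1, s2) {
        const a = 2 * s0 - 4 * s1 + 2 * s2;   // f(t) = a*t^2 + b*t + c with t in [0, 1]
        const b = -3 * s0 + 4 * s1 - s2;
        const tOfMaxSample = (s2 >= s0 && s2 >= s1) ? 1 : (s1 >= s0 ? 0.5 : 0);
        let t = a < 0 ? -0.5 * b / a : tOfMaxSample;   // parabola vertex, if it opens downward
        if (t < 0 || t > 1) t = tOfMaxSample;          // stay inside the sampled range
        return lod + (2 * t - 1) * step;               // map t back to a lod offset
    }
*/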
/***/ }),
/***/ 8231:
/***/ ((module) => {
  3923. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
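/*
  The shader above is one pass of a parallel max reduction: at each iteration
  every pixel compares its encoded corner score with three partner pixels a
  growing jump away and keeps the largest, so after enough passes each cluster
  of pixels holds its maximum score. The plain sequential equivalent is simply:

    // scores: array of decoded corner scores
    const maxScore = scores.reduce((best, s) => Math.max(best, s), 0);
*/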
/***/ }),
/***/ 2518:
/***/ ((module) => {
  3927. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
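/*
  The shader above shuffles the keypoint stream with a permutation uploaded in
  a uniform buffer: indices are permuted within consecutive blocks of
  PERMUTATION_MAXLEN elements, and each output keypoint is read from its
  permuted address. A sequential sketch (illustrative names, not part of this
  bundle):

    function shuffleKeypoints(keypoints, permutation, blockLength) {
        return keypoints.map((unused, i) => {
            const base = i - (i % blockLength);              // start of this block
            return keypoints[base + permutation[i % blockLength]];
        });
    }
*/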
/***/ }),
/***/ 8096:
/***/ ((module) => {
  3931. module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
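/*
  The shader above sorts keypoints by score on the GPU: STAGE 1 builds
  (index, score) elements, STAGE 2 repeatedly merges pairs of sorted blocks
  (selectKth finds the k-th element of the merge of two blocks in a
  logarithmic number of steps), and STAGE 3 gathers the keypoint data in the
  new order, discarding everything beyond maxKeypoints. The sequential
  equivalent (illustrative names, not part of this bundle):

    function sortKeypoints(keypoints, maxKeypoints) {
        return keypoints
            .filter(keypoint => keypoint.valid)
            .sort((a, b) => b.score - a.score)   // descending score
            .slice(0, maxKeypoints);
    }
*/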
/***/ }),
/***/ 5795:
/***/ ((module) => {
  3935. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
  3936. /***/ }),
  3937. /***/ 3169:
  3938. /***/ ((module) => {
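// GLSL: adds per-keypoint flow vectors (read from encodedFlow) to the encoded keypoint
// positions; keypoints whose flow was discarded are discarded as well.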
  3939. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
  3940. /***/ }),
  3941. /***/ 1337:
  3942. /***/ ((module) => {
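// GLSL: writes computed orientations (from encodedOrientations) into the orientation field
// of each valid encoded keypoint.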
  3943. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
  3944. /***/ }),
  3945. /***/ 6187:
  3946. /***/ ((module) => {
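// GLSL: copies per-keypoint data from an auxiliary texture (encodedData) into the extra
// cells of the keypoint encoding, leaving bad keypoints untouched.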
  3947. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
  3948. /***/ }),
  3949. /***/ 477:
  3950. /***/ ((module) => {
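// GLSL: uploads keypoints supplied in a std140 uniform buffer (x, y, lod, score per vec4)
// into the encoded-keypoints texture, for indices in [startIndex, endIndex).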
  3951. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
  3952. /***/ }),
  3953. /***/ 4050:
  3954. /***/ ((module) => {
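// GLSL: image downsampling; samples the source with texture filtering (an alternative branch
// that reads every other pixel is compiled out).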
  3955. module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
  3956. /***/ }),
  3957. /***/ 5545:
  3958. /***/ ((module) => {
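// GLSL: 2x upsampling; copies the source pixel where x+y is even and writes zeros (keeping
// the source alpha) elsewhere.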
  3959. module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
  3960. /***/ }),
  3961. /***/ 7113:
  3962. /***/ ((module) => {
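// GLSL: pixel-wise linear combination of two images: clamp(alpha*image0 + beta*image1 + gamma).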
  3963. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
  3964. /***/ }),
  3965. /***/ 1202:
  3966. /***/ ((module) => {
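// GLSL: image resize with nearest-neighbor (INTERPOLATION_METHOD == 0) or bilinear
// (INTERPOLATION_METHOD == 1) sampling.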
  3967. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
  3968. /***/ }),
  3969. /***/ 7971:
  3970. /***/ ((module) => {
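// GLSL: perspective warp; maps each output pixel through the inverse homography and samples
// the source bilinearly, writing opaque black for points that fall out of bounds.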
  3971. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
  3972. /***/ }),
  3973. /***/ 6122:
  3974. /***/ ((module) => {
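// GLSL: copies one component of src (srcComponentId) into the components of dest selected by
// the destComponents mask.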
  3975. module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
  3976. /***/ }),
  3977. /***/ 371:
  3978. /***/ ((module) => {
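// GLSL: reshapes an image in raster order into an output of different dimensions; positions
// past the end of the source are filled with null keypoints (TYPE 1) or null float16 pairs (TYPE 2).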
  3979. module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
  3980. /***/ }),
  3981. /***/ 7307:
  3982. /***/ ((module) => {
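// GLSL: identity copy of the input image.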
  3983. module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
  3984. /***/ }),
  3985. /***/ 8614:
  3986. /***/ ((module) => {
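// GLSL: fills the selected components of the image with a constant value.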
  3987. module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
  3988. /***/ }),
  3989. /***/ 6271:
  3990. /***/ ((module) => {
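// GLSL: fills the output with a constant value.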
  3991. module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
  3992. /***/ }),
  3993. /***/ 3016:
  3994. /***/ ((module) => {
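// GLSL vertex shader: flips the y axis of the output.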
  3995. module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
  3996. /***/ }),
  3997. /***/ 3630:
  3998. /***/ ((module) => {
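// GLSL: one iteration of a parallel min/max reduction over clusters of size 2^(iterationNumber+1);
// outputs the running max in R, the running min in G and their difference in B.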
  3999. module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
  4000. /***/ }),
  4001. /***/ 8508:
  4002. /***/ ((module) => {
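// GLSL: Sobel x/y derivatives of a pyramid level, encoded as a pair of float16; the 3x3
// neighborhood coordinates are read from varyings.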
  4003. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
  4004. /***/ }),
  4005. /***/ 8073:
  4006. /***/ ((module) => {
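// GLSL vertex shader: precomputes the nine texture coordinates of the 3x3 neighborhood
// (offsets scaled by 2^lod) as varyings for the derivatives shader above.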
  4007. module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
  4008. /***/ }),
  4009. /***/ 3575:
  4010. /***/ ((module) => {
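// Base64-encoded WebAssembly binary. Its export section (readable in the encoded data) lists
// malloc, free, srand and a family of Mat32_* routines: create/destroy/data, transpose, add,
// subtract, scale, compmult, multiply, inverse, QR decomposition (full/reduced/ols/inverse),
// homography estimation (ndlt), affine fitting, PRANSAC homography/affine, and perspective/affine
// point transforms.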
  4011. module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
  4012. f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
  4013. f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
  4014. BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
  4015. AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
  4016. CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
  4017. Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
  4018. AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
  4019. dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
  4020. TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
  4021. X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
  4022. MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
  4023. ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
  4024. PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
  4025. CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
  4026. AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
  4027. gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
  4028. AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
  4029. QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
  4030. AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
  4031. gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
  4032. IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
  4033. gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
  4034. hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
  4035. nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
  4036. AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
  4037. EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
  4038. IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
  4039. AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
  4040. IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
  4041. gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
  4042. AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
  4043. IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
  4044. AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
  4045. AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
  4046. IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
  4047. AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
  4048. QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
  4049. gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
  4050. IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
  4051. OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
  4052. IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
  4053. AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
  4054. APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
  4055. IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
  4056. AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
  4057. IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
  4058. CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
  4059. hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
  4060. IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
  4061. AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
  4062. BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
  4063. QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
  4064. ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
  4065. AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
  4066. BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
  4067. iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
  4068. IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
  4069. AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
  4070. gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
  4071. QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
  4072. QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
  4073. ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
  4074. gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
  4075. NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
  4076. AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
  4077. gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
  4078. IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
  4079. a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
  4080. KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
  4081. QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
  4082. CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
  4083. ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
  4084. QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
  4085. CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
  4086. AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
  4087. QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
  4088. ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
  4089. BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
  4090. AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
  4091. KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
  4092. EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
  4093. DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
  4094. BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
  4095. Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
  4096. DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
  4097. AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
  4098. QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
  4099. IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
  4100. QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
  4101. QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
  4102. IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
  4103. CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
  4104. KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
  4105. IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
  4106. ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
  4107. KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
  4108. AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
  4109. DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
  4110. QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
  4111. QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
  4112. ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
  4113. EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
  4114. SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
  4115. KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
  4116. gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
  4117. ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
  4118. ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
  4119. IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
  4120. IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
  4121. IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
  4122. DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
  4123. A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
  4124. akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
  4125. DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
  4126. u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
  4127. AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
  4128. IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
  4129. IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
  4130. aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
  4131. QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
  4132. KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
  4133. bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
  4134. IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
  4135. IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
  4136. IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
  4137. An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
  4138. DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
  4139. KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
  4140. QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
  4141. BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
  4142. QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
  4143. KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
  4144. ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
  4145. GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
  4146. QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
  4147. ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
  4148. B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
  4149. DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
  4150. BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
  4151. bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
  4152. IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
  4153. DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
  4154. IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
  4155. QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
  4156. FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
  4157. DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
  4158. AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
  4159. AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
  4160. QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
  4161. AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
  4162. EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
  4163. SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
  4164. Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
  4165. ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
  4166. IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
  4167. fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
  4168. IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
  4169. gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
  4170. gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
  4171. CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
  4172. IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
  4173. ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
  4174. DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
  4175. KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
  4176. gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
  4177. ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
  4178. AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
  4179. gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
  4180. aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
  4181. SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
  4182. CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
  4183. CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
  4184. CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
  4185. CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
  4186. AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
  4187. ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
  4188. gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
  4189. QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
  4190. AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
  4191. lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
  4192. kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
  4193. 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
  4194. ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
  4195. NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
  4196. gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
  4197. BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
  4198. AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
  4199. QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
  4200. GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
  4201. C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
  4202. DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
  4203. QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
  4204. IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
  4205. DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
  4206. f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
  4207. gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
  4208. gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
  4209. IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
  4210. AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
  4211. aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
  4212. bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
  4213. IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
  4214. AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
  4215. ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
  4216. gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
  4217. AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
  4218. IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
  4219. BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
  4220. l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
  4221. gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
  4222. AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
  4223. AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
  4224. aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
  4225. ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
  4226. aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
  4227. AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
  4228. FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
  4229. IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
  4230. oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
  4231. AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
  4232. IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
  4233. IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
  4234. 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
  4235. oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
  4236. IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
  4237. PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
  4238. APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
  4239. oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
  4240. GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
  4241. IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
  4242. mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
  4243. BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
  4244. AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
  4245. oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
  4246. oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
  4247. FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
  4248. oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
  4249. IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
  4250. JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
  4251. ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
  4252. AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
  4253. DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
  4254. IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
  4255. ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
  4256. CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
  4257. QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
  4258. IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
  4259. IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
  4260. ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
  4261. AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
  4262. CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
  4263. dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
  4264. KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
  4265. AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
  4266. HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
  4267. BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
  4268. AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
  4269. gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
  4270. QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
  4271. AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
  4272. BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
  4273. IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
  4274. kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
  4275. BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
  4276. IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
  4277. AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
  4278. AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
  4279. QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
  4280. CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
  4281. lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
  4282. dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
  4283. IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
  4284. IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
  4285. C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
  4286. IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
  4287. AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
  4288. KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
  4289. Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4290. ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
  4291. IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
  4292. EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
  4293. IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
  4294. IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
  4295. QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
  4296. IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
  4297. AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
  4298. ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
  4299. CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
  4300. AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
  4301. QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
  4302. kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
  4303. GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
  4304. ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
  4305. OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
  4306. BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
  4307. AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
  4308. IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
  4309. A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
  4310. dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
  4311. KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
  4312. IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
  4313. FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
  4314. GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
  4315. PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
  4316. E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
  4317. IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
  4318. IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
  4319. KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
  4320. Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4321. ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
  4322. gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
  4323. BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
  4324. EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
  4325. gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
  4326. AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
  4327. IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
  4328. AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
  4329. lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
  4330. b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
  4331. ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
  4332. IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
  4333. gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
  4334. ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
  4335. KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
  4336. AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
  4337. dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
  4338. NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
  4339. ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
  4340. DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
  4341. IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
  4342. IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
  4343. GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
  4344. BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
  4345. ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
  4346. gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
  4347. IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
  4348. FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
  4349. ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
  4350. QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
  4351. DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
  4352. dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
  4353. IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
  4354. IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
  4355. IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
  4356. AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
  4357. D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
  4358. EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
  4359. QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
  4360. IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
  4361. IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
  4362. KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
  4363. QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
  4364. GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
  4365. AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
  4366. ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
  4367. DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
  4368. AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
  4369. IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
  4370. zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
  4371. ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
  4372. AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
  4373. IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
  4374. AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
  4375. QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
  4376. B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
  4377. DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
  4378. gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
  4379. KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
  4380. AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
  4381. KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
  4382. HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
  4383. IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
  4384. IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
  4385. HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
  4386. QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
  4387. gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
  4388. QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
  4389. QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
  4390. ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
  4391. ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
  4392. ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
  4393. EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
  4394. KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
  4395. X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
  4396. ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
  4397. QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
  4398. dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
  4399. AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
  4400. AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
  4401. AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
  4402. KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
  4403. koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
  4404. CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
  4405. CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
  4406. IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
  4407. aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
  4408. ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
  4409. IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
  4410. EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
  4411. A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
  4412. A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
  4413. KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
  4414. AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
  4415. lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
  4416. ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
  4417. IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
  4418. aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
  4419. iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
  4420. DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
  4421. AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
  4422. AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
  4423. IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
  4424. lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
  4425. QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
  4426. IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
  4427. Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
  4428. BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
  4429. A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
  4430. ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
  4431. IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
  4432. taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
  4433. NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
  4434. gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
  4435. Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
  4436. DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
  4437. aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
  4438. ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
  4439. C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
  4440. dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
  4441. IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
  4442. dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
  4443. NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
  4444. YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
  4445. IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
  4446. bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
  4447. AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
  4448. b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
  4449. QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
  4450. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
  4451. dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
  4452. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
  4453. dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
  4454. NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
  4455. ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
  4456. YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
  4457. aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
  4458. MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
  4459. IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
  4460. dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
  4461. ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
  4462. IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
  4463. cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
  4464. AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
  4465. AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
  4466. OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
  4467. MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
  4468. MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
  4469. IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
  4470. cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
  4471. aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
  4472. LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
  4473. ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
  4474. bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
  4475. dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
  4476. MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
  4477. YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
  4478. YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
  4479. cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
  4480. b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
  4481. AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
  4482. NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
  4483. bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
  4484. aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
  4485. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
  4486. `
  4487. /***/ })
  4488. /******/ });
  4489. /************************************************************************/
  4490. /******/ // The module cache
  4491. /******/ var __webpack_module_cache__ = {};
  4492. /******/
  4493. /******/ // The require function
  4494. /******/ function __nested_webpack_require_314174__(moduleId) {
  4495. /******/ // Check if module is in cache
  4496. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  4497. /******/ if (cachedModule !== undefined) {
  4498. /******/ return cachedModule.exports;
  4499. /******/ }
  4500. /******/ // Create a new module (and put it into the cache)
  4501. /******/ var module = __webpack_module_cache__[moduleId] = {
  4502. /******/ // no module.id needed
  4503. /******/ // no module.loaded needed
  4504. /******/ exports: {}
  4505. /******/ };
  4506. /******/
  4507. /******/ // Execute the module function
  4508. /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_314174__);
  4509. /******/
  4510. /******/ // Return the exports of the module
  4511. /******/ return module.exports;
  4512. /******/ }
  4513. /******/
  4514. /************************************************************************/
  4515. /******/ /* webpack/runtime/define property getters */
  4516. /******/ (() => {
  4517. /******/ // define getter functions for harmony exports
  4518. /******/ __nested_webpack_require_314174__.d = (exports, definition) => {
  4519. /******/ for(var key in definition) {
  4520. /******/ if(__nested_webpack_require_314174__.o(definition, key) && !__nested_webpack_require_314174__.o(exports, key)) {
  4521. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  4522. /******/ }
  4523. /******/ }
  4524. /******/ };
  4525. /******/ })();
  4526. /******/
  4527. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  4528. /******/ (() => {
  4529. /******/ __nested_webpack_require_314174__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  4530. /******/ })();
  4531. /******/
  4532. /******/ /* webpack/runtime/make namespace object */
  4533. /******/ (() => {
  4534. /******/ // define __esModule on exports
  4535. /******/ __nested_webpack_require_314174__.r = (exports) => {
  4536. /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
  4537. /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
  4538. /******/ }
  4539. /******/ Object.defineProperty(exports, '__esModule', { value: true });
  4540. /******/ };
  4541. /******/ })();
  4542. /******/
  4543. /************************************************************************/
  4544. var __nested_webpack_exports__ = {};
4545. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  4546. (() => {
  4547. "use strict";
  4548. // EXPORTS
  4549. __nested_webpack_require_314174__.d(__nested_webpack_exports__, {
  4550. "default": () => (/* binding */ Speedy)
  4551. });
  4552. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  4553. var speedy_gl = __nested_webpack_require_314174__(1001);
  4554. // EXTERNAL MODULE: ./src/utils/utils.js
  4555. var utils = __nested_webpack_require_314174__(9037);
  4556. // EXTERNAL MODULE: ./src/core/settings.js
  4557. var settings = __nested_webpack_require_314174__(2199);
  4558. // EXTERNAL MODULE: ./src/core/speedy-promise.js
  4559. var speedy_promise = __nested_webpack_require_314174__(9192);
  4560. ;// CONCATENATED MODULE: ./src/utils/asap.js
  4561. /*
  4562. * speedy-vision.js
  4563. * GPU-accelerated Computer Vision for JavaScript
  4564. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4565. *
  4566. * Licensed under the Apache License, Version 2.0 (the "License");
  4567. * you may not use this file except in compliance with the License.
  4568. * You may obtain a copy of the License at
  4569. *
  4570. * http://www.apache.org/licenses/LICENSE-2.0
  4571. *
  4572. * Unless required by applicable law or agreed to in writing, software
  4573. * distributed under the License is distributed on an "AS IS" BASIS,
  4574. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4575. * See the License for the specific language governing permissions and
  4576. * limitations under the License.
  4577. *
  4578. * asap.js
  4579. * Schedule a function to run "as soon as possible"
  4580. */
  4581. /** callbacks */
  4582. const callbacks = /** @type {Function[]} */[];
  4583. /** arguments to be passed to the callbacks */
  4584. const args = /** @type {any[][]} */[];
  4585. /** asap key */
  4586. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  4587. // Register an event listener
  4588. window.addEventListener('message', event => {
  4589. if (event.source !== window || event.data !== ASAP_KEY) return;
  4590. event.stopPropagation();
  4591. if (callbacks.length == 0) return;
  4592. const fn = callbacks.pop();
  4593. const argArray = args.pop();
  4594. fn.apply(undefined, argArray);
  4595. }, true);
  4596. /**
  4597. * Schedule a function to run "as soon as possible"
  4598. * @param {Function} fn callback
  4599. * @param {any[]} params optional parameters
  4600. */
  4601. function asap(fn, ...params) {
  4602. callbacks.unshift(fn);
  4603. args.unshift(params);
  4604. window.postMessage(ASAP_KEY, '*');
  4605. }
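/*
 * Usage sketch (editorial note, not part of the original source). asap()
 * queues the callback and its arguments above, then posts ASAP_KEY to the
 * window; the message listener invokes it on a later macrotask, avoiding the
 * clamping that nested setTimeout(fn, 0) calls may incur.
 *
 *   asap((a, b) => console.log(a + b), 2, 3); // eventually logs 5
 */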
  4606. // EXTERNAL MODULE: ./src/utils/errors.js
  4607. var utils_errors = __nested_webpack_require_314174__(8581);
  4608. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
  4609. /*
  4610. * speedy-vision.js
  4611. * GPU-accelerated Computer Vision for JavaScript
  4612. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4613. *
  4614. * Licensed under the Apache License, Version 2.0 (the "License");
  4615. * you may not use this file except in compliance with the License.
  4616. * You may obtain a copy of the License at
  4617. *
  4618. * http://www.apache.org/licenses/LICENSE-2.0
  4619. *
  4620. * Unless required by applicable law or agreed to in writing, software
  4621. * distributed under the License is distributed on an "AS IS" BASIS,
  4622. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4623. * See the License for the specific language governing permissions and
  4624. * limitations under the License.
  4625. *
  4626. * speedy-texture-reader.js
  4627. * Reads data from textures
  4628. */
  4629. /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
  4630. const DEFAULT_NUMBER_OF_BUFFERS = 2;
  4631. /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
  4632. const runOnNextFrame = navigator.userAgent.includes('Firefox') ? (fn, ...args) => setTimeout(fn, 10, ...args) :
  4633. // RAF produces a warning on Firefox
  4634. (fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args)); // reduce battery usage
  4635. /**
  4636. * Reads data from textures
  4637. */
  4638. class SpeedyTextureReader {
  4639. /**
  4640. * Constructor
  4641. * @param {number} [numberOfBuffers]
  4642. */
  4643. constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS) {
  4644. utils/* Utils */.A.assert(numberOfBuffers > 0);
  4645. /** @type {boolean} is this object initialized? */
  4646. this._initialized = false;
  4647. /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
  4648. this._pixelBuffer = new Array(numberOfBuffers).fill(null).map(() => new Uint8Array(0));
  4649. /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
  4650. this._pbo = new Array(numberOfBuffers).fill(null);
  4651. /** @type {number} the index of the buffer that will be consumed in this frame */
  4652. this._consumerIndex = 0;
  4653. /** @type {number} the index of the buffer that will be produced next */
  4654. this._producerIndex = numberOfBuffers - 1;
  4655. /** @type {SpeedyPromise<void>[]} producer-consumer promises */
  4656. this._promise = Array.from({
  4657. length: numberOfBuffers
  4658. }, () => speedy_promise/* SpeedyPromise */.i.resolve());
  4659. /** @type {boolean[]} are the contents of the ith buffer being produced? */
  4660. this._busy = new Array(numberOfBuffers).fill(false);
  4661. /** @type {boolean[]} can the ith buffer be consumed? */
  4662. this._ready = new Array(numberOfBuffers).fill(true);
  4663. }
  4664. /**
  4665. * Initialize this object
  4666. * @param {SpeedyGPU} gpu
  4667. */
  4668. init(gpu) {
  4669. this._allocatePBOs(gpu);
  4670. gpu.subscribe(this._allocatePBOs, this, gpu);
  4671. this._initialized = true;
  4672. }
  4673. /**
  4674. * Release resources
  4675. * @param {SpeedyGPU} gpu
  4676. * @returns {null}
  4677. */
  4678. release(gpu) {
  4679. gpu.unsubscribe(this._allocatePBOs, this);
  4680. this._deallocatePBOs(gpu);
  4681. this._initialized = false;
  4682. return null;
  4683. }
  4684. /**
  4685. * Read pixels from a texture, synchronously.
4686. * You may optionally specify an (x,y,width,height) sub-rectangle.
4687. * @param {SpeedyDrawableTexture} texture a texture with an FBO
  4688. * @param {number} [x]
  4689. * @param {number} [y]
  4690. * @param {number} [width]
  4691. * @param {number} [height]
  4692. * @returns {Uint8Array} pixels in the RGBA format
  4693. */
  4694. readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height) {
  4695. utils/* Utils */.A.assert(this._initialized);
  4696. const gl = texture.gl;
  4697. const fbo = texture.glFbo;
  4698. // clamp values
  4699. width = Math.max(0, Math.min(width, texture.width));
  4700. height = Math.max(0, Math.min(height, texture.height));
  4701. x = Math.max(0, Math.min(x, texture.width - width));
  4702. y = Math.max(0, Math.min(y, texture.height - height));
  4703. // buffer allocation
  4704. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4705. this._reallocate(sizeofBuffer);
  4706. // lost context?
  4707. if (gl.isContextLost()) return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4708. // read pixels
  4709. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4710. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
  4711. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4712. // done!
  4713. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4714. }
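/*
 * Usage sketch (editorial note, not part of the original source; `gpu` and
 * `tex` are assumed to be an initialized SpeedyGPU and a SpeedyDrawableTexture):
 *
 *   const reader = new SpeedyTextureReader();
 *   reader.init(gpu);
 *   const rgba = reader.readPixelsSync(tex);             // whole texture
 *   const tile = reader.readPixelsSync(tex, 0, 0, 8, 8); // 8x8 sub-rectangle
 *   reader.release(gpu);
 *
 * The returned Uint8Array is a subarray of an internal buffer that is reused
 * across calls, so copy it (e.g. new Uint8Array(rgba)) if it must outlive the
 * next read.
 */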
  4715. /**
  4716. * Read pixels from a texture, asynchronously, with PBOs.
4717. * You may optionally specify an (x,y,width,height) sub-rectangle.
4718. * @param {SpeedyDrawableTexture} texture a texture with an FBO
  4719. * @param {number} [x]
  4720. * @param {number} [y]
  4721. * @param {number} [width]
  4722. * @param {number} [height]
  4723. * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
  4724. * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
  4725. */
  4726. readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false) {
  4727. utils/* Utils */.A.assert(this._initialized);
  4728. const gl = texture.gl;
  4729. const fbo = texture.glFbo;
  4730. // clamp values
  4731. width = Math.max(0, Math.min(width, texture.width));
  4732. height = Math.max(0, Math.min(height, texture.height));
  4733. x = Math.max(0, Math.min(x, texture.width - width));
  4734. y = Math.max(0, Math.min(y, texture.height - height));
  4735. // buffer allocation
  4736. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4737. this._reallocate(sizeofBuffer);
  4738. // lost context?
  4739. if (gl.isContextLost()) return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
  4740. // do not optimize?
  4741. if (!useBufferedDownloads) {
  4742. const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4743. return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() => pixelBuffer);
  4744. }
  4745. // Hide latency with a Producer-Consumer mechanism
  4746. const numberOfBuffers = this._pixelBuffer.length;
  4747. // GPU needs to produce data
  4748. const producerIndex = this._producerIndex;
  4749. if (!this._busy[producerIndex]) {
  4750. const pbo = this._pbo[producerIndex];
  4751. const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
  4752. this._producerIndex = (producerIndex + 1) % numberOfBuffers;
  4753. this._ready[producerIndex] = false;
  4754. this._busy[producerIndex] = true;
  4755. //console.time("produce "+producerIndex);
  4756. this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
  4757. //console.timeEnd("produce "+producerIndex);
  4758. this._busy[producerIndex] = false;
  4759. this._ready[producerIndex] = true;
  4760. });
  4761. }
  4762. //else console.log("skip",producerIndex);
  4763. else /* skip frame */;
  4764. // CPU needs to consume data
  4765. const consumerIndex = this._consumerIndex;
  4766. this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
  4767. if (!this._ready[consumerIndex]) {
  4768. //console.time("consume "+consumerIndex);
  4769. return this._promise[consumerIndex].then(() => {
  4770. //console.timeEnd("consume "+consumerIndex);
  4771. this._ready[consumerIndex] = false;
  4772. return this._pixelBuffer[consumerIndex];
  4773. });
  4774. }
  4775. //console.log("NO WAIT "+consumerIndex);
  4776. this._ready[consumerIndex] = false;
  4777. return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
  4778. }
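/*
 * Usage sketch (editorial note, not part of the original source). With
 * useBufferedDownloads = true, the producer-consumer scheme above hides the
 * GPU-to-CPU transfer latency, at the cost that the resolved pixels may come
 * from a previous call, which is acceptable when streaming video frames.
 *
 *   // `reader` and `tex` as in the synchronous example; handleFrame is a
 *   // hypothetical consumer
 *   reader.readPixelsAsync(tex, 0, 0, tex.width, tex.height, true)
 *         .then(rgba => handleFrame(rgba)); // rgba may lag by one frame
 */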
  4779. /**
  4780. * Reallocate the pixel buffers, so that they can hold the required number of bytes
  4781. * If the pixel buffers already have the required capacity, then nothing is done
  4782. * @param {number} size in bytes
  4783. */
  4784. _reallocate(size) {
  4785. // no need to reallocate
  4786. if (size <= this._pixelBuffer[0].byteLength) return;
  4787. // reallocate
  4788. for (let i = 0; i < this._pixelBuffer.length; i++) {
  4789. const newBuffer = new Uint8Array(size);
  4790. //newBuffer.set(this._pixelBuffer[i]); // make this optional?
  4791. this._pixelBuffer[i] = newBuffer;
  4792. }
  4793. }
  4794. /**
  4795. * Allocate PBOs
  4796. * @param {SpeedyGPU} gpu
  4797. */
  4798. _allocatePBOs(gpu) {
  4799. const gl = gpu.gl;
  4800. for (let i = 0; i < this._pbo.length; i++) this._pbo[i] = gl.createBuffer();
  4801. }
  4802. /**
  4803. * Deallocate PBOs
  4804. * @param {SpeedyGPU} gpu
  4805. */
  4806. _deallocatePBOs(gpu) {
  4807. const gl = gpu.gl;
  4808. for (let i = this._pbo.length - 1; i >= 0; i--) {
  4809. gl.deleteBuffer(this._pbo[i]);
  4810. this._pbo[i] = null;
  4811. }
  4812. }
  4813. /**
  4814. * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
  4815. * It's assumed that the target texture is in the RGBA8 format
  4816. * @param {WebGL2RenderingContext} gl
  4817. * @param {WebGLBuffer} pbo
  4818. * @param {Uint8Array} outputBuffer with size >= width * height * 4
  4819. * @param {WebGLFramebuffer} fbo
  4820. * @param {GLint} x
  4821. * @param {GLint} y
  4822. * @param {GLsizei} width
  4823. * @param {GLsizei} height
  4824. * @returns {SpeedyPromise<void>}
  4825. */
  4826. static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height) {
  4827. /*
  4828. When testing Speedy on Chrome (mobile) using about:tracing with the
  4829. --enable-gpu-service-tracing flag, I found that A LOT of time is spent
  4830. in TraceGLAPI::glMapBufferRange, which takes place just after
  4831. GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
  4832. Using multiple PBOs doesn't seem to impact Chrome too much. Performance
  4833. is much better on Firefox. This suggests there is room for improvement.
4834. I do not yet clearly understand the cause of this lag on Chrome. It
  4835. may be a CPU-GPU synchronization issue.
  4836. EDIT: I have found that using gl.flush() aggressively greatly improves
  4837. things. WebGL commands will be pushed frequently!
  4838. See also:
  4839. https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
  4840. https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
  4841. */
  4842. const size = width * height * 4;
  4843. // validate outputBuffer
  4844. utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
  4845. // read pixels into the PBO
  4846. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4847. gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
  4848. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4849. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  4850. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4851. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4852. // create a fence
  4853. const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
  4854. gl.flush(); // make sure the sync command is read
  4855. // wait for the commands to be processed by the GPU
  4856. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  4857. // according to the WebGL2 spec sec 3.7.14 Sync objects,
  4858. // "sync objects may only transition to the signaled state
  4859. // when the user agent's event loop is not executing a task"
  4860. // in other words, it won't be signaled in the same frame
  4861. if (settings/* Settings */.w.gpuPollingMode != 'asap') runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);else asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  4862. }).then(() => {
  4863. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4864. gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
  4865. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4866. }).catch(err => {
  4867. throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
  4868. }).finally(() => {
  4869. gl.deleteSync(sync);
  4870. });
  4871. }
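/*
 * Editorial note: the asynchronous readback above boils down to the following
 * WebGL2 pattern (sketch only; FBO setup, error handling and polling cadence
 * are omitted, and `pbo`, `fbo`, `w`, `h`, `out` are placeholders):
 *
 *   gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
 *   gl.bufferData(gl.PIXEL_PACK_BUFFER, w * h * 4, gl.DYNAMIC_READ);
 *   gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
 *   gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_BYTE, 0); // into the PBO
 *   gl.bindFramebuffer(gl.FRAMEBUFFER, null);
 *   const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
 *   gl.flush(); // push the fence to the GPU
 *   // ...once gl.clientWaitSync(sync, 0, 0) reports ALREADY_SIGNALED or
 *   // CONDITION_SATISFIED (polled on later frames):
 *   gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
 *   gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, out);
 *   gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
 *   gl.deleteSync(sync);
 */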
  4872. /**
  4873. * Waits for a sync object to become signaled
  4874. * @param {WebGL2RenderingContext} gl
  4875. * @param {WebGLSync} sync
  4876. * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
  4877. * @param {Function} resolve
  4878. * @param {Function} reject
  4879. * @param {number} [pollInterval] in milliseconds
  4880. * @param {number} [remainingAttempts] for timeout
  4881. */
  4882. static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000) {
  4883. (function poll() {
  4884. const status = gl.clientWaitSync(sync, flags, 0);
  4885. if (remainingAttempts-- <= 0) {
  4886. reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
  4887. } else if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
  4888. resolve();
  4889. } else {
  4890. //setTimeout(poll, pollInterval);
  4891. if (settings/* Settings */.w.gpuPollingMode != 'asap') requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
  4892. else asap(poll);
  4893. }
  4894. })();
  4895. }
  4896. }
  4897. // EXTERNAL MODULE: ./src/utils/globals.js
  4898. var globals = __nested_webpack_require_314174__(3816);
  4899. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
  4900. /*
  4901. * speedy-vision.js
  4902. * GPU-accelerated Computer Vision for JavaScript
  4903. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4904. *
  4905. * Licensed under the Apache License, Version 2.0 (the "License");
  4906. * you may not use this file except in compliance with the License.
  4907. * You may obtain a copy of the License at
  4908. *
  4909. * http://www.apache.org/licenses/LICENSE-2.0
  4910. *
  4911. * Unless required by applicable law or agreed to in writing, software
  4912. * distributed under the License is distributed on an "AS IS" BASIS,
  4913. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4914. * See the License for the specific language governing permissions and
  4915. * limitations under the License.
  4916. *
  4917. * speedy-texture.js
  4918. * A wrapper around WebGLTexture
  4919. */
  4920. /**
  4921. * Get a buffer filled with zeros
  4922. * @param {number} size number of bytes
  4923. * @returns {Uint8Array}
  4924. */
  4925. /*
  4926. const zeros = (function() {
  4927. let buffer = new Uint8Array(4);
  4928. return function(size) {
  4929. if(size > buffer.length)
  4930. buffer = new Uint8Array(size);
  4931. return buffer.subarray(0, size);
  4932. }
  4933. })();
  4934. */
  4935. /**
  4936. * A wrapper around WebGLTexture
  4937. */
  4938. class SpeedyTexture {
  4939. /**
  4940. * Constructor
  4941. * @param {WebGL2RenderingContext} gl
  4942. * @param {number} width texture width in pixels
  4943. * @param {number} height texture height in pixels
  4944. * @param {number} [format]
  4945. * @param {number} [internalFormat]
  4946. * @param {number} [dataType]
  4947. * @param {number} [filter]
  4948. * @param {number} [wrap]
  4949. */
  4950. constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT) {
  4951. /** @type {WebGL2RenderingContext} rendering context */
  4952. this._gl = gl;
  4953. /** @type {number} width of the texture */
  4954. this._width = Math.max(1, width | 0);
  4955. /** @type {number} height of the texture */
  4956. this._height = Math.max(1, height | 0);
  4957. /** @type {boolean} have we generated mipmaps for this texture? */
  4958. this._hasMipmaps = false;
  4959. /** @type {number} texture format */
  4960. this._format = format;
  4961. /** @type {number} internal format (usually a sized format) */
  4962. this._internalFormat = internalFormat;
  4963. /** @type {number} data type */
  4964. this._dataType = dataType;
  4965. /** @type {number} texture filtering (min & mag) */
  4966. this._filter = filter;
  4967. /** @type {number} texture wrapping */
  4968. this._wrap = wrap;
  4969. /** @type {WebGLTexture} internal texture object */
  4970. this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
  4971. }
  4972. /**
  4973. * Releases the texture
  4974. * @returns {null}
  4975. */
  4976. release() {
  4977. const gl = this._gl;
  4978. // already released?
  4979. if (this._glTexture == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
  4980. // release resources
  4981. this.discardMipmaps();
  4982. gl.deleteTexture(this._glTexture);
  4983. this._glTexture = null;
  4984. this._width = this._height = 0;
  4985. // done!
  4986. return null;
  4987. }
  4988. /**
  4989. * Upload pixel data to the texture. The texture will be resized if needed.
  4990. * @param {TexImageSource} data
  4991. * @param {number} [width] in pixels
  4992. * @param {number} [height] in pixels
  4993. * @return {SpeedyTexture} this
  4994. */
  4995. upload(data, width = this._width, height = this._height) {
  4996. const gl = this._gl;
  4997. // bugfix: if the media is a video, we can't really
  4998. // upload it to the GPU unless it's ready
  4999. if (data instanceof HTMLVideoElement) {
  5000. if (data.readyState < 2) {
  5001. // this may happen when the video loops (Firefox)
  5002. // keep the previously uploaded texture
  5003. //Utils.warning(`Trying to process a video that isn't ready yet`);
  5004. return this;
  5005. }
  5006. }
  5007. utils/* Utils */.A.assert(width > 0 && height > 0);
  5008. this.discardMipmaps();
  5009. this._width = width;
  5010. this._height = height;
  5011. this._internalFormat = gl.RGBA8;
  5012. this._format = gl.RGBA;
  5013. this._dataType = gl.UNSIGNED_BYTE;
  5014. SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
  5015. return this;
  5016. }
  5017. /**
  5018. * Clear the texture
  5019. * @returns {this}
  5020. */
  5021. clear() {
  5022. const gl = this._gl;
  5023. // context loss?
  5024. if (gl.isContextLost()) return this;
  5025. // clear texture data
  5026. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5027. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5028. gl.bindTexture(gl.TEXTURE_2D, null);
  5029. // no mipmaps
  5030. this.discardMipmaps();
  5031. // done!
  5032. return this;
  5033. }
  5034. /**
  5035. * Resize this texture. Its content will be lost!
  5036. * @param {number} width new width, in pixels
  5037. * @param {number} height new height, in pixels
  5038. * @returns {this}
  5039. */
  5040. resize(width, height) {
  5041. const gl = this._gl;
  5042. // no need to resize?
  5043. if (this._width === width && this._height === height) return this;
  5044. // validate size
  5045. width |= 0;
  5046. height |= 0;
  5047. if (width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH) throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);else if (width < 1 || height < 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
  5048. // context loss?
  5049. if (gl.isContextLost()) return this;
  5050. // update dimensions
  5051. this._width = width;
  5052. this._height = height;
  5053. // resize
  5054. // Note: this is fast on Chrome, but seems slow on Firefox
  5055. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5056. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5057. gl.bindTexture(gl.TEXTURE_2D, null);
  5058. // no mipmaps
  5059. this.discardMipmaps();
  5060. // done!
  5061. return this;
  5062. }
  5063. /**
  5064. * Generate mipmap
  5065. * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
  5066. * @returns {SpeedyTexture} this
  5067. */
  5068. generateMipmaps(mipmap = []) {
  5069. const gl = this._gl;
  5070. // nothing to do
  5071. if (this._hasMipmaps) return this;
5072. // let the hardware compute all the levels of the pyramid, up to 1x1
  5073. // we also specify the TEXTURE_MIN_FILTER to be used from now on
  5074. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5075. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
  5076. gl.generateMipmap(gl.TEXTURE_2D);
  5077. gl.bindTexture(gl.TEXTURE_2D, null);
  5078. // accept custom textures
  5079. if (mipmap.length > 0) {
  5080. // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  5081. const width = this.width,
  5082. height = this.height;
  5083. const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
  5084. utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
  5085. // verify the dimensions of each level
  5086. for (let level = 1; level < mipmap.length; level++) {
5087. // use max(1, floor(size / 2^lod)), in accordance with
  5088. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  5089. const w = Math.max(1, width >>> level);
  5090. const h = Math.max(1, height >>> level);
  5091. // verify the dimensions of this level
  5092. utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
  5093. // copy to mipmap
  5094. mipmap[level].copyTo(this, level);
  5095. }
  5096. }
  5097. // done!
  5098. this._hasMipmaps = true;
  5099. return this;
  5100. }
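/*
 * Editorial example of the level arithmetic above: a 640x480 base texture
 * yields numMipmaps = 1 + floor(log2(640)) = 10, with level sizes
 * max(1, size >>> level): 640x480, 320x240, 160x120, 80x60, 40x30, 20x15,
 * 10x7, 5x3, 2x1 and 1x1.
 */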
  5101. /**
  5102. * Invalidates previously generated mipmap, if any
  5103. */
  5104. discardMipmaps() {
  5105. const gl = this._gl;
  5106. // nothing to do
  5107. if (!this._hasMipmaps) return;
  5108. // reset the min filter
  5109. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5110. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
  5111. gl.bindTexture(gl.TEXTURE_2D, null);
  5112. // done!
  5113. this._hasMipmaps = false;
  5114. }
  5115. /**
  5116. * Does this texture have a mipmap?
  5117. * @returns {boolean}
  5118. */
  5119. hasMipmaps() {
  5120. return this._hasMipmaps;
  5121. }
  5122. /**
  5123. * Has this texture been released?
  5124. * @returns {boolean}
  5125. */
  5126. isReleased() {
  5127. return this._glTexture == null;
  5128. }
  5129. /**
  5130. * The internal WebGLTexture
  5131. * @returns {WebGLTexture}
  5132. */
  5133. get glTexture() {
  5134. return this._glTexture;
  5135. }
  5136. /**
  5137. * The width of the texture, in pixels
  5138. * @returns {number}
  5139. */
  5140. get width() {
  5141. return this._width;
  5142. }
  5143. /**
  5144. * The height of the texture, in pixels
  5145. * @returns {number}
  5146. */
  5147. get height() {
  5148. return this._height;
  5149. }
  5150. /**
  5151. * The WebGL Context
  5152. * @returns {WebGL2RenderingContext}
  5153. */
  5154. get gl() {
  5155. return this._gl;
  5156. }
  5157. /**
  5158. * Create a WebGL texture
  5159. * @param {WebGL2RenderingContext} gl
  5160. * @param {number} width in pixels
  5161. * @param {number} height in pixels
  5162. * @param {number} format usually gl.RGBA
  5163. * @param {number} internalFormat usually gl.RGBA8
  5164. * @param {number} dataType usually gl.UNSIGNED_BYTE
  5165. * @param {number} filter usually gl.NEAREST or gl.LINEAR
  5166. * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
  5167. * @returns {WebGLTexture}
  5168. */
  5169. static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap) {
  5170. utils/* Utils */.A.assert(width > 0 && height > 0);
  5171. // create & bind texture
  5172. const texture = gl.createTexture();
  5173. gl.bindTexture(gl.TEXTURE_2D, texture);
  5174. // setup
  5175. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  5176. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  5177. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
  5178. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
  5179. //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
  5180. gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
  5181. // unbind & return
  5182. gl.bindTexture(gl.TEXTURE_2D, null);
  5183. return texture;
  5184. }
  5185. /**
  5186. * Upload pixel data to a WebGL texture
  5187. * @param {WebGL2RenderingContext} gl
  5188. * @param {WebGLTexture} texture
  5189. * @param {GLsizei} width texture width
  5190. * @param {GLsizei} height texture height
  5191. * @param {TexImageSource} pixels
  5192. * @param {GLint} lod mipmap level-of-detail
  5193. * @param {number} format
  5194. * @param {number} internalFormat
  5195. * @param {number} dataType
  5196. * @returns {WebGLTexture} texture
  5197. */
  5198. static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType) {
  5199. // Prefer calling _upload() before gl.useProgram() to avoid the
  5200. // needless switching of GL programs internally. See also:
  5201. // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  5202. gl.bindTexture(gl.TEXTURE_2D, texture);
  5203. /*
  5204. // slower than texImage2D, unlike the spec?
  5205. gl.texSubImage2D(gl.TEXTURE_2D, // target
  5206. lod, // mip level
  5207. 0, // x-offset
  5208. 0, // y-offset
  5209. width, // texture width
  5210. height, // texture height
  5211. gl.RGBA, // source format
  5212. gl.UNSIGNED_BYTE, // source type
  5213. pixels); // source data
  5214. */
  5215. gl.texImage2D(gl.TEXTURE_2D,
  5216. // target
  5217. lod,
  5218. // mip level
  5219. internalFormat,
  5220. // internal format
  5221. width,
  5222. // texture width
  5223. height,
  5224. // texture height
  5225. 0,
  5226. // border
  5227. format,
  5228. // source format
  5229. dataType,
  5230. // source type
  5231. pixels); // source data
  5232. gl.bindTexture(gl.TEXTURE_2D, null);
  5233. return texture;
  5234. }
  5235. }
  5236. /**
  5237. * A SpeedyTexture with a framebuffer
  5238. */
  5239. class SpeedyDrawableTexture extends SpeedyTexture {
  5240. /**
  5241. * Constructor
  5242. * @param {WebGL2RenderingContext} gl
  5243. * @param {number} width texture width in pixels
  5244. * @param {number} height texture height in pixels
  5245. * @param {number} [format]
  5246. * @param {number} [internalFormat]
  5247. * @param {number} [dataType]
  5248. * @param {number} [filter]
  5249. * @param {number} [wrap]
  5250. */
  5251. constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined) {
  5252. super(gl, width, height, format, internalFormat, dataType, filter, wrap);
  5253. /** @type {WebGLFramebuffer} framebuffer */
  5254. this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
  5255. }
  5256. /**
  5257. * Releases the texture
  5258. * @returns {null}
  5259. */
  5260. release() {
  5261. const gl = this._gl;
  5262. // already released?
  5263. if (this._glFbo == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
  5264. // release the framebuffer
  5265. gl.deleteFramebuffer(this._glFbo);
  5266. this._glFbo = null;
  5267. // release the SpeedyTexture
  5268. return super.release();
  5269. }
  5270. /**
  5271. * The internal WebGLFramebuffer
  5272. * @returns {WebGLFramebuffer}
  5273. */
  5274. get glFbo() {
  5275. return this._glFbo;
  5276. }
  5277. /**
  5278. * Copy this texture into another
  5279. * (you may have to discard the mipmaps after calling this function)
  5280. * @param {SpeedyTexture} texture target texture
  5281. * @param {number} [lod] level-of-detail of the target texture
  5282. */
  5283. copyTo(texture, lod = 0) {
  5284. const gl = this._gl;
  5285. // context loss?
  5286. if (gl.isContextLost()) return;
  5287. // compute texture size as max(1, floor(size / 2^lod)),
5288. // in accordance with the OpenGL ES 3.0 spec sec 3.8.10.4
  5289. // (Mipmapping)
  5290. const pot = 1 << (lod |= 0);
  5291. const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
  5292. const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
  5293. // validate
  5294. utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
  5295. // copy to texture
  5296. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
  5297. }
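/*
 * Editorial example of the size check above: copying into mip level 2
 * (lod = 2) of an 800x600 target gives pot = 4, so this texture must measure
 * max(1, floor(800/4)) x max(1, floor(600/4)) = 200x150.
 */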
  5298. /*
  5299. * Resize this texture
  5300. * @param {number} width new width, in pixels
  5301. * @param {number} height new height, in pixels
  5302. * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
  5303. * @returns {this}
  5304. */
  5305. /*resize(width, height, preserveContent = false)
  5306. {
  5307. const gl = this._gl;
  5308. // no need to preserve the content?
  5309. if(!preserveContent)
  5310. return super.resize(width, height);
  5311. // no need to resize?
  5312. if(this._width === width && this._height === height)
  5313. return this;
  5314. // validate size
  5315. width |= 0; height |= 0;
  5316. Utils.assert(width > 0 && height > 0);
  5317. // context loss?
  5318. if(gl.isContextLost())
  5319. return this;
  5320. // allocate new texture
  5321. const newTexture = SpeedyTexture._createTexture(gl, width, height);
  5322. // initialize the new texture with zeros to avoid a
  5323. // warning when calling copyTexSubImage2D() on Firefox
  5324. // this may not be very efficient?
  5325. SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
  5326. // copy the old texture to the new one
  5327. const oldWidth = this._width, oldHeight = this._height;
  5328. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
  5329. // bind FBO
  5330. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5331. // invalidate old data (is this needed?)
  5332. gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
  5333. // attach the new texture to the existing framebuffer
  5334. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5335. gl.COLOR_ATTACHMENT0, // color buffer
  5336. gl.TEXTURE_2D, // tex target
  5337. newTexture, // texture
  5338. 0); // mipmap level
  5339. // unbind FBO
  5340. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5341. // release the old texture and replace it
  5342. gl.deleteTexture(this._glTexture);
  5343. this._glTexture = newTexture;
  5344. // update dimensions & discard mipmaps
  5345. this.discardMipmaps();
  5346. this._width = width;
  5347. this._height = height;
  5348. // done!
  5349. return this;
  5350. }
  5351. */
  5352. /**
  5353. * Clear the texture
  5354. * @returns {this}
  5355. */
  5356. clear() {
  5357. //
  5358. // When we pass null to texImage2D(), it seems that Firefox
  5359. // doesn't clear the texture. Instead, it displays this warning:
  5360. //
  5361. // "WebGL warning: drawArraysInstanced:
  5362. // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
  5363. //
  5364. // Here is a workaround:
  5365. //
  5366. return this.clearToColor(0, 0, 0, 0);
  5367. }
  5368. /**
  5369. * Clear the texture to a color
  5370. * @param {number} r red component, a value in [0,1]
  5371. * @param {number} g green component, a value in [0,1]
  5372. * @param {number} b blue component, a value in [0,1]
  5373. * @param {number} a alpha component, a value in [0,1]
  5374. * @returns {this}
  5375. */
  5376. clearToColor(r, g, b, a) {
  5377. const gl = this._gl;
  5378. // context loss?
  5379. if (gl.isContextLost()) return this;
  5380. // clamp parameters
  5381. r = Math.max(0.0, Math.min(+r, 1.0));
  5382. g = Math.max(0.0, Math.min(+g, 1.0));
  5383. b = Math.max(0.0, Math.min(+b, 1.0));
  5384. a = Math.max(0.0, Math.min(+a, 1.0));
  5385. // discard mipmaps, if any
  5386. this.discardMipmaps();
  5387. // clear the texture
  5388. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5389. gl.viewport(0, 0, this._width, this._height);
  5390. gl.clearColor(r, g, b, a);
  5391. gl.clear(gl.COLOR_BUFFER_BIT);
  5392. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5393. // done!
  5394. return this;
  5395. }
  5396. /**
  5397. * Inspect the pixels of the texture for debugging purposes
  5398. * @param {SpeedyGPU} gpu
  5399. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5400. * @returns {Uint8Array}
  5401. */
  5402. inspect(gpu, textureReader) {
  5403. if (textureReader === undefined) {
  5404. textureReader = new SpeedyTextureReader();
  5405. textureReader.init(gpu);
  5406. const pixels = textureReader.readPixelsSync(this);
  5407. textureReader.release(gpu);
  5408. return new Uint8Array(pixels); // copy the array
  5409. } else {
  5410. const pixels = textureReader.readPixelsSync(this);
  5411. return new Uint8Array(pixels);
  5412. }
  5413. }
  5414. /**
  5415. * Inspect the pixels of the texture as unsigned 32-bit integers
  5416. * @param {SpeedyGPU} gpu
  5417. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5418. * @returns {Uint32Array}
  5419. */
  5420. inspect32(gpu, textureReader) {
  5421. utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
  5422. return new Uint32Array(this.inspect(gpu, textureReader).buffer);
  5423. }
  5424. /**
  5425. * Create a FBO associated with an existing texture
  5426. * @param {WebGL2RenderingContext} gl
  5427. * @param {WebGLTexture} texture
  5428. * @returns {WebGLFramebuffer}
  5429. */
  5430. static _createFramebuffer(gl, texture) {
  5431. const fbo = gl.createFramebuffer();
  5432. // setup framebuffer
  5433. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5434. gl.framebufferTexture2D(gl.FRAMEBUFFER,
  5435. // target
  5436. gl.COLOR_ATTACHMENT0,
  5437. // color buffer
  5438. gl.TEXTURE_2D,
  5439. // tex target
  5440. texture,
  5441. // texture
  5442. 0); // mipmap level
  5443. // check for errors
  5444. const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  5445. if (status != gl.FRAMEBUFFER_COMPLETE) {
  5446. const error = (() => ['FRAMEBUFFER_UNSUPPORTED', 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS', 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'].filter(err => gl[err] === status)[0] || 'unknown error')();
  5447. throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
  5448. }
  5449. // unbind & return
  5450. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5451. return fbo;
  5452. }
  5453. /**
  5454. * Copy data from a framebuffer to a texture
  5455. * @param {WebGL2RenderingContext} gl
  5456. * @param {WebGLFramebuffer} fbo we'll read the data from this
  5457. * @param {WebGLTexture} texture destination texture
  5458. * @param {GLint} x xpos (where to start copying)
  5459. * @param {GLint} y ypos (where to start copying)
  5460. * @param {GLsizei} width width of the texture
  5461. * @param {GLsizei} height height of the texture
  5462. * @param {GLint} [lod] mipmap level-of-detail
  5463. * @returns {WebGLTexture} texture
  5464. */
  5465. static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0) {
  5466. //gl.activeTexture(gl.TEXTURE0);
  5467. gl.bindTexture(gl.TEXTURE_2D, texture);
  5468. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5469. gl.copyTexSubImage2D(gl.TEXTURE_2D,
  5470. // target
  5471. lod,
  5472. // mipmap level
  5473. 0,
  5474. // xoffset
  5475. 0,
  5476. // yoffset
  5477. x,
  5478. // xpos (where to start copying)
  5479. y,
  5480. // ypos (where to start copying)
  5481. width,
  5482. // width of the texture
  5483. height // height of the texture
  5484. );
  5485. /*
  5486. gl.copyTexImage2D(
  5487. gl.TEXTURE_2D, // target
  5488. lod, // mipmap level
  5489. gl.RGBA, // internal format
  5490. x, // xpos (where to start copying)
  5491. y, // ypos (where to start copying)
  5492. width, // width of the texture
  5493. height, // height of the texture
  5494. 0 // border
  5495. );
  5496. */
  5497. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5498. gl.bindTexture(gl.TEXTURE_2D, null);
  5499. return texture;
  5500. }
  5501. }
  5502. // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
  5503. var shader_declaration = __nested_webpack_require_314174__(9420);
  5504. ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
  5505. /*
  5506. * speedy-vision.js
  5507. * GPU-accelerated Computer Vision for JavaScript
  5508. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  5509. *
  5510. * Licensed under the Apache License, Version 2.0 (the "License");
  5511. * you may not use this file except in compliance with the License.
  5512. * You may obtain a copy of the License at
  5513. *
  5514. * http://www.apache.org/licenses/LICENSE-2.0
  5515. *
  5516. * Unless required by applicable law or agreed to in writing, software
  5517. * distributed under the License is distributed on an "AS IS" BASIS,
  5518. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5519. * See the License for the specific language governing permissions and
  5520. * limitations under the License.
  5521. *
  5522. * speedy-program.js
  5523. * SpeedyProgram class
  5524. */
  5525. /** @const {Object<string,string>} Map uniform type to a gl function */
  5526. const UNIFORM_SETTERS = Object.freeze({
  5527. 'sampler2D': 'uniform1i',
  5528. 'isampler2D': 'uniform1i',
  5529. 'usampler2D': 'uniform1i',
  5530. 'float': 'uniform1f',
  5531. 'int': 'uniform1i',
  5532. 'uint': 'uniform1ui',
  5533. 'bool': 'uniform1i',
  5534. 'vec2': 'uniform2f',
  5535. 'vec3': 'uniform3f',
  5536. 'vec4': 'uniform4f',
  5537. 'ivec2': 'uniform2i',
  5538. 'ivec3': 'uniform3i',
  5539. 'ivec4': 'uniform4i',
  5540. 'uvec2': 'uniform2ui',
  5541. 'uvec3': 'uniform3ui',
  5542. 'uvec4': 'uniform4ui',
  5543. 'bvec2': 'uniform2i',
  5544. 'bvec3': 'uniform3i',
  5545. 'bvec4': 'uniform4i',
  5546. 'mat2': 'uniformMatrix2fv',
  5547. 'mat3': 'uniformMatrix3fv',
  5548. 'mat4': 'uniformMatrix4fv'
  5549. });
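/*
 * Editorial example: the table above selects the WebGL2 call used to upload a
 * uniform from its GLSL type. A 'vec2' such as texSize ends up in a call of
 * the form gl.uniform2f(location, value[0], value[1]), and a 'sampler2D'
 * argument is assigned its texture unit via gl.uniform1i(location, unit).
 */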
  5550. /**
  5551. * @typedef {object} SpeedyProgramOptions
  5552. * @property {boolean} [renderToTexture] render results to a texture?
  5553. * @property {boolean} [pingpong] alternate output texture between calls
  5554. */
  5555. /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
  5556. /**
  5557. * A SpeedyProgram is a Function that runs GLSL code
  5558. */
  5559. class SpeedyProgram extends Function {
  5560. /**
  5561. * Creates a new SpeedyProgram
  5562. * @param {WebGL2RenderingContext} gl WebGL context
  5563. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5564. * @param {SpeedyProgramOptions} [options] user options
  5565. */
  5566. constructor(gl, shaderdecl, options = {}) {
  5567. super('...args', 'return this._self._call(...args)');
  5568. /** @type {SpeedyProgram} this function bound to this function! */
  5569. this._self = this.bind(this);
  5570. this._self._init(gl, shaderdecl, options);
  5571. return this._self;
  5572. }
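/*
 * Editorial note on the constructor above: extending Function makes every
 * SpeedyProgram instance directly callable. super('...args', ...) builds a
 * plain function whose body forwards to this._self._call(...args), and the
 * bound copy returned from the constructor is the object users invoke.
 * A minimal standalone sketch of the same trick (not speedy-vision code):
 *
 *   class Callable extends Function {
 *     constructor() {
 *       super('...args', 'return this._self._call(...args)');
 *       this._self = this.bind(this);
 *       return this._self;
 *     }
 *     _call(x) { return 2 * x; }
 *   }
 *   // new Callable()(21) === 42
 */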
  5573. /**
  5574. * Initialize the SpeedyProgram
  5575. * @param {WebGL2RenderingContext} gl WebGL context
  5576. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5577. * @param {SpeedyProgramOptions} options user options
  5578. */
  5579. _init(gl, shaderdecl, options) {
  5580. // not a valid context?
  5581. if (gl.isContextLost()) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
  5582. // options object
  5583. options = Object.assign({
  5584. // default options
  5585. renderToTexture: true,
  5586. pingpong: false
  5587. }, options);
  5588. /** @type {WebGL2RenderingContext} */
  5589. this._gl = gl;
  5590. /** @type {WebGLProgram} vertex shader + fragment shader */
  5591. this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
  5592. /** @type {ProgramGeometry} this is a quad */
  5593. this._geometry = new ProgramGeometry(gl, {
  5594. position: shaderdecl.locationOfAttributes.position,
  5595. texCoord: shaderdecl.locationOfAttributes.texCoord
  5596. });
  5597. /** @type {string[]} names of the arguments of the SpeedyProgram */
  5598. this._argnames = shaderdecl.arguments;
  5599. /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
  5600. this._argIsArray = new Array(this._argnames.length).fill(false);
  5601. /** @type {UBOHelper} UBO helper (lazy instantiation) */
  5602. this._ubo = null;
  5603. /** @type {boolean} should we render to a texture? If false, we render to the canvas */
  5604. this._renderToTexture = Boolean(options.renderToTexture);
  5605. /** @type {number} width of the output */
  5606. this._width = 1;
  5607. /** @type {number} height of the output */
  5608. this._height = 1;
  5609. /** @type {[number,number]} cached object that stores the size of the output */
  5610. this._size = [1, 1];
  5611. /** @type {SpeedyDrawableTexture[]} output texture(s) */
  5612. this._texture = new Array(options.pingpong ? 2 : 1).fill(null);
  5613. /** @type {number} used for pingpong rendering */
  5614. this._textureIndex = 0;
  5615. /** @type {Map<string,UniformVariable>} uniform variables */
  5616. this._uniform = new Map();
  5617. /** @type {ShaderDeclaration} shader declaration */
  5618. this._shaderdecl = shaderdecl;
  5619. // autodetect uniforms
  5620. gl.useProgram(this._program);
  5621. for (const name of shaderdecl.uniforms) {
  5622. const type = shaderdecl.uniformType(name);
  5623. const location = gl.getUniformLocation(this._program, name);
  5624. this._uniform.set(name, new UniformVariable(type, location));
  5625. }
  5626. // match arguments & uniforms
  5627. for (let j = 0; j < this._argnames.length; j++) {
  5628. const argname = this._argnames[j];
  5629. if (!this._uniform.has(argname)) {
  5630. this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
  5631. if (!this._argIsArray[j]) throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
  5632. }
  5633. }
  5634. }
  5635. /**
  5636. * Run the SpeedyProgram
  5637. * @param {...SpeedyProgramUniformValue} args
  5638. * @returns {SpeedyDrawableTexture}
  5639. */
  5640. _call(...args) {
  5641. const gl = this._gl;
  5642. const argnames = this._argnames;
  5643. const texture = this._texture[this._textureIndex];
  5644. // matching arguments?
  5645. if (args.length != argnames.length) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
  5646. // can't use the output texture as an input
  5647. /*
  5648. // slower method
  5649. const flatArgs = Utils.flatten(args);
  5650. for(let j = flatArgs.length - 1; j >= 0; j--) {
  5651. if(flatArgs[j] === this._texture[this._textureIndex])
  5652. throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5653. }
  5654. */
  5655. for (let j = args.length - 1; j >= 0; j--) {
  5656. if (args[j] === texture) throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5657. // else if(Array.isArray(args[j])) ...
  5658. // we don't support passing arrays of textures at the time of this writing
  5659. }
  5660. // context loss?
  5661. if (gl.isContextLost()) return texture;
  5662. // use program
  5663. gl.useProgram(this._program);
  5664. // bind the VAO
  5665. gl.bindVertexArray(this._geometry.vao);
  5666. // select the render target
  5667. const fbo = this._renderToTexture ? texture.glFbo : null;
  5668. // update texSize uniform (available in all fragment shaders)
  5669. const texSize = this._uniform.get('texSize');
  5670. this._size[0] = this._width;
  5671. this._size[1] = this._height;
  5672. texSize.setValue(gl, this._size);
  5673. // set uniforms[i] to args[i]
  5674. for (let i = 0, texNo = 0; i < args.length; i++) {
  5675. const argname = argnames[i];
  5676. if (!this._argIsArray[i]) {
  5677. // uniform variable matches argument name
  5678. const uniform = this._uniform.get(argname);
  5679. texNo = uniform.setValue(gl, args[i], texNo);
  5680. } else {
  5681. // uniform array matches argument name
  5682. const array = args[i];
  5683. if (Array.isArray(array)) {
  5684. if (this._uniform.has(indexedVariable(argname, array.length))) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
  5685. for (let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++) texNo = uniform.setValue(gl, array[j], texNo);
  5686. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
  5687. }
  5688. }
  5689. // set Uniform Buffer Objects (if any)
  5690. if (this._ubo !== null) this._ubo.update();
  5691. // bind the FBO
  5692. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5693. // draw call
  5694. gl.viewport(0, 0, this._width, this._height);
  5695. gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
  5696. // unbind the FBO
  5697. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5698. // unbind the VAO
  5699. gl.bindVertexArray(null);
  5700. // we've just changed the texture! discard the pyramid, if any
  5701. if (texture != null) texture.discardMipmaps();
  5702. // ping-pong rendering
  5703. this._pingpong();
  5704. // done!
  5705. return texture;
  5706. }
  5707. /**
  5708. * Set the output texture(s) and its (their) shape(s)
  5709. * @param {number} width new width, in pixels
  5710. * @param {number} height new height, in pixels
  5711. * @param {...SpeedyDrawableTexture|null} texture output texture(s)
  5712. * @returns {SpeedyProgram} this
  5713. */
  5714. outputs(width, height, ...texture) {
  5715. this._setOutputTexture(...texture);
  5716. this._setOutputSize(width, height);
  5717. return this;
  5718. }
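/*
 * Usage sketch (editorial note, not part of the original source). Since a
 * SpeedyProgram is callable, a typical invocation pairs outputs() with a
 * direct call; `blur` and `inputTexture` are hypothetical names here:
 *
 *   blur.outputs(width, height, outputTexture); // one texture, no pingpong
 *   const result = blur(inputTexture); // runs the shader, returns a SpeedyDrawableTexture
 */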
  5719. /**
  5720. * Set the size of the output
  5721. * @param {number} width new width, in pixels
  5722. * @param {number} height new height, in pixels
  5723. * @returns {SpeedyProgram} this
  5724. */
  5725. _setOutputSize(width, height) {
  5726. utils/* Utils */.A.assert(width > 0 && height > 0);
  5727. // update output size
  5728. this._width = width | 0;
  5729. this._height = height | 0;
  5730. // resize the output texture(s)
  5731. for (let i = 0; i < this._texture.length; i++) {
  5732. if (this._texture[i] != null) this._texture[i].resize(this._width, this._height);
  5733. }
  5734. // done!
  5735. return this;
  5736. }
  5737. /**
  5738. * Use the provided texture(s) as output
  5739. * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
  5740. * @returns {SpeedyProgram} this
  5741. */
  5742. _setOutputTexture(...texture) {
  5743. utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
  5744. // update output texture(s)
  5745. for (let i = 0; i < this._texture.length; i++) this._texture[i] = texture[i];
  5746. this._textureIndex = 0;
  5747. // done!
  5748. return this;
  5749. }
  5750. /**
  5751. * Clear the internal textures
  5752. * @returns {SpeedyDrawableTexture}
  5753. */
  5754. clear() {
  5755. const texture = this._texture[this._textureIndex];
  5756. // clear internal textures
  5757. for (let i = 0; i < this._texture.length; i++) this._texture[i].clear();
  5758. // ping-pong rendering
  5759. this._pingpong();
  5760. // done!
  5761. return texture;
  5762. }
  5763. /**
  5764. * Set data using a Uniform Buffer Object
  5765. * @param {string} blockName uniform block name
  5766. * @param {ArrayBufferView} data
  5767. * @returns {SpeedyProgram} this
  5768. */
  5769. setUBO(blockName, data) {
  5770. if (this._ubo === null) this._ubo = new UBOHelper(this._gl, this._program);
  5771. this._ubo.set(blockName, data);
  5772. return this;
  5773. }
  5774. /**
  5775. * Release the resources associated with this SpeedyProgram
  5776. * @returns {null}
  5777. */
  5778. release() {
  5779. const gl = this._gl;
  5780. // Release UBOs (if any)
  5781. if (this._ubo != null) this._ubo = this._ubo.release();
  5782. // Unlink textures
  5783. this._texture.fill(null);
  5784. // Release geometry
  5785. this._geometry = this._geometry.release();
  5786. // Release program
  5787. gl.deleteProgram(this._program);
  5788. this._program = null;
  5789. // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
  5790. // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
  5791. // will automatically be marked for deletion when the JS object is
  5792. // destroyed (i.e., garbage collected)
  5793. // done!
  5794. return null;
  5795. }
  5796. /**
  5797. * A constant #defined in the shader declaration
  5798. * @param {string} name
  5799. * @returns {number}
  5800. */
  5801. definedConstant(name) {
  5802. return this._shaderdecl.definedConstant(name);
  5803. }
  5804. /**
  5805. * Helper method for pingpong rendering: alternates
  5806. * the texture index from 0 to 1 and vice-versa
  5807. */
  5808. _pingpong() {
  5809. if (this._texture.length > 1) this._textureIndex = 1 - this._textureIndex;
  5810. }
  5811. /**
  5812. * Compile and link GLSL shaders
  5813. * @param {WebGL2RenderingContext} gl
  5814. * @param {string} vertexShaderSource GLSL code of the vertex shader
  5815. * @param {string} fragmentShaderSource GLSL code of the fragment shader
  5816. * @returns {WebGLProgram}
  5817. */
  5818. static _compile(gl, vertexShaderSource, fragmentShaderSource) {
  5819. const program = gl.createProgram();
  5820. const vertexShader = gl.createShader(gl.VERTEX_SHADER);
  5821. const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  5822. // compile vertex shader
  5823. gl.shaderSource(vertexShader, vertexShaderSource);
  5824. gl.compileShader(vertexShader);
  5825. gl.attachShader(program, vertexShader);
  5826. // compile fragment shader
  5827. gl.shaderSource(fragmentShader, fragmentShaderSource);
  5828. gl.compileShader(fragmentShader);
  5829. gl.attachShader(program, fragmentShader);
  5830. // link program
  5831. gl.linkProgram(program);
  5832. gl.validateProgram(program);
  5833. // return on success
  5834. if (gl.getProgramParameter(program, gl.LINK_STATUS)) return program;
5835. // on failure, collect the error logs and release the GL objects
  5836. const errors = [gl.getShaderInfoLog(fragmentShader), gl.getShaderInfoLog(vertexShader), gl.getProgramInfoLog(program)];
  5837. gl.deleteProgram(program);
  5838. gl.deleteShader(fragmentShader);
  5839. gl.deleteShader(vertexShader);
5840. // format the shader source with line numbers and throw
  5841. const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
  5842. const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
  5843. const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
  5844. const formattedSource = source.split('\n').map((line, no) => col(1 + no) + line).join('\n');
  5845. throw new utils_errors/* GLError */.wB(`\n\n---------- ERROR ----------\n\n` + errors.filter(err => err).join('\n') + `\n\n---------- SOURCE CODE ----------\n\n` + formattedSource + '\n');
  5846. }
  5847. }
  5848. // ============================================================================
  5849. // HELPERS
  5850. // ============================================================================
  5851. /**
  5852. * Configure and store the VAO and the VBOs
  5853. * @param {WebGL2RenderingContext} gl
  5854. * @param {LocationOfAttributes} location
  5855. * @returns {ProgramGeometry}
  5856. *
  5857. * @typedef {Object} LocationOfAttributes
  5858. * @property {number} position
  5859. * @property {number} texCoord
  5860. *
  5861. * @typedef {Object} BufferOfAttributes
  5862. * @property {WebGLBuffer} position
  5863. * @property {WebGLBuffer} texCoord
  5864. */
  5865. function ProgramGeometry(gl, location) {
  5866. /** @type {WebGLVertexArrayObject} Vertex Array Object */
  5867. this.vao = gl.createVertexArray();
  5868. /** @type {BufferOfAttributes} Vertex Buffer Objects */
  5869. this.vbo = Object.freeze({
  5870. position: gl.createBuffer(),
  5871. texCoord: gl.createBuffer()
  5872. });
  5873. /** @type {WebGL2RenderingContext} */
  5874. this._gl = gl;
  5875. // bind the VAO
  5876. gl.bindVertexArray(this.vao);
  5877. // set the position attribute
  5878. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
  5879. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5880. // clip coordinates (CCW)
  5881. -1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
  5882. gl.enableVertexAttribArray(location.position);
  5883. gl.vertexAttribPointer(location.position,
  5884. // attribute location
  5885. 2,
  5886. // 2 components per vertex (x,y)
  5887. gl.FLOAT,
  5888. // type
  5889. false,
  5890. // don't normalize
  5891. 0,
  5892. // default stride (tightly packed)
  5893. 0); // offset
  5894. // set the texCoord attribute
  5895. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
  5896. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5897. // texture coordinates (CCW)
  5898. 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);
  5899. gl.enableVertexAttribArray(location.texCoord);
  5900. gl.vertexAttribPointer(location.texCoord,
  5901. // attribute location
  5902. 2,
  5903. // 2 components per vertex (x,y)
  5904. gl.FLOAT,
  5905. // type
  5906. false,
  5907. // don't normalize
  5908. 0,
  5909. // default stride (tightly packed)
  5910. 0); // offset
  5911. // unbind
  5912. gl.bindBuffer(gl.ARRAY_BUFFER, null);
  5913. gl.bindVertexArray(null);
  5914. // done!
  5915. return Object.freeze(this);
  5916. }
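// Note: the two VBOs above describe the same full-screen quad. The 12 floats
// are 6 vertices forming 2 CCW triangles in clip space, (-1,-1)(1,-1)(-1,1)
// and (-1,1)(1,-1)(1,1), and the texture coordinates are (position + 1) / 2,
// so the fragment shader sees texCoord spanning [0,1] x [0,1].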
  5917. /**
  5918. * Releases the internal resources
  5919. * @returns {null}
  5920. */
  5921. ProgramGeometry.prototype.release = function () {
  5922. const gl = this._gl;
  5923. gl.deleteVertexArray(this.vao);
  5924. gl.deleteBuffer(this.vbo.position);
  5925. gl.deleteBuffer(this.vbo.texCoord);
  5926. return null;
  5927. };
  5928. /**
  5929. * Helper class for storing data in GLSL uniform variables
  5930. * @param {string} type
  5931. * @param {WebGLUniformLocation} location
  5932. */
  5933. function UniformVariable(type, location) {
  5934. /** @type {string} GLSL data type */
  5935. this.type = String(type);
  5936. if (!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
  5937. /** @type {WebGLUniformLocation} uniform location in a WebGL program */
  5938. this.location = location;
  5939. /** @type {string} setter function */
  5940. this.setter = UNIFORM_SETTERS[this.type];
  5941. const n = Number(this.setter.match(/^uniform(Matrix)?(\d)/)[2]) | 0;
  5942. /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
  5943. this.dim = this.type.startsWith('mat') ? 2 : this.type.indexOf('vec') >= 0 ? 1 : 0;
  5944. /** @type {number} required number of scalars */
  5945. this.length = this.dim == 2 ? n * n : n;
  5946. /** @type {SpeedyProgramUniformValue|null} cached value */
  5947. this._value = null;
  5948. }
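// Examples of the bookkeeping above, assuming the usual WebGL2 setter names in
// UNIFORM_SETTERS (e.g. 'vec3' -> 'uniform3f', 'mat3' -> 'uniformMatrix3fv'):
//
//   'float' => dim 0, length 1 (scalar)
//   'vec3'  => dim 1, length 3 (one scalar per component)
//   'mat3'  => dim 2, length 9 (3x3 entries, column-major)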
  5949. /**
  5950. * Set the value of a uniform variable
  5951. * @param {WebGL2RenderingContext} gl
  5952. * @param {SpeedyProgramUniformValue} value use column-major format for matrices
  5953. * @param {number} [texNo] current texture index
  5954. * @returns {number} new texture index
  5955. */
  5956. UniformVariable.prototype.setValue = function (gl, value, texNo = -1) {
  5957. const setValue = /** @type {Function} */gl[this.setter];
  5958. // check uniform type
  5959. if (typeof value === 'object' && this.type.endsWith('sampler2D')) {
  5960. // set texture
5961. if (texNo >= gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)) throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)}`);else if (Array.isArray(value)) throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);else if (value == null) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);else if (texNo < 0) throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`); // note: the texture unit limit must be queried with getParameter(); gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS alone is just the GLenum
  5962. const tex = value;
  5963. gl.activeTexture(gl.TEXTURE0 + texNo);
  5964. gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
  5965. gl.uniform1i(this.location, texNo);
  5966. texNo++;
  5967. } else if (value === this._value && typeof value !== 'object') {
  5968. // do not update the uniform if it hasn't changed
  5969. // note that value may be an array whose entries may have been updated
  5970. void 0;
  5971. } else if (typeof value === 'number' || typeof value === 'boolean') {
  5972. // set scalar value
  5973. setValue.call(gl, this.location, value);
  5974. } else if (Array.isArray(value)) {
  5975. // set vector or matrix
  5976. if (value.length === this.length) {
  5977. if (this.dim == 2) setValue.call(gl, this.location, false, value); // matrix
  5978. else setValue.call(gl, this.location, ...value); // vector
  5979. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
  5980. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
  5981. // cache the value
  5982. this._value = value;
  5983. // done
  5984. return texNo;
  5985. };
  5986. /**
  5987. * @typedef {object} UBOStuff
  5988. * @property {WebGLBuffer} buffer
  5989. * @property {number} blockBindingIndex "global" binding index
  5990. * @property {number} blockIndex UBO "location" in the program
  5991. * @property {ArrayBufferView|null} data user-data
  5992. */
  5993. /**
  5994. * A helper class for handling Uniform Buffer Objects (UBOs)
  5995. * @param {WebGL2RenderingContext} gl
  5996. * @param {WebGLProgram} program
  5997. */
  5998. function UBOHelper(gl, program) {
  5999. /** @type {WebGL2RenderingContext} */
  6000. this._gl = gl;
  6001. /** @type {WebGLProgram} */
  6002. this._program = program;
  6003. /** @type {number} auto-increment counter */
  6004. this._nextIndex = 0;
  6005. /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
  6006. this._ubo = Object.create(null);
  6007. }
  6008. /**
  6009. * Set Uniform Buffer Object data
  6010. * (the buffer will be uploaded when the program is executed)
  6011. * @param {string} name uniform block name
  6012. * @param {ArrayBufferView} data
  6013. */
  6014. UBOHelper.prototype.set = function (name, data) {
  6015. const gl = this._gl;
  6016. // create UBO entry
  6017. if (this._ubo[name] === undefined) {
  6018. this._ubo[name] = {
  6019. buffer: gl.createBuffer(),
  6020. blockBindingIndex: this._nextIndex++,
  6021. blockIndex: -1,
  6022. data: null
  6023. };
  6024. }
  6025. // get UBO entry for the given block name
  6026. const ubo = this._ubo[name];
  6027. // read block index & assign binding point
  6028. if (ubo.blockIndex < 0) {
  6029. const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
  6030. gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
  6031. ubo.blockIndex = blockIndex;
  6032. }
  6033. // store the data - we'll upload it later
  6034. ubo.data = data;
  6035. };
  6036. /**
  6037. * Update UBO data
  6038. * Called when we're using the appropriate WebGLProgram
  6039. */
  6040. UBOHelper.prototype.update = function () {
  6041. const gl = this._gl;
  6042. for (const name in this._ubo) {
  6043. const ubo = this._ubo[name];
  6044. gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
  6045. gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
  6046. gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
  6047. gl.bindBuffer(gl.UNIFORM_BUFFER, null);
  6048. }
  6049. };
  6050. /**
  6051. * Release allocated buffers
  6052. * @returns {null}
  6053. */
  6054. UBOHelper.prototype.release = function () {
  6055. const gl = this._gl;
  6056. for (const name in this._ubo) {
  6057. const ubo = this._ubo[name];
  6058. gl.deleteBuffer(ubo.buffer);
  6059. ubo.data = null;
  6060. }
  6061. return null;
  6062. };
  6063. /**
  6064. * Generates an indexed variable name, as in variable[index]
  6065. * @param {string} variable
  6066. * @param {number} index
  6067. * @returns {string} variable[index]
  6068. */
  6069. function indexedVariable(variable, index) {
  6070. //return `${variable}[${index}]`; // no caching
  6071. // is this cache lookup really faster than string concatenation?
  6072. // what about memory consumption?
  6073. const cache = indexedVariable.cache;
  6074. let nameList = cache.get(variable);
  6075. if (nameList === undefined) cache.set(variable, nameList = []);
  6076. if (nameList[index] === undefined) nameList[index] = `${variable}[${index}]`;
  6077. return nameList[index];
  6078. }
  6079. /** @type {Map<string,string[]>} cached argument names */
  6080. indexedVariable.cache = new Map(); // Object.create(null)
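// e.g., indexedVariable('kernel', 2) returns 'kernel[2]' and memoizes it, so
// programs that receive array arguments on every frame reuse the same strings.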
  6081. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
  6082. /*
  6083. * speedy-vision.js
  6084. * GPU-accelerated Computer Vision for JavaScript
  6085. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6086. *
  6087. * Licensed under the Apache License, Version 2.0 (the "License");
  6088. * you may not use this file except in compliance with the License.
  6089. * You may obtain a copy of the License at
  6090. *
  6091. * http://www.apache.org/licenses/LICENSE-2.0
  6092. *
  6093. * Unless required by applicable law or agreed to in writing, software
  6094. * distributed under the License is distributed on an "AS IS" BASIS,
  6095. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6096. * See the License for the specific language governing permissions and
  6097. * limitations under the License.
  6098. *
  6099. * speedy-program-group.js
  6100. * An abstract group of programs that run on the GPU
  6101. */
  6102. /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
  6103. /**
  6104. * @typedef {object} SpeedyProgramHelpers
  6105. * @property {function(): SpeedyProgramOptions} usesPingpongRendering
  6106. * @property {function(): SpeedyProgramOptions} rendersToCanvas
  6107. */
  6108. /** @const {SpeedyProgramHelpers} Program settings generator */
  6109. const PROGRAM_HELPERS = Object.freeze({
  6110. /**
  6111. * Pingpong Rendering: the output texture of a
  6112. * program cannot be used as an input to itself.
  6113. * This is a convenient helper in these situations
  6114. * @returns {SpeedyProgramOptions}
  6115. */
  6116. usesPingpongRendering() {
  6117. return {
  6118. pingpong: true
  6119. };
  6120. },
  6121. /**
  6122. * Render to canvas
  6123. * Use it when we're supposed to see the texture
  6124. * @returns {SpeedyProgramOptions}
  6125. */
  6126. rendersToCanvas() {
  6127. return {
  6128. renderToTexture: false
  6129. };
  6130. }
  6131. });
  6132. /**
  6133. * SpeedyProgramGroup
  6134. * A semantically correlated group
  6135. * of programs that run on the GPU
  6136. * @abstract
  6137. */
  6138. class SpeedyProgramGroup {
  6139. /**
  6140. * Class constructor
  6141. * @protected
  6142. * @param {SpeedyGPU} gpu
  6143. */
  6144. constructor(gpu) {
  6145. /** @type {SpeedyGPU} GPU-accelerated routines */
  6146. this._gpu = gpu;
  6147. /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
  6148. this._programs = [];
  6149. }
  6150. /**
  6151. * Declare a program
  6152. * @protected
  6153. * @param {string} name Program name
  6154. * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
  6155. * @param {SpeedyProgramOptions} [options] Program settings
  6156. * @returns {this}
  6157. */
  6158. declare(name, builder, options = {}) {
  6159. // lazy instantiation of kernels
  6160. Object.defineProperty(this, name, {
  6161. get: (() => {
  6162. // Why cast a symbol to symbol?
  6163. // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
  6164. const key = /** @type {symbol} */Symbol(name);
  6165. return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
  6166. })()
  6167. });
  6168. return this;
  6169. }
  6170. /**
  6171. * Neat helpers to be used when declaring programs
  6172. * @returns {SpeedyProgramHelpers}
  6173. */
  6174. get program() {
  6175. return PROGRAM_HELPERS;
  6176. }
  6177. /**
  6178. * Releases all programs from this group
  6179. * @returns {null}
  6180. */
  6181. release() {
  6182. for (let i = 0; i < this._programs.length; i++) this._programs[i].release();
  6183. return null;
  6184. }
  6185. /**
  6186. * Spawn a SpeedyProgram
  6187. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6188. * @param {SpeedyProgramOptions} [options] Program settings
  6189. * @returns {SpeedyProgram}
  6190. */
  6191. _createProgram(shaderdecl, options = {}) {
  6192. const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
  6193. this._programs.push(program);
  6194. return program;
  6195. }
  6196. }
  6197. ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
  6198. /*
  6199. * speedy-vision.js
  6200. * GPU-accelerated Computer Vision for JavaScript
  6201. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6202. *
  6203. * Licensed under the Apache License, Version 2.0 (the "License");
  6204. * you may not use this file except in compliance with the License.
  6205. * You may obtain a copy of the License at
  6206. *
  6207. * http://www.apache.org/licenses/LICENSE-2.0
  6208. *
  6209. * Unless required by applicable law or agreed to in writing, software
  6210. * distributed under the License is distributed on an "AS IS" BASIS,
  6211. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6212. * See the License for the specific language governing permissions and
  6213. * limitations under the License.
  6214. *
  6215. * utils.js
  6216. * GPU utilities
  6217. */
  6218. //
  6219. // Shaders
  6220. //
  6221. // Copy image
  6222. const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
  6223. // Copy keypoints
  6224. const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6225. 'TYPE': 1
  6226. }).withArguments('image');
  6227. // Copy 2D vectors
  6228. const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6229. 'TYPE': 2
  6230. }).withArguments('image');
  6231. // Flip y-axis for output
  6232. const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
  6233. // Fill image with a constant
  6234. const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
  6235. // Fill zero or more color components of the input image with a constant value
  6236. const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
6237. // Copy one color component of src (selected by srcComponentId) to zero or more color components of a copy of dest
  6238. const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
  6239. // Scan the entire image and find the minimum & maximum pixel intensity
  6240. const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
  6241. // Compute the partial derivatives of an image
  6242. const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
  6243. /**
  6244. * SpeedyProgramGroupUtils
  6245. * Utility operations
  6246. */
  6247. class SpeedyProgramGroupUtils extends SpeedyProgramGroup {
  6248. /**
  6249. * Class constructor
  6250. * @param {SpeedyGPU} gpu
  6251. */
  6252. constructor(gpu) {
  6253. super(gpu);
  6254. this
  6255. // render to the canvas
  6256. .declare('renderToCanvas', flipY, Object.assign({}, this.program.rendersToCanvas()))
  6257. // copy image
  6258. .declare('copy', copy)
  6259. // copy keypoints
  6260. .declare('copyKeypoints', copyKeypoints)
  6261. // copy 2D vectors
  6262. .declare('copy2DVectors', copy2DVectors)
  6263. // Fill image with a constant
  6264. .declare('fill', fill)
  6265. // Fill zero or more color components of the input image with a constant value
  6266. .declare('fillComponents', fillComponents)
6267. // Copy one color component of src to zero or more color components of a copy of dest
  6268. .declare('copyComponents', copyComponents)
  6269. // find minimum & maximum pixel intensity
  6270. .declare('scanMinMax2D', scanMinMax2D, Object.assign({}, this.program.usesPingpongRendering()))
  6271. // Compute the partial derivatives of an image
  6272. .declare('sobelDerivatives', sobelDerivatives);
  6273. }
  6274. }
  6275. // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
  6276. var convolution = __nested_webpack_require_314174__(1672);
  6277. ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
  6278. /*
  6279. * speedy-vision.js
  6280. * GPU-accelerated Computer Vision for JavaScript
  6281. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6282. *
  6283. * Licensed under the Apache License, Version 2.0 (the "License");
  6284. * you may not use this file except in compliance with the License.
  6285. * You may obtain a copy of the License at
  6286. *
  6287. * http://www.apache.org/licenses/LICENSE-2.0
  6288. *
  6289. * Unless required by applicable law or agreed to in writing, software
  6290. * distributed under the License is distributed on an "AS IS" BASIS,
  6291. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6292. * See the License for the specific language governing permissions and
  6293. * limitations under the License.
  6294. *
  6295. * filters.js
  6296. * Image filtering on the GPU
  6297. */
  6298. //
  6299. // Shaders
  6300. //
  6301. // Convert to greyscale
  6302. const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl').withArguments('image');
  6303. // Convolution
  6304. const filters_convolution = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl').withDefines({
  6305. 'KERNEL_SIZE_SQUARED': ksize * ksize
  6306. }).withArguments('image', 'kernel'), obj), {});
  6307. // Separable convolution
  6308. const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6309. 'KERNEL_SIZE': ksize,
  6310. 'AXIS': 0
  6311. }).withArguments('image', 'kernel'), obj), {});
  6312. const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6313. 'KERNEL_SIZE': ksize,
  6314. 'AXIS': 1
  6315. }).withArguments('image', 'kernel'), obj), {});
  6316. // Median filter
  6317. const median = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl').withDefines({
  6318. 'KERNEL_SIZE': ksize
  6319. }).withArguments('image'), obj), {});
  6320. // Normalize image
  6321. const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6322. 'GREYSCALE': 1
  6323. }).withArguments('minmax2d', 'minValue', 'maxValue');
  6324. const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6325. 'GREYSCALE': 0
  6326. }).withArguments('minmax2dRGB', 'minValue', 'maxValue');
  6327. // Nightvision
  6328. const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6329. 'GREYSCALE': 0
  6330. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6331. const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6332. 'GREYSCALE': 1
  6333. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6334. //
  6335. // Utilities
  6336. //
  6337. // Handy conversion for Gaussian filters
  6338. // (symmetric kernel, approx. zero after 3*sigma)
  6339. const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
  6340. // Generate a 1D Gaussian kernel
  6341. const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
  6342. // Generate a 1D Box filter
  6343. const box = ksize => new Array(ksize).fill(1.0 / ksize);
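// e.g., ksize2sigma(9) = max(1, 9/6) = 1.5, so gaussian(9) is a 9-tap kernel
// with sigma = 1.5 and 3*sigma = 4.5, about half the window, as noted above.
// box(3) is simply [1/3, 1/3, 1/3].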
  6344. /**
  6345. * SpeedyProgramGroupFilters
  6346. * Image filtering
  6347. */
  6348. class SpeedyProgramGroupFilters extends SpeedyProgramGroup {
  6349. /**
  6350. * Class constructor
  6351. * @param {SpeedyGPU} gpu
  6352. */
  6353. constructor(gpu) {
  6354. super(gpu);
  6355. this
  6356. // convert to greyscale
  6357. .declare('rgb2grey', rgb2grey)
  6358. // median filters
  6359. .declare('median3', median[3]) // 3x3 window
  6360. .declare('median5', median[5]) // 5x5 window
  6361. .declare('median7', median[7]) // 7x7 window
  6362. // 2D convolution
  6363. .declare('convolution3', filters_convolution[3]) // 3x3 kernel
  6364. .declare('convolution5', filters_convolution[5]) // 5x5 kernel
  6365. .declare('convolution7', filters_convolution[7]) // 7x7 kernel
  6366. // 1D separable convolution
  6367. .declare('convolution3x', convolutionX[3]) // 1x3 kernel
  6368. .declare('convolution3y', convolutionY[3]) // 3x1 kernel
  6369. .declare('convolution5x', convolutionX[5]) // 1x5 kernel
  6370. .declare('convolution5y', convolutionY[5]) // 5x1 kernel
  6371. .declare('convolution7x', convolutionX[7]).declare('convolution7y', convolutionY[7]).declare('convolution9x', convolutionX[9]).declare('convolution9y', convolutionY[9]).declare('convolution11x', convolutionX[11]).declare('convolution11y', convolutionY[11]).declare('convolution13x', convolutionX[13]).declare('convolution13y', convolutionY[13]).declare('convolution15x', convolutionX[15]).declare('convolution15y', convolutionY[15])
  6372. // normalize image
  6373. .declare('normalizeGreyscale', normalizeGreyscale).declare('normalizeColored', normalizeColored)
  6374. // nightvision
  6375. .declare('nightvision', nightvision).declare('nightvisionGreyscale', nightvisionGreyscale).declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255))).declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
  6376. // gaussian: separable kernels
  6377. // see also: http://dev.theomader.com/gaussian-kernel-calculator/
  6378. .declare('gaussian3x', (0,convolution.convX)([0.25, 0.5, 0.25])) // sigma ~ 1.0
  6379. .declare('gaussian3y', (0,convolution.convY)([0.25, 0.5, 0.25])).declare('gaussian5x', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])) // sigma ~ 1.0
  6380. .declare('gaussian5y', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('gaussian7x', (0,convolution.convX)(gaussian(7))).declare('gaussian7y', (0,convolution.convY)(gaussian(7))).declare('gaussian9x', (0,convolution.convX)(gaussian(9))).declare('gaussian9y', (0,convolution.convY)(gaussian(9))).declare('gaussian11x', (0,convolution.convX)(gaussian(11))).declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
  6381. // box filter: separable kernels
  6382. .declare('box3x', (0,convolution.convX)(box(3))).declare('box3y', (0,convolution.convY)(box(3))).declare('box5x', (0,convolution.convX)(box(5))).declare('box5y', (0,convolution.convY)(box(5))).declare('box7x', (0,convolution.convX)(box(7))).declare('box7y', (0,convolution.convY)(box(7))).declare('box9x', (0,convolution.convX)(box(9))).declare('box9y', (0,convolution.convY)(box(9))).declare('box11x', (0,convolution.convX)(box(11))).declare('box11y', (0,convolution.convY)(box(11)));
  6383. }
  6384. }
  6385. // EXTERNAL MODULE: ./src/core/speedy-namespace.js
  6386. var speedy_namespace = __nested_webpack_require_314174__(6634);
  6387. ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
  6388. /*
  6389. * speedy-vision.js
  6390. * GPU-accelerated Computer Vision for JavaScript
  6391. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6392. *
  6393. * Licensed under the Apache License, Version 2.0 (the "License");
  6394. * you may not use this file except in compliance with the License.
  6395. * You may obtain a copy of the License at
  6396. *
  6397. * http://www.apache.org/licenses/LICENSE-2.0
  6398. *
  6399. * Unless required by applicable law or agreed to in writing, software
  6400. * distributed under the License is distributed on an "AS IS" BASIS,
  6401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6402. * See the License for the specific language governing permissions and
  6403. * limitations under the License.
  6404. *
  6405. * speedy-descriptordb.js
  6406. * A database of binary descriptors in video memory
  6407. */
  6408. //
  6409. // A database of binary descriptors is a texture that stores
  6410. // a set of (descriptor: uint8_t[]) entries.
  6411. //
  6412. /** @type {number} we use RGBA8 textures to store the descriptors */
  6413. const DESCRIPTORDB_BYTESPERPIXEL = 4;
  6414. /** @type {number} texture size goes up to 16 MB */
6415. const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (OpenGL ES 3.0 requires MAX_TEXTURE_SIZE to be at least 2048)
  6416. /**
  6417. * Utility for generating a database of binary descriptors in video memory
  6418. */
  6419. class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q {
  6420. /**
  6421. * Create a database of binary descriptors
  6422. * @param {SpeedyTexture} texture output texture
  6423. * @param {Uint8Array[]} descriptors binary descriptors
  6424. * @param {number} descriptorSize in bytes, a multiple of 4
  6425. * @returns {SpeedyTexture} texture
  6426. */
  6427. static create(texture, descriptors, descriptorSize) {
  6428. utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
  6429. const numberOfDescriptors = descriptors.length;
  6430. const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
  6431. // find an appropriate texture size
  6432. const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
  6433. const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
  6434. // setup texture parameters
  6435. const stride = 1 << log2stride;
  6436. const width = stride,
  6437. height = stride; // we use powers-of-two
  6438. // are we within storage capacity?
  6439. const capacity = width * height / pixelsPerDescriptor;
  6440. if (numberOfDescriptors > capacity) throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
  6441. // create texture data
  6442. const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
  6443. for (let i = 0; i < numberOfDescriptors; i++) {
  6444. const byteOffset = i * descriptorSize;
  6445. const descriptor = descriptors[i];
  6446. // validate input
  6447. utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
  6448. utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
  6449. // write data
  6450. data.set(descriptor, byteOffset);
  6451. }
  6452. // log data for further study
  6453. const MEGABYTE = 1048576;
  6454. const totalSize = numberOfDescriptors * descriptorSize;
  6455. utils/* Utils */.A.log(`Creating a ${width}x${height} database of ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors ` + `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`);
  6456. // upload to the texture
  6457. texture.resize(width, height);
  6458. texture.upload(data);
  6459. return texture;
  6460. }
  6461. }
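// Sizing example (illustrative numbers): 10000 descriptors of 32 bytes each
// take 8 pixels per descriptor, so n = log2(8 * 10000) / 2 ~ 8.14, log2stride
// becomes 9 and we get a 512x512 texture with a capacity of 512*512/8 = 32768
// descriptors. The hard limit, DESCRIPTORDB_MAXLOG2STRIDE = 11, corresponds to
// a 2048x2048 RGBA8 texture, i.e. the 16 MB mentioned above.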
  6462. ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
  6463. /*
  6464. * speedy-vision.js
  6465. * GPU-accelerated Computer Vision for JavaScript
  6466. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6467. *
  6468. * Licensed under the Apache License, Version 2.0 (the "License");
  6469. * you may not use this file except in compliance with the License.
  6470. * You may obtain a copy of the License at
  6471. *
  6472. * http://www.apache.org/licenses/LICENSE-2.0
  6473. *
  6474. * Unless required by applicable law or agreed to in writing, software
  6475. * distributed under the License is distributed on an "AS IS" BASIS,
  6476. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6477. * See the License for the specific language governing permissions and
  6478. * limitations under the License.
  6479. *
  6480. * speedy-lsh.js
  6481. * GPU-based LSH tables for fast matching of binary descriptors
  6482. */
  6483. /*
  6484. * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
  6485. * ------------------------------------------------
  6486. *
  6487. * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
  6488. * Indices of keypoint descriptors are stored in several tables, each with many
  6489. * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
  6490. * size to match the keypoints.
  6491. *
  6492. * Buckets in video memory may get full. Wouldn't it be cool if we could use a
  6493. * probabilistic approach to let us work within their storage capacity?
  6494. *
  6495. * Let there be n buckets in a table, each with storage capacity c (holding
  6496. * up to c elements). Buckets are numbered from 0 to n-1.
  6497. *
  6498. * We pick uniformly a random bucket to store a new element in the table. Let
  6499. * X be the chosen bucket. The probability that we'll store the new element in
  6500. * any particular bucket k is:
  6501. *
  6502. * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
  6503. *
  6504. * On average, each new element stored in the table inserts 1/n of an element
  6505. * in each bucket. If we add m new elements to the table, each bucket receives
  6506. * m/n elements, on average(*).
  6507. *
  6508. * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
  6509. * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
6510. * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
  6511. *
  6512. * Now let Yi be the number of elements inserted in bucket i in m additions to
  6513. * the table. We model Yi as Poisson(m/n), since on average, m additions to
  6514. * the table result in m/n new elements being inserted in bucket i. Buckets
  6515. * are picked independently. Hence, for all i, the probability that we insert
  6516. * q elements in bucket i in m additions to the table is:
  6517. *
  6518. * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
  6519. *
  6520. * Given that each bucket has storage capacity c, we require Yi <= c with a
  6521. * high probability p (say, p = 0.99). This means that, in m additions, we
  6522. * don't want to exceed the capacity c with high probability. So, let us find
  6523. * a (large) value of m such that:
  6524. *
  6525. * P(Yi <= c) >= p
  6526. *
  6527. * Sounds good! We can find the largest matching m using binary search.
  6528. *
  6529. * I don't think we need to enforce a high probability that ALL buckets stay
  6530. * within their capacity - n is large, we need to use the available space, and
  6531. * we have multiple tables anyway.
  6532. *
  6533. * In practice, the assumption that buckets are picked uniformly doesn't hold:
  6534. * keypoints that are nearby tend to have similar descriptors and buckets are
  6535. * picked according to those descriptors. Still, this model works well enough
  6536. * in practice and it is simple! That's what I like about it!
  6537. *
  6538. * ... now, how I actually do the matching is the theme of the next episode!
  6539. */
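// A concrete instance of the model above (illustrative parameters): with
// hashSize = 15 there are n = 2^15 = 32768 buckets per table. With bucket
// capacity c = 4 and p = 0.99, findTableCapacity() - defined at the end of
// this module - searches for the largest m keeping
// cumulativePoisson(m / 32768, 4) above 0.99, which works out to an average
// load of roughly 1.3 descriptors per bucket, i.e. roughly 40000 descriptors.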
  6540. /** @type {number} Default number of tables in a LSH data structure */
  6541. const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
  6542. /** @type {number} Default number of bits of a hash */
  6543. const LSH_DEFAULT_HASH_SIZE = 15;
  6544. /** @type {number[]} Acceptable number of tables for a LSH data structure */
  6545. const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
  6546. /** @type {number[]} Acceptable values for hashSize, in bits */
  6547. const LSH_ACCEPTABLE_HASH_SIZES = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20];
  6548. /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
  6549. const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32, 64];
  6550. /**
  6551. * @typedef {Object} LSHProfile LSH profile
  6552. * @property {string} name name of the profile
  6553. * @property {number} capacity maximum number of keypoints that can be stored in such a table
  6554. * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
  6555. * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
  6556. * @property {number} bucketCapacity maximum number of entries of a bucket of a table
  6557. */
  6558. /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
  6559. const generateLSHProfiles = (t, h, p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [{
  6560. name: 'x-small',
  6561. bucketCapacity: 1,
  6562. tableCount: t,
  6563. hashSize: h,
  6564. capacity: findTableCapacity(h, 1, p)
  6565. }, {
  6566. name: 'small',
  6567. bucketCapacity: 2,
  6568. tableCount: t,
  6569. hashSize: h,
  6570. capacity: findTableCapacity(h, 2, p)
  6571. }, {
  6572. name: 'small-plus',
  6573. bucketCapacity: 3,
  6574. tableCount: t,
  6575. hashSize: h,
  6576. capacity: findTableCapacity(h, 3, p)
  6577. }, {
  6578. name: 'medium',
  6579. bucketCapacity: 4,
  6580. tableCount: t,
  6581. hashSize: h,
  6582. capacity: findTableCapacity(h, 4, p)
  6583. }, {
  6584. name: 'medium-plus',
  6585. bucketCapacity: 5,
  6586. tableCount: t,
  6587. hashSize: h,
  6588. capacity: findTableCapacity(h, 5, p)
  6589. }, {
  6590. name: 'large',
  6591. bucketCapacity: 6,
  6592. tableCount: t,
  6593. hashSize: h,
  6594. capacity: findTableCapacity(h, 6, p)
  6595. }, {
  6596. name: 'x-large',
  6597. bucketCapacity: 8,
  6598. tableCount: t,
  6599. hashSize: h,
  6600. capacity: findTableCapacity(h, 8, p)
  6601. }];
  6602. //
  6603. // LSH hash sequences: random bits in increasing order
  6604. // We generate a few sequences (one for each table) supporting up to 16 hash bits
  6605. // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
  6606. //
  6607. /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
  6608. /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
  6609. /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
  6610. /** @type {number} maximum number of elements of a sequence */
  6611. const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
  6612. /** @type {number} number of sequences in a BitSequences object */
  6613. const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
  6614. /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
  6615. const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i + 1) * LSH_SEQUENCE_MAXLEN).sort()), seq);
  6616. /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
  6617. const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray((i + 1) * LSH_SEQUENCE_MAXLEN - p, (i + 1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)), seq);
  6618. /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
  6619. const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p, o) => (p[o] = f(o), p), {}))(h => ({
  6620. // for 256-bit descriptors
  6621. 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  6622. // for 512-bit descriptors
  6623. 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN))))
  6624. }));
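// e.g., LSH_SEQUENCES[15][32] is a Uint32Array with LSH_SEQUENCE_COUNT = 32
// subsequences of LSH_SEQUENCE_MAXLEN = 20 entries each. In every subsequence,
// the first 15 entries are bit indices in [0, 256) sorted in ascending order
// and the remaining 5 hold the 0xBADCAFE padding; only the first hashSize
// entries of a subsequence are read when hashing (see SpeedyLSH._hashCodes).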
  6625. //
  6626. // Misc
  6627. //
  6628. /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
  6629. const LSH_BYTESPERPIXEL = 4;
  6630. /** @type {function(number): number} next power of 2 */
  6631. const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  6632. /**
  6633. * GPU-based LSH tables for fast matching of binary descriptors
  6634. */
  6635. class SpeedyLSH {
  6636. /**
  6637. * Constructor
  6638. * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
  6639. * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
  6640. * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
  6641. * @param {number} [tableCount] number of LSH tables, preferably a power of two
  6642. * @param {number} [hashSize] number of bits of a hash of a descriptor
  6643. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6644. */
  6645. constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
  6646. const descriptorCount = descriptors.length;
  6647. const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
  6648. const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
  6649. // validate input
  6650. utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
  6651. utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
  6652. utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
  6653. utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
  6654. utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
  6655. /** @type {LSHProfile} LSH profile */
  6656. this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
  6657. /** @type {number} descriptor size, in bytes */
  6658. this._descriptorSize = descriptorSize;
  6659. /** @type {number} number of descriptors */
  6660. this._descriptorCount = descriptorCount;
  6661. /** @type {BitSequences} bit sequences */
  6662. this._sequences = this._pickSequences(this._descriptorSize);
  6663. /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
  6664. this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
  6665. /** @type {SpeedyTexture} a storage of descriptors */
  6666. this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
  6667. }
  6668. /**
  6669. * Descriptor size, in bytes
  6670. * @returns {number}
  6671. */
  6672. get descriptorSize() {
  6673. return this._descriptorSize;
  6674. }
  6675. /**
  6676. * Number of descriptors stored in this LSH data structure
  6677. * @returns {number}
  6678. */
  6679. get descriptorCount() {
  6680. return this._descriptorCount;
  6681. }
  6682. /**
  6683. * LSH bit sequences
  6684. * @returns {BitSequences}
  6685. */
  6686. get sequences() {
  6687. return this._sequences;
  6688. }
  6689. /**
  6690. * Number of bits that make a hash
  6691. * @returns {number}
  6692. */
  6693. get hashSize() {
  6694. return this._profile.hashSize;
  6695. }
  6696. /**
  6697. * Maximum number of descriptors that can be stored in a bucket of a table
  6698. * @returns {number}
  6699. */
  6700. get bucketCapacity() {
  6701. return this._profile.bucketCapacity;
  6702. }
  6703. /**
  6704. * How many buckets per table do we have?
  6705. * @returns {number}
  6706. */
  6707. get bucketsPerTable() {
  6708. return 1 << this._profile.hashSize;
  6709. }
  6710. /**
  6711. * Number of LSH tables
  6712. * @returns {number}
  6713. */
  6714. get tableCount() {
  6715. return this._profile.tableCount;
  6716. }
  6717. /**
  6718. * Size of one LSH table, in bytes
  6719. * @returns {number}
  6720. */
  6721. get tableSize() {
  6722. return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
  6723. }
  6724. /**
  6725. * Size of all LSH tables combined, in bytes
  6726. * @returns {number}
  6727. */
  6728. get totalSize() {
  6729. // actually, the total memory in VRAM may be a bit larger than
  6730. // this value, depending on the actual size of the texture
  6731. return this.tableCount * this.tableSize;
  6732. }
  6733. /**
  6734. * LSH tables texture
  6735. * @returns {SpeedyDrawableTexture}
  6736. */
  6737. get tables() {
  6738. return this._tables;
  6739. }
  6740. /**
  6741. * A collection of descriptors
  6742. * @returns {SpeedyDrawableTexture}
  6743. */
  6744. get descriptorDB() {
  6745. return this._descriptorDB;
  6746. }
  6747. /**
  6748. * Pick the appropriate LSH sequences for a particular descriptor size
  6749. * @param {number} descriptorSize in bytes
  6750. * @returns {BitSequences}
  6751. */
  6752. _pickSequences(descriptorSize) {
  6753. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
  6754. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
  6755. return LSH_SEQUENCES[this.hashSize][descriptorSize];
  6756. }
  6757. /**
  6758. * Create LSH tables
  6759. * @param {SpeedyTexture} texture output texture
  6760. * @param {BitSequences} sequences bit sequences
  6761. * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
  6762. * @param {number} descriptorSize in bytes
  6763. * @returns {SpeedyTexture} texture
  6764. */
  6765. _createStaticTables(texture, sequences, descriptors, descriptorSize) {
  6766. const END_OF_LIST = 0xFFFFFFFF;
  6767. const profileName = this._profile.name;
  6768. const tableCapacity = this._profile.capacity;
  6769. const tableCount = this.tableCount;
  6770. const bucketsPerTable = this.bucketsPerTable;
  6771. const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
  6772. const hashSize = this.hashSize;
  6773. const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
  6774. const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
  6775. const textureHeight = Math.ceil(numberOfPixels / textureWidth);
  6776. const numberOfDescriptors = descriptors.length;
  6777. // validate input
  6778. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
  6779. utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
  6780. utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
  6781. // log
  6782. const MEGABYTE = 1048576;
6783. utils/* Utils */.A.log(`Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` + `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` + `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB)`);
  6784. // warn the user if there are too many descriptors
  6785. if (numberOfDescriptors > tableCapacity) {
  6786. const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
  6787. utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
  6788. }
  6789. // create empty LSH tables
  6790. const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
  6791. const bytes = new Uint8Array(buffer).fill(0xFF);
  6792. const data = new DataView(buffer);
  6793. // shuffle the descriptors...
  6794. // it seems like a good idea to handle collisions of similar descriptors,
  6795. // which may be located next to each other in the array
  6796. const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
  6797. // for each descriptor
  6798. // do everything in little-endian format!
  6799. const numberOfDiscardedDescriptorsPerTable = new Array(tableCount).fill(0);
  6800. for (let i = 0; i < numberOfDescriptors; i++) {
  6801. const descriptorIndex = permutation[i]; //i;
  6802. const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
  6803. // for each table
  6804. for (let table = 0; table < tableCount; table++) {
  6805. // compute hash & memory addresses
  6806. const hash = hashes[table];
  6807. const tableByteOffset = table * bucketsPerTable * bucketSize;
  6808. const bucketByteOffset = tableByteOffset + hash * bucketSize;
  6809. // find the end of the list
  6810. let index = END_OF_LIST;
  6811. for (let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
  6812. const byteOffset = bucketByteOffset + entryByteOffset;
  6813. index = data.getUint32(byteOffset, true);
  6814. // add the keypoint
  6815. if (index == END_OF_LIST) {
  6816. data.setUint32(byteOffset, descriptorIndex, true);
  6817. break;
  6818. }
  6819. }
  6820. // note: if the bucket is full, we just discard the entry :\
  6821. // we give this event a probabilistic treatment (see above),
  6822. // so it happens with low probability
  6823. if (index != END_OF_LIST) numberOfDiscardedDescriptorsPerTable[table]++;
  6824. }
  6825. }
  6826. // log data for further study
  6827. const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
  6828. const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
  6829. utils/* Utils */.A.log(`When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` + `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` + `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` + `Minimum: ${Math.min(...profile).toFixed(2)}%. ` + `Table capacity: ${tableCapacity}.`);
  6830. // upload the LSH tables to the GPU
  6831. texture.resize(textureWidth, textureHeight);
  6832. texture.upload(bytes);
  6833. return texture;
  6834. }
  6835. /**
  6836. * Pick bits from a binary descriptor
  6837. * @param {Uint8Array} descriptor a single descriptor
  6838. * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
  6839. * @returns {number[]} hash code for each table
  6840. */
  6841. _hashCodes(descriptor, sequences) {
  6842. const tableCount = this.tableCount;
  6843. const hashSize = this.hashSize;
  6844. const bucketsPerTable = this.bucketsPerTable;
  6845. const hashes = new Array(tableCount);
  6846. //const descriptorSize = descriptor.length;
  6847. // just to be sure...
  6848. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN && sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount);
  6849. // for each table
  6850. for (let table = 0; table < tableCount; table++) {
  6851. const offset = LSH_SEQUENCE_MAXLEN * table;
  6852. // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
  6853. let hash = 0;
  6854. for (let i = 0; i < hashSize; i++) {
  6855. let bit = sequences[offset + i];
  6856. let b = bit >>> 3;
  6857. let m = 1 << (bit & 7);
  6858. //Utils.assert(b < descriptorSize);
  6859. hash = hash << 1 | (descriptor[b] & m) != 0;
  6860. }
  6861. // validate & store
  6862. utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
  6863. hashes[table] = hash;
  6864. }
  6865. // done!
  6866. return hashes;
  6867. }
  6868. }
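// Bit picking in _hashCodes(), worked out for one entry: a bit index of 37
// selects byte b = 37 >>> 3 = 4 and mask m = 1 << (37 & 7) = 32, so
// (descriptor[4] & 32) != 0 contributes a 0 or 1 that is shifted into the
// hash MSB-first. Each hash is therefore an integer in [0, 2^hashSize).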
  6869. /**
  6870. * Compute P(X <= k), where X ~ Poisson(lambda)
  6871. * @param {number} lambda positive number
  6872. * @param {number} k non-negative integer
  6873. * @returns {number}
  6874. */
  6875. function cumulativePoisson(lambda, k) {
  6876. const exp = Math.exp(-lambda);
  6877. let sum = 1,
  6878. fat = 1,
  6879. pow = 1;
  6880. // k should be small!!!
  6881. for (let i = 1; i <= k; i++) sum += (pow *= lambda) / (fat *= i);
  6882. return sum * exp;
  6883. }
  6884. /**
  6885. * Find the maximum number of keypoint descriptors that a table can hold
  6886. * @param {number} hashSize positive integer
  6887. * @param {number} bucketCapacity positive integer
  6888. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6889. * @return {number} optimal table capacity
  6890. */
  6891. function findTableCapacity(hashSize, bucketCapacity, probability = 0.99) {
  6892. const n = 1 << hashSize; // number of buckets
  6893. const c = bucketCapacity;
  6894. const p = probability;
  6895. let l = 1,
  6896. r = n * c; // watch for overflow!
  6897. let m = 0,
  6898. pm = 0;
  6899. // binary search
  6900. while (l < r) {
  6901. m = Math.floor((l + r) / 2);
  6902. pm = cumulativePoisson(m / n, c);
  6903. if (pm > p)
  6904. //if(1-pm < 1-p)
  6905. l = m + 1;else r = m;
  6906. }
  6907. return m;
  6908. }
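// Note: cumulativePoisson(m / n, c) decreases as m grows, so the binary search
// above homes in on the threshold where P(Yi <= c) crosses p and returns the
// last midpoint as the capacity. generateLSHProfiles() uses, for instance,
// findTableCapacity(hashSize, 4, probability) to size the 'medium' profile.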
  6909. ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
  6910. /*
  6911. * speedy-vision.js
  6912. * GPU-accelerated Computer Vision for JavaScript
  6913. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6914. *
  6915. * Licensed under the Apache License, Version 2.0 (the "License");
  6916. * you may not use this file except in compliance with the License.
  6917. * You may obtain a copy of the License at
  6918. *
  6919. * http://www.apache.org/licenses/LICENSE-2.0
  6920. *
  6921. * Unless required by applicable law or agreed to in writing, software
  6922. * distributed under the License is distributed on an "AS IS" BASIS,
  6923. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6924. * See the License for the specific language governing permissions and
  6925. * limitations under the License.
  6926. *
  6927. * keypoints.js
  6928. * Facade for various keypoint detection algorithms
  6929. */
  6930. // FAST corner detector
  6931. const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl').withDefines({
  6932. 'FAST_TYPE': 916
  6933. }).withArguments('corners', 'pyramid', 'lod', 'threshold');
  6934. // Harris corner detector
  6935. const harris = [1, 3, 5, 7].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl').withDefines({
  6936. 'WINDOW_SIZE': win
  6937. }).withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian'), obj), {});
  6938. const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl').withArguments('corners', 'iterationNumber');
  6939. const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl').withArguments('corners', 'maxScore', 'quality');
  6940. // Subpixel refinement
  6941. const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6942. 'METHOD': 0
  6943. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6944. const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6945. 'METHOD': 1
  6946. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6947. const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6948. 'METHOD': 2
  6949. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6950. const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6951. 'METHOD': 3
  6952. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6953. // Scale refinement
  6954. const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6955. 'METHOD': 0
  6956. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6957. const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6958. 'METHOD': 1
  6959. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  6960. // Pixel allocation
  6961. const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6962. const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6963. const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl').withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6964. // ORB descriptors
  6965. const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl').withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
  6966. const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl').withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6967. // Non-maximum suppression
  6968. const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6969. 'MULTISCALE': 0
  6970. }).withArguments('image', 'lodStep');
  6971. const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6972. 'MULTISCALE': 1
  6973. }).withArguments('image', 'lodStep');
  6974. const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl').withArguments('corners');
  6975. const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6976. 'USE_LAPLACIAN': 1
  6977. }).withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
  6978. const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6979. 'USE_LAPLACIAN': 0
  6980. }).withArguments('corners', 'pyramid', 'lodStep');
  6981. const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl').withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
  6982. // Keypoint tracking & optical-flow
  6983. const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl').withDefines({
  6984. 'WINDOW_SIZE': win
  6985. }).withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {});
  6986. const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl').withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6987. // Brute-force matching
  6988. const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6989. 'ENCODE_FILTERS': 0
  6990. });
  6991. const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6992. 'ENCODE_FILTERS': 1
  6993. });
  6994. const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  6995. const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  6996. 'DESCRIPTOR_SIZE': 32,
  6997. 'NUMBER_OF_KEYPOINTS_PER_PASS': 16
  6998. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  6999. const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  7000. 'DESCRIPTOR_SIZE': 64,
  7001. 'NUMBER_OF_KEYPOINTS_PER_PASS': 8
  7002. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7003. // LSH-based KNN matching
  7004. const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7005. 'ENCODE_FILTERS': 0
  7006. });
  7007. const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7008. 'ENCODE_FILTERS': 1
  7009. });
  7010. const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => (obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => (obj[hashSize] = [0, 1, 2].reduce((obj, level) => (obj[level] = (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl').withDefines({
  7011. 'DESCRIPTOR_SIZE': descriptorSize,
  7012. 'HASH_SIZE': hashSize,
  7013. 'LEVEL': level,
  7014. 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
  7015. 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT
  7016. }).withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {}), obj), {}), obj), {});
  7017. const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7018. // Keypoint sorting
  7019. const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7020. 'STAGE': 1
  7021. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7022. const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7023. 'STAGE': 2
  7024. }).withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
  7025. const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7026. 'STAGE': 3
  7027. }).withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
  7028. // Keypoint mixing
  7029. const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7030. 'STAGE': 1
  7031. }).withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
  7032. const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7033. 'STAGE': 2
  7034. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7035. const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7036. 'STAGE': 3
  7037. }).withArguments('array', 'blockSize');
  7038. const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7039. 'STAGE': 5
  7040. }).withArguments('array');
  7041. const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7042. 'STAGE': 4
  7043. }).withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7044. // Keypoint encoding
  7045. const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7046. 'FS_OUTPUT_TYPE': 2,
  7047. 'STAGE': 1
  7048. }).withArguments('corners');
  7049. const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl').withDefines({
  7050. 'FS_OUTPUT_TYPE': 2,
  7051. 'FS_USE_CUSTOM_PRECISION': 1,
  7052. 'STAGE': 2
  7053. }).withArguments('lookupTable', 'blockSize', 'width', 'height');
  7054. const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7055. 'STAGE': -1
  7056. }).withArguments('lookupTable');
  7057. const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl').withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
  7058. const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl').withArguments('corners', 'imageSize');
  7059. const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl').withDefines({
  7060. 'MAX_ITERATIONS': 6
  7061. }) // dependent texture reads :(
  7062. .withArguments('offsetsImage', 'imageSize');
  7063. const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl').withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7064. const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl').withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7065. const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl').withArguments();
  7066. const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl').withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7067. const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl').withDefines({
  7068. // UBOs can hold at least 16KB of data;
  7069. // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
  7070. // according to the GL ES 3 reference.
  7071. // Each keypoint uses 16 bytes (vec4)
  7072. 'BUFFER_SIZE': 1024 //16384 / 16
  7073. }).withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
  7074. // Geometric transformations
  7075. const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl').withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7076. // Keypoint filters
  7077. const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl').withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7078. const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl').withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7079. const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7080. 'DESCRIPTOR_SIZE': 32
  7081. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7082. const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7083. 'DESCRIPTOR_SIZE': 64
  7084. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7085. // Other utilities
  7086. const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl').withDefines({
  7087. 'PERMUTATION_MAXLEN': 2048
  7088. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7089. const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl').withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7090. /**
  7091. * SpeedyProgramGroupKeypoints
  7092. * Keypoint detection
  7093. */
  7094. class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup {
  7095. /**
  7096. * Class constructor
  7097. * @param {SpeedyGPU} gpu
  7098. */
  7099. constructor(gpu) {
  7100. super(gpu);
  7101. this
  7102. //
  7103. // FAST corner detector
  7104. //
  7105. .declare('fast9_16', fast9_16, Object.assign({}, this.program.usesPingpongRendering()))
  7106. //
  7107. // Harris corner detector
  7108. //
  7109. .declare('harris1', harris[1], Object.assign({}, this.program.usesPingpongRendering())).declare('harris3', harris[3], Object.assign({}, this.program.usesPingpongRendering())).declare('harris5', harris[5], Object.assign({}, this.program.usesPingpongRendering())).declare('harris7', harris[7], Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreFindMax', harrisScoreFindMax, Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreCutoff', harrisScoreCutoff)
  7110. //
  7111. // Subpixel refinement
  7112. //
  7113. .declare('subpixelQuadratic1d', subpixelQuadratic1d).declare('subpixelTaylor2d', subpixelTaylor2d).declare('subpixelBicubic', subpixelBicubic).declare('subpixelBilinear', subpixelBilinear)
  7114. //
  7115. // Scale refinement
  7116. //
  7117. .declare('refineScaleLoG', refineScaleLoG).declare('refineScaleFAST916', refineScaleFAST916)
  7118. //
  7119. // Pixel allocation
  7120. //
  7121. .declare('allocateDescriptors', allocateDescriptors).declare('allocateExtra', allocateExtra).declare('transferToExtra', transferToExtra)
  7122. //
  7123. // ORB descriptors
  7124. //
  7125. .declare('orbDescriptor', orbDescriptor).declare('orbOrientation', orbOrientation)
  7126. //
  7127. // Non-maximum suppression
  7128. //
  7129. .declare('nonmax', nonMaxSuppression).declare('pyrnonmax', multiscaleNonMaxSuppression).declare('nonmaxSpace', nonmaxSpace).declare('nonmaxScale', nonmaxScale).declare('nonmaxScaleSimple', nonmaxScaleSimple).declare('laplacian', laplacian)
  7130. //
  7131. // LK optical-flow
  7132. //
  7133. .declare('lk21', lk[21], Object.assign({}, this.program.usesPingpongRendering())).declare('lk19', lk[19], Object.assign({}, this.program.usesPingpongRendering())).declare('lk17', lk[17], Object.assign({}, this.program.usesPingpongRendering())).declare('lk15', lk[15], Object.assign({}, this.program.usesPingpongRendering())).declare('lk13', lk[13], Object.assign({}, this.program.usesPingpongRendering())).declare('lk11', lk[11], Object.assign({}, this.program.usesPingpongRendering())).declare('lk9', lk[9], Object.assign({}, this.program.usesPingpongRendering())).declare('lk7', lk[7], Object.assign({}, this.program.usesPingpongRendering())).declare('lk5', lk[5], Object.assign({}, this.program.usesPingpongRendering())).declare('lk3', lk[3], Object.assign({}, this.program.usesPingpongRendering())).declare('transferFlow', transferFlow)
  7134. //
  7135. // Brute-force KNN matching
  7136. //
  7137. .declare('bfMatcherInitCandidates', bfMatcherInitCandidates).declare('bfMatcherInitFilters', bfMatcherInitFilters).declare('bfMatcherTransfer', bfMatcherTransfer, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher32', bfMatcher32, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher64', bfMatcher64, Object.assign({}, this.program.usesPingpongRendering()))
  7138. //
  7139. // LSH-based KNN matching
  7140. //
  7141. .declare('lshKnnInitCandidates', lshKnnInitCandidates).declare('lshKnnInitFilters', lshKnnInitFilters).declare('lshKnnTransfer', lshKnnTransfer, Object.assign({}, this.program.usesPingpongRendering()))
  7142. //
  7143. // Keypoint sorting
  7144. //
  7145. .declare('sortCreatePermutation', sortCreatePermutation).declare('sortMergePermutation', sortMergePermutation, Object.assign({}, this.program.usesPingpongRendering())).declare('sortApplyPermutation', sortApplyPermutation)
  7146. //
  7147. // Keypoint mixing
  7148. //
  7149. .declare('mixKeypointsPreInit', mixKeypointsPreInit).declare('mixKeypointsInit', mixKeypointsInit).declare('mixKeypointsSort', mixKeypointsSort, Object.assign({}, this.program.usesPingpongRendering())).declare('mixKeypointsView', mixKeypointsView).declare('mixKeypointsApply', mixKeypointsApply)
  7150. //
  7151. // Keypoint encoders
  7152. //
  7153. .declare('encodeNullKeypoints', encodeNullKeypoints).declare('encodeKeypoints', encodeKeypoints).declare('initLookupTable', initLookupTable).declare('sortLookupTable', sortLookupTable, Object.assign({}, this.program.usesPingpongRendering())).declare('viewLookupTable', viewLookupTable).declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets).declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointPositions', encodeKeypointPositions, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointProperties', encodeKeypointProperties).declare('transferOrientation', transferOrientation).declare('uploadKeypoints', uploadKeypoints, Object.assign({}, this.program.usesPingpongRendering()))
  7154. //
  7155. // Geometric transformations
  7156. //
  7157. .declare('applyHomography', applyHomography)
  7158. //
  7159. // Keypoint filters
  7160. //
  7161. .declare('clipBorder', clipBorder).declare('distanceFilter', distanceFilter).declare('hammingDistanceFilter32', hammingDistanceFilter32).declare('hammingDistanceFilter64', hammingDistanceFilter64)
  7162. //
  7163. // Other utilities
  7164. //
  7165. .declare('shuffle', shuffle).declare('clip', clip);
  7166. //
  7167. // LSH-based KNN matching
  7168. //
  7169. for (const descriptorSize of Object.keys(lshKnn)) {
  7170. for (const hashSize of Object.keys(lshKnn[descriptorSize])) {
  7171. for (const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
  7172. const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
  7173. this.declare(name, lshKnn[descriptorSize][hashSize][level], Object.assign({}, this.program.usesPingpongRendering()));
  7174. }
  7175. }
  7176. }
  7177. }
  7178. }
  7179. ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
  7180. /*
  7181. * speedy-vision.js
  7182. * GPU-accelerated Computer Vision for JavaScript
  7183. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7184. *
  7185. * Licensed under the Apache License, Version 2.0 (the "License");
  7186. * you may not use this file except in compliance with the License.
  7187. * You may obtain a copy of the License at
  7188. *
  7189. * http://www.apache.org/licenses/LICENSE-2.0
  7190. *
  7191. * Unless required by applicable law or agreed to in writing, software
  7192. * distributed under the License is distributed on an "AS IS" BASIS,
  7193. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7194. * See the License for the specific language governing permissions and
  7195. * limitations under the License.
  7196. *
  7197. * pyramids.js
  7198. * Image pyramids
  7199. */
  7200. //
  7201. // Shaders
  7202. //
  7203. const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
  7204. const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
  7205. /**
  7206. * SpeedyProgramGroupPyramids
  7207. * Image pyramids
  7208. */
  7209. class SpeedyProgramGroupPyramids extends SpeedyProgramGroup {
  7210. /**
  7211. * Class constructor
  7212. * @param {SpeedyGPU} gpu
  7213. */
  7214. constructor(gpu) {
  7215. super(gpu);
  7216. this
  7217. // upsampling & downsampling
  7218. .declare('upsample2', upsample2).declare('downsample2', downsample2)
  7219. // separable kernels for gaussian smoothing
  7220. // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
  7221. // pick a = 0.4 for gaussian approximation (sigma = 1)
  7222. .declare('smoothX', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('smoothY', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
  7223. /*
  7224. .declare('reduce', conv2D([
  7225. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
  7226. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7227. 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
  7228. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7229. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
  7230. ]))
  7231. */
  7232. // smoothing for 2x image
  7233. // same rules as above with sum(k) = 2
  7234. .declare('smoothX2', (0,convolution.convX)([0.1, 0.5, 0.8, 0.5, 0.1
  7235. // NOTE: this would saturate the image, but we apply it
  7236. // on a 2x upsampled version with lots of zero pixels
  7237. ])).declare('smoothY2', (0,convolution.convY)([0.1, 0.5, 0.8, 0.5, 0.1], 1.0 / 2.0));
  7238. }
  7239. }
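/*
 * Quick check of the kernel constraints mentioned in the constructor above
 * (illustrative): with [c, b, a, b, c] = [0.05, 0.25, 0.4, 0.25, 0.05],
 * a + 2c = 0.4 + 0.1 = 0.5 = 2b, and a + 2b + 2c = 0.4 + 0.5 + 0.1 = 1.
 * The 2x kernel [0.1, 0.5, 0.8, 0.5, 0.1] follows the same proportions but
 * sums to 2, as stated above.
 */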
  7240. ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
  7241. /*
  7242. * speedy-vision.js
  7243. * GPU-accelerated Computer Vision for JavaScript
  7244. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7245. *
  7246. * Licensed under the Apache License, Version 2.0 (the "License");
  7247. * you may not use this file except in compliance with the License.
  7248. * You may obtain a copy of the License at
  7249. *
  7250. * http://www.apache.org/licenses/LICENSE-2.0
  7251. *
  7252. * Unless required by applicable law or agreed to in writing, software
  7253. * distributed under the License is distributed on an "AS IS" BASIS,
  7254. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7255. * See the License for the specific language governing permissions and
  7256. * limitations under the License.
  7257. *
  7258. * transforms.js
  7259. * Geometric transformations
  7260. */
  7261. //
  7262. // Shaders
  7263. //
  7264. // Perspective warp
  7265. const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl').withArguments('image', 'inverseHomography');
  7266. // Resize image
  7267. const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7268. 'INTERPOLATION_METHOD': 0 // Nearest neighbors
  7269. }).withArguments('image');
  7270. const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7271. 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
  7272. }).withArguments('image');
  7273. // Additive mix (TODO create a new program group?)
  7274. const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl').withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
  7275. /**
  7276. * SpeedyProgramGroupTransforms
  7277. * Geometric transformations
  7278. */
  7279. class SpeedyProgramGroupTransforms extends SpeedyProgramGroup {
  7280. /**
  7281. * Class constructor
  7282. * @param {SpeedyGPU} gpu
  7283. */
  7284. constructor(gpu) {
  7285. super(gpu);
  7286. this.declare('warpPerspective', warpPerspective).declare('resizeNearest', resizeNearest).declare('resizeBilinear', resizeBilinear).declare('additiveMix', additiveMix);
  7287. }
  7288. }
  7289. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
  7290. /*
  7291. * speedy-vision.js
  7292. * GPU-accelerated Computer Vision for JavaScript
  7293. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7294. *
  7295. * Licensed under the Apache License, Version 2.0 (the "License");
  7296. * you may not use this file except in compliance with the License.
  7297. * You may obtain a copy of the License at
  7298. *
  7299. * http://www.apache.org/licenses/LICENSE-2.0
  7300. *
  7301. * Unless required by applicable law or agreed to in writing, software
  7302. * distributed under the License is distributed on an "AS IS" BASIS,
  7303. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7304. * See the License for the specific language governing permissions and
  7305. * limitations under the License.
  7306. *
  7307. * speedy-program-center.js
  7308. * An access point to all programs that run on the GPU
  7309. */
  7310. /**
7311. * An access point to all programs that run on the GPU
  7312. * All program groups can be accessed via this class
  7313. */
  7314. class SpeedyProgramCenter {
  7315. /**
  7316. * Class constructor
  7317. * @param {SpeedyGPU} gpu reference to SpeedyGPU
  7318. */
  7319. constructor(gpu) {
  7320. // Note: we instantiate the program groups lazily
  7321. /** @type {SpeedyGPU} reference to SpeedyGPU */
  7322. this._gpu = gpu;
  7323. /** @type {SpeedyProgramGroupFilters} image filters */
  7324. this._filters = null;
  7325. /** @type {SpeedyProgramGroupTransforms} geometric transformations */
  7326. this._transforms = null;
  7327. /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
  7328. this._pyramids = null;
  7329. /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
  7330. this._keypoints = null;
  7331. /** @type {SpeedyProgramGroupUtils} utility programs */
  7332. this._utils = null;
  7333. }
  7334. /**
  7335. * Image filters & convolutions
  7336. * @returns {SpeedyProgramGroupFilters}
  7337. */
  7338. get filters() {
  7339. return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
  7340. }
  7341. /**
  7342. * Geometric transformations
  7343. * @returns {SpeedyProgramGroupTransforms}
  7344. */
  7345. get transforms() {
  7346. return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
  7347. }
  7348. /**
  7349. * Image pyramids & scale-space
  7350. * @returns {SpeedyProgramGroupPyramids}
  7351. */
  7352. get pyramids() {
  7353. return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
  7354. }
  7355. /**
  7356. * Keypoint detection & description
  7357. * @returns {SpeedyProgramGroupKeypoints}
  7358. */
  7359. get keypoints() {
  7360. return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
  7361. }
  7362. /**
  7363. * Utility programs
  7364. * @returns {SpeedyProgramGroupUtils}
  7365. */
  7366. get utils() {
  7367. return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
  7368. }
  7369. /**
  7370. * Release all programs from all groups. You'll
  7371. * no longer be able to use any of them.
  7372. * @returns {null}
  7373. */
  7374. release() {
  7375. for (const key in this) {
  7376. if (Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
  7377. const group = this[key];
  7378. if (group instanceof SpeedyProgramGroup) group.release();
  7379. }
  7380. }
  7381. return null;
  7382. }
  7383. }
  7384. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
  7385. /*
  7386. * speedy-vision.js
  7387. * GPU-accelerated Computer Vision for JavaScript
  7388. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7389. *
  7390. * Licensed under the Apache License, Version 2.0 (the "License");
  7391. * you may not use this file except in compliance with the License.
  7392. * You may obtain a copy of the License at
  7393. *
  7394. * http://www.apache.org/licenses/LICENSE-2.0
  7395. *
  7396. * Unless required by applicable law or agreed to in writing, software
  7397. * distributed under the License is distributed on an "AS IS" BASIS,
  7398. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7399. * See the License for the specific language governing permissions and
  7400. * limitations under the License.
  7401. *
  7402. * speedy-texture-pool.js
  7403. * Texture pool
  7404. */
  7405. // Constants
  7406. const DEFAULT_CAPACITY = 1024;
  7407. const BUCKET = Symbol('Bucket');
  7408. /*
  7409. === Heuristics to figure out the capacity of a texture pool ===
  7410. 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
  7411. 2. Figure out the average texture size in your application (say, 640x360 pixels).
  7412. 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
  7413. uses 4 bytes (RGBA format).
  7414. 4. Divide the maximum amount of VRAM by the average texture size in bytes
  7415. (say, 72). That's the capacity of the pool.
  7416. Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
  7417. Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  7418. */
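/*
 * Worked example of the heuristic above (illustrative):
 *
 *   const maxVRAM = 64 * 1024 * 1024;              // 1. VRAM budget: 64 MB
 *   const avgTextureBytes = 640 * 360 * 4;         // 2-3. average texture: 921600 bytes (RGBA)
 *   const capacity = Math.floor(maxVRAM / avgTextureBytes); // 4. capacity: 72
 */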
  7419. /**
  7420. * @typedef {number} TextureBucketIndex index of a bucket in a pool
  7421. */
  7422. /**
  7423. * A bucket
  7424. */
  7425. class TextureBucket {
  7426. /**
  7427. * Constructor
  7428. * @param {SpeedyDrawableTexture} texture managed texture
  7429. * @param {TextureBucketIndex} index index of this bucket
  7430. * @param {TextureBucketIndex} next index of the next bucket
  7431. */
  7432. constructor(texture, index, next) {
  7433. /** @type {SpeedyDrawableTexture} managed texture */
  7434. this.texture = texture;
  7435. /** @type {TextureBucketIndex} index of this bucket */
  7436. this.index = index;
  7437. /** @type {TextureBucketIndex} index of the next bucket */
  7438. this.next = next;
  7439. /** @type {boolean} whether the texture is available or not */
  7440. this.free = true;
  7441. }
  7442. }
  7443. /**
  7444. * Texture pool
  7445. */
  7446. class SpeedyTexturePool {
  7447. /**
  7448. * Constructor
  7449. * @param {SpeedyGPU} gpu
  7450. * @param {number} [capacity] number of textures in the pool
  7451. */
  7452. constructor(gpu, capacity = DEFAULT_CAPACITY) {
  7453. utils/* Utils */.A.assert(capacity > 0);
  7454. /** @type {TextureBucket[]} buckets */
  7455. this._bucket = Array.from({
  7456. length: capacity
  7457. }, (_, i) => new TextureBucket(null, i, i - 1));
  7458. /** @type {TextureBucketIndex} index of an available bucket */
  7459. this._head = capacity - 1;
  7460. /** @type {SpeedyGPU} GPU instance */
  7461. this._gpu = gpu;
  7462. }
  7463. /**
  7464. * Get a texture from the pool
  7465. * @returns {SpeedyDrawableTexture}
  7466. */
  7467. allocate() {
  7468. if (this._head < 0) throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
  7469. const bucket = this._bucket[this._head];
  7470. bucket.free = false;
  7471. this._head = bucket.next;
  7472. if (bucket.texture == null)
  7473. // lazy instantiation
  7474. bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
  7475. return bucket.texture;
  7476. }
  7477. /**
  7478. * Put a texture back in the pool
  7479. * @param {SpeedyDrawableTexture} texture
  7480. * @returns {null}
  7481. */
  7482. free(texture) {
  7483. const bucket = texture[BUCKET];
  7484. utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
  7485. bucket.next = this._head;
  7486. bucket.free = true;
  7487. this._head = bucket.index;
  7488. return null;
  7489. }
  7490. /**
  7491. * Release the texture pool
  7492. * @returns {null}
  7493. */
  7494. release() {
  7495. for (let i = 0; i < this._bucket.length; i++) {
  7496. if (this._bucket[i].texture != null) this._bucket[i].texture = this._bucket[i].texture.release();
  7497. }
  7498. return null;
  7499. }
  7500. /**
  7501. * Create a texture with a reference to a bucket
  7502. * @param {WebGL2RenderingContext} gl
  7503. * @param {TextureBucket} bucket
  7504. * @returns {SpeedyDrawableTexture}
  7505. */
  7506. static _createManagedTexture(gl, bucket) {
  7507. const texture = new SpeedyDrawableTexture(gl, 1, 1);
  7508. return Object.defineProperty(texture, BUCKET, {
  7509. configurable: false,
  7510. enumerable: false,
  7511. writable: false,
  7512. value: bucket
  7513. });
  7514. }
  7515. }
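/*
 * Usage sketch (illustrative; assumes a SpeedyGPU instance named `gpu`):
 *
 *   const texture = gpu.texturePool.allocate(); // O(1): pops the head of the free-list
 *   // ... use the texture as a temporary work buffer ...
 *   gpu.texturePool.free(texture);              // O(1): pushes the bucket back
 *
 * Every allocated texture must be returned with free(); freeing an unmanaged
 * texture or freeing the same texture twice trips the assertion in free().
 */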
  7516. // EXTERNAL MODULE: ./src/utils/types.js
  7517. var types = __nested_webpack_require_314174__(6049);
  7518. ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
  7519. /*
  7520. * speedy-vision.js
  7521. * GPU-accelerated Computer Vision for JavaScript
  7522. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7523. *
  7524. * Licensed under the Apache License, Version 2.0 (the "License");
  7525. * you may not use this file except in compliance with the License.
  7526. * You may obtain a copy of the License at
  7527. *
  7528. * http://www.apache.org/licenses/LICENSE-2.0
  7529. *
  7530. * Unless required by applicable law or agreed to in writing, software
  7531. * distributed under the License is distributed on an "AS IS" BASIS,
  7532. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7533. * See the License for the specific language governing permissions and
  7534. * limitations under the License.
  7535. *
  7536. * speedy-media-source.js
  7537. * Wrappers around <img>, <video>, <canvas>, etc.
  7538. */
  7539. /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
  7540. /** Internal token for protected constructors */
  7541. const PRIVATE_TOKEN = Symbol();
  7542. /**
  7543. * An abstract media source: a wrapper around native
  7544. * elements such as: HTMLImageElement, HTMLVideoElement,
  7545. * and so on
  7546. * @abstract
  7547. */
  7548. class SpeedyMediaSource {
  7549. /**
  7550. * @protected Constructor
  7551. * @param {symbol} token
  7552. */
  7553. constructor(token) {
  7554. // the constructor is not public
  7555. if (token !== PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  7556. /** @type {SpeedyMediaSourceNativeElement} underlying media object */
  7557. this._data = null;
  7558. }
  7559. /**
  7560. * Load a media source
  7561. * @param {SpeedyMediaSourceNativeElement} wrappedObject
  7562. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7563. */
  7564. static load(wrappedObject) {
  7565. if (wrappedObject instanceof HTMLImageElement) return SpeedyImageMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLVideoElement) return SpeedyVideoMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLCanvasElement) return SpeedyCanvasMediaSource.load(wrappedObject);else if (typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas) return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageBitmap) return SpeedyBitmapMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageData) return SpeedyDataMediaSource.load(wrappedObject);else throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
  7566. }
  7567. /**
  7568. * The underlying wrapped object
  7569. * @returns {SpeedyMediaSourceNativeElement}
  7570. */
  7571. get data() {
  7572. return this._data;
  7573. }
  7574. /**
  7575. * Is the underlying media loaded?
  7576. * @returns {boolean}
  7577. */
  7578. isLoaded() {
  7579. return this._data !== null;
  7580. }
  7581. /**
  7582. * The type of the underlying media source
  7583. * @abstract
  7584. * @returns {MediaType}
  7585. */
  7586. get type() {
  7587. throw new utils_errors/* AbstractMethodError */.aQ();
  7588. }
  7589. /**
  7590. * Media width, in pixels
  7591. * @abstract
  7592. * @returns {number}
  7593. */
  7594. get width() {
  7595. throw new utils_errors/* AbstractMethodError */.aQ();
  7596. }
  7597. /**
  7598. * Media height, in pixels
  7599. * @abstract
  7600. * @returns {number}
  7601. */
  7602. get height() {
  7603. throw new utils_errors/* AbstractMethodError */.aQ();
  7604. }
  7605. /**
  7606. * Clone this media source
  7607. * @abstract
  7608. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7609. */
  7610. clone() {
  7611. throw new utils_errors/* AbstractMethodError */.aQ();
  7612. }
  7613. /**
  7614. * Release resources associated with this object
  7615. * @returns {null}
  7616. */
  7617. release() {
  7618. return this._data = null;
  7619. }
  7620. /**
  7621. * Load the underlying media
  7622. * @abstract
  7623. * @param {SpeedyMediaSourceNativeElement} element
  7624. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7625. */
  7626. _load(element) {
  7627. throw new utils_errors/* AbstractMethodError */.aQ();
  7628. }
  7629. /**
  7630. * Wait for an event to be triggered in an element
  7631. * @param {Element} element
  7632. * @param {string} eventName
  7633. * @param {number} [timeout] in ms
  7634. * @returns {SpeedyPromise<Element>}
  7635. */
  7636. static _waitUntil(element, eventName, timeout = 30000) {
  7637. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7638. utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
  7639. const timer = setTimeout(() => {
  7640. clear();
  7641. reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
  7642. }, timeout);
  7643. function clear() {
  7644. clearTimeout(timer);
  7645. element.removeEventListener('error', handleError, false);
  7646. element.removeEventListener(eventName, handleSuccess, false);
  7647. }
  7648. function handleError() {
  7649. const hasError = element.error !== null && typeof element.error === 'object';
  7650. const error = hasError ? element.error : {
  7651. code: -1,
  7652. message: ''
  7653. };
  7654. const info = `${error.message} (error code ${error.code})`;
  7655. clear();
  7656. reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
  7657. }
  7658. function handleSuccess() {
  7659. clear();
  7660. resolve(element);
  7661. }
  7662. element.addEventListener('error', handleError, false);
  7663. element.addEventListener(eventName, handleSuccess, false);
  7664. });
  7665. }
  7666. }
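/*
 * Usage sketch (illustrative): wrapping an <img> element.
 *
 *   const img = document.querySelector('img');
 *   SpeedyMediaSource.load(img).then(source => {
 *     console.log(source.type, source.width, source.height);
 *   });
 *
 * load() dispatches to the appropriate wrapper (image, video, canvas,
 * offscreen canvas, bitmap or ImageData) and resolves once the underlying
 * media is ready to be used.
 */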
  7667. /**
  7668. * Image media source:
  7669. * a wrapper around HTMLImageElement
  7670. */
  7671. class SpeedyImageMediaSource extends SpeedyMediaSource {
  7672. /**
  7673. * @private Constructor
  7674. * @param {symbol} token
  7675. */
  7676. constructor(token) {
  7677. super(token);
  7678. /** @type {HTMLImageElement} image element */
  7679. this._data = null;
  7680. }
  7681. /**
  7682. * The underlying wrapped object
  7683. * @returns {HTMLImageElement}
  7684. */
  7685. get data() {
  7686. return this._data;
  7687. }
  7688. /**
  7689. * The type of the underlying media source
  7690. * @returns {MediaType}
  7691. */
  7692. get type() {
  7693. return types/* MediaType */.zu.Image;
  7694. }
  7695. /**
  7696. * Media width, in pixels
  7697. * @returns {number}
  7698. */
  7699. get width() {
  7700. return this._data ? this._data.naturalWidth : 0;
  7701. }
  7702. /**
  7703. * Media height, in pixels
  7704. * @returns {number}
  7705. */
  7706. get height() {
  7707. return this._data ? this._data.naturalHeight : 0;
  7708. }
  7709. /**
  7710. * Clone this media source
  7711. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7712. */
  7713. clone() {
  7714. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7715. const newNode = /** @type {HTMLImageElement} */this._data.cloneNode(true);
  7716. return SpeedyImageMediaSource.load(newNode);
  7717. }
  7718. /**
  7719. * Load the underlying media
  7720. * @param {HTMLImageElement} image
  7721. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7722. */
  7723. _load(image) {
  7724. if (this.isLoaded()) this.release();
  7725. if (image.complete && image.naturalWidth !== 0) {
  7726. // already loaded?
  7727. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7728. this._data = image;
  7729. resolve(this);
  7730. });
  7731. } else {
  7732. return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
  7733. this._data = image;
  7734. return this;
  7735. });
  7736. }
  7737. }
  7738. /**
  7739. * Load the underlying media
  7740. * @param {HTMLImageElement} image
  7741. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7742. */
  7743. static load(image) {
  7744. return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
  7745. }
  7746. }
  7747. /**
  7748. * Video media source:
  7749. * a wrapper around HTMLVideoElement
  7750. */
  7751. class SpeedyVideoMediaSource extends SpeedyMediaSource {
  7752. /**
  7753. * @private Constructor
  7754. * @param {symbol} token
  7755. */
  7756. constructor(token) {
  7757. super(token);
  7758. /** @type {HTMLVideoElement} video element */
  7759. this._data = null;
  7760. }
  7761. /**
  7762. * The underlying wrapped object
  7763. * @returns {HTMLVideoElement}
  7764. */
  7765. get data() {
  7766. return this._data;
  7767. }
  7768. /**
  7769. * The type of the underlying media source
  7770. * @returns {MediaType}
  7771. */
  7772. get type() {
  7773. return types/* MediaType */.zu.Video;
  7774. }
  7775. /**
  7776. * Media width, in pixels
  7777. * @returns {number}
  7778. */
  7779. get width() {
  7780. // Warning: videoWidth & videoHeight may change at any time !!!
  7781. // so you can't cache these dimensions
  7782. return this._data ? this._data.videoWidth : 0;
  7783. }
  7784. /**
  7785. * Media height, in pixels
  7786. * @returns {number}
  7787. */
  7788. get height() {
  7789. return this._data ? this._data.videoHeight : 0;
  7790. }
  7791. /**
  7792. * Clone this media source
  7793. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7794. */
  7795. clone() {
  7796. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7797. const newNode = /** @type {HTMLVideoElement} */this._data.cloneNode(true);
  7798. return SpeedyVideoMediaSource.load(newNode);
  7799. }
  7800. /**
  7801. * Load the underlying media
  7802. * @param {HTMLVideoElement} video
  7803. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7804. */
  7805. _load(video) {
  7806. if (this.isLoaded()) this.release();
  7807. if (video.readyState >= 4) {
  7808. // already loaded?
  7809. return this._handleAutoplay(video).then(() => {
  7810. this._data = video;
  7811. return this;
  7812. });
  7813. } else {
  7814. // waitUntil('canplay'); // use readyState >= 3
  7815. setTimeout(() => video.load());
  7816. return SpeedyMediaSource._waitUntil(video, 'canplaythrough').then(() => {
  7817. return this._handleAutoplay(video).then(() => {
  7818. this._data = video;
  7819. return this;
  7820. });
  7821. });
  7822. }
  7823. }
  7824. /**
  7825. * Handle browser quirks concerning autoplay
  7826. * @param {HTMLVideoElement} video
  7827. * @returns {SpeedyPromise<void>} gets rejected if we can't autoplay
  7828. */
  7829. _handleAutoplay(video) {
  7830. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  7831. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  7832. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  7833. // videos marked as autoplay may not play if not visible on-screen
  7834. // videos marked as autoplay should be muted
  7835. if (video.autoplay /*&& video.muted*/) {
  7836. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7837. const promise = video.play();
  7838. // handle older browsers
  7839. if (promise === undefined) {
  7840. resolve();
  7841. return;
  7842. }
  7843. // wrap promise
  7844. promise.then(resolve, reject);
  7845. });
  7846. }
  7847. // nothing to do
  7848. return speedy_promise/* SpeedyPromise */.i.resolve();
  7849. }
  7850. /**
  7851. * Load the underlying media
  7852. * @param {HTMLVideoElement} video
  7853. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7854. */
  7855. static load(video) {
  7856. return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
  7857. }
  7858. }
  7859. /**
  7860. * Canvas media source:
  7861. * a wrapper around HTMLCanvasElement
  7862. */
  7863. class SpeedyCanvasMediaSource extends SpeedyMediaSource {
  7864. /**
  7865. * @private Constructor
  7866. * @param {symbol} token
  7867. */
  7868. constructor(token) {
  7869. super(token);
  7870. /** @type {HTMLCanvasElement} canvas element */
  7871. this._data = null;
  7872. }
  7873. /**
  7874. * The underlying wrapped object
  7875. * @returns {HTMLCanvasElement}
  7876. */
  7877. get data() {
  7878. return this._data;
  7879. }
  7880. /**
  7881. * The type of the underlying media source
  7882. * @returns {MediaType}
  7883. */
  7884. get type() {
  7885. return types/* MediaType */.zu.Canvas;
  7886. }
  7887. /**
  7888. * Media width, in pixels
  7889. * @returns {number}
  7890. */
  7891. get width() {
  7892. return this._data ? this._data.width : 0;
  7893. }
  7894. /**
  7895. * Media height, in pixels
  7896. * @returns {number}
  7897. */
  7898. get height() {
  7899. return this._data ? this._data.height : 0;
  7900. }
  7901. /**
  7902. * Clone this media source
  7903. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7904. */
  7905. clone() {
  7906. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7907. const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
  7908. const newContext = newCanvas.getContext('2d');
  7909. newContext.drawImage(this._data, 0, 0);
  7910. return SpeedyCanvasMediaSource.load(newCanvas);
  7911. }
  7912. /**
  7913. * Load the underlying media
  7914. * @param {HTMLCanvasElement} canvas
  7915. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7916. */
  7917. _load(canvas) {
  7918. if (this.isLoaded()) this.release();
  7919. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7920. this._data = canvas;
  7921. resolve(this);
  7922. });
  7923. }
  7924. /**
  7925. * Load the underlying media
  7926. * @param {HTMLCanvasElement} canvas
  7927. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7928. */
  7929. static load(canvas) {
  7930. return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
  7931. }
  7932. }
  7933. /**
  7934. * OffscreenCanvas media source:
  7935. * a wrapper around OffscreenCanvas
  7936. */
  7937. class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource {
  7938. /**
  7939. * @private Constructor
  7940. * @param {symbol} token
  7941. */
  7942. constructor(token) {
  7943. super(token);
  7944. /** @type {OffscreenCanvas} offscreen canvas element */
  7945. this._data = null;
  7946. }
  7947. /**
  7948. * The underlying wrapped object
  7949. * @returns {OffscreenCanvas}
  7950. */
  7951. get data() {
  7952. return this._data;
  7953. }
  7954. /**
  7955. * The type of the underlying media source
  7956. * @returns {MediaType}
  7957. */
  7958. get type() {
  7959. return types/* MediaType */.zu.OffscreenCanvas;
  7960. }
  7961. /**
  7962. * Media width, in pixels
  7963. * @returns {number}
  7964. */
  7965. get width() {
  7966. return this._data ? this._data.width : 0;
  7967. }
  7968. /**
  7969. * Media height, in pixels
  7970. * @returns {number}
  7971. */
  7972. get height() {
  7973. return this._data ? this._data.height : 0;
  7974. }
  7975. /**
  7976. * Clone this media source
  7977. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7978. */
  7979. clone() {
  7980. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7981. const newCanvas = new OffscreenCanvas(this.width, this.height);
  7982. const newContext = newCanvas.getContext('2d');
  7983. newContext.drawImage(this._data, 0, 0);
  7984. return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
  7985. }
  7986. /**
  7987. * Load the underlying media
  7988. * @param {OffscreenCanvas} offscreenCanvas
  7989. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7990. */
  7991. _load(offscreenCanvas) {
  7992. if (this.isLoaded()) this.release();
  7993. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7994. this._data = offscreenCanvas;
  7995. resolve(this);
  7996. });
  7997. }
  7998. /**
  7999. * Load the underlying media
  8000. * @param {OffscreenCanvas} offscreenCanvas
  8001. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8002. */
  8003. static load(offscreenCanvas) {
  8004. return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
  8005. }
  8006. }
  8007. /**
  8008. * Bitmap media source:
  8009. * a wrapper around ImageBitmap
  8010. */
  8011. class SpeedyBitmapMediaSource extends SpeedyMediaSource {
  8012. /**
  8013. * @private Constructor
  8014. * @param {symbol} token
  8015. */
  8016. constructor(token) {
  8017. super(token);
  8018. /** @type {ImageBitmap} image bitmap */
  8019. this._data = null;
  8020. }
  8021. /**
  8022. * The underlying wrapped object
  8023. * @returns {ImageBitmap}
  8024. */
  8025. get data() {
  8026. return this._data;
  8027. }
  8028. /**
  8029. * The type of the underlying media source
  8030. * @returns {MediaType}
  8031. */
  8032. get type() {
  8033. return types/* MediaType */.zu.Bitmap;
  8034. }
  8035. /**
  8036. * Media width, in pixels
  8037. * @returns {number}
  8038. */
  8039. get width() {
  8040. return this._data ? this._data.width : 0;
  8041. }
  8042. /**
  8043. * Media height, in pixels
  8044. * @returns {number}
  8045. */
  8046. get height() {
  8047. return this._data ? this._data.height : 0;
  8048. }
  8049. /**
  8050. * Clone this media source
  8051. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8052. */
  8053. clone() {
  8054. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8055. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  8056. createImageBitmap(this._data).then(newBitmap => {
  8057. const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
  8058. newSource._load(newBitmap).then(resolve, reject);
  8059. }, reject);
  8060. });
  8061. }
  8062. /**
  8063. * Release resources associated with this object
  8064. * @returns {null}
  8065. */
  8066. release() {
  8067. if (this._data != null) this._data.close();
  8068. return super.release();
  8069. }
  8070. /**
  8071. * Load the underlying media
  8072. * @param {ImageBitmap} bitmap
  8073. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8074. */
  8075. _load(bitmap) {
  8076. if (this.isLoaded()) this.release();
  8077. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8078. this._data = bitmap;
  8079. resolve(this);
  8080. });
  8081. }
  8082. /**
  8083. * Load the underlying media
  8084. * @param {ImageBitmap} bitmap
  8085. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8086. */
  8087. static load(bitmap) {
  8088. return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
  8089. }
  8090. }
  8091. /**
  8092. * Data media source:
  8093. * a wrapper around ImageData
  8094. */
  8095. class SpeedyDataMediaSource extends SpeedyMediaSource {
  8096. /**
  8097. * @private Constructor
  8098. * @param {symbol} token
  8099. */
  8100. constructor(token) {
  8101. super(token);
  8102. /** @type {ImageData} image data */
  8103. this._data = null;
  8104. }
  8105. /**
  8106. * The underlying wrapped object
  8107. * @returns {ImageData}
  8108. */
  8109. get data() {
  8110. return this._data;
  8111. }
  8112. /**
  8113. * The type of the underlying media source
  8114. * @returns {MediaType}
  8115. */
  8116. get type() {
  8117. return types/* MediaType */.zu.Data;
  8118. }
  8119. /**
  8120. * Media width, in pixels
  8121. * @returns {number}
  8122. */
  8123. get width() {
  8124. return this._data ? this._data.width : 0;
  8125. }
  8126. /**
  8127. * Media height, in pixels
  8128. * @returns {number}
  8129. */
  8130. get height() {
  8131. return this._data ? this._data.height : 0;
  8132. }
  8133. /**
  8134. * Clone this media source
  8135. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8136. */
  8137. clone() {
  8138. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8139. const imageDataCopy = new ImageData(new Uint8ClampedArray(this._data.data), this._data.width, this._data.height);
  8140. return SpeedyDataMediaSource.load(imageDataCopy);
  8141. }
  8142. /**
  8143. * Load the underlying media
  8144. * @param {ImageData} imageData
  8145. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8146. */
  8147. _load(imageData) {
  8148. if (this.isLoaded()) this.release();
  8149. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8150. this._data = imageData;
  8151. resolve(this);
  8152. });
  8153. }
  8154. /**
  8155. * Load the underlying media
  8156. * @param {ImageData} imageData
  8157. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8158. */
  8159. static load(imageData) {
  8160. return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
  8161. }
  8162. }
  8163. // EXTERNAL MODULE: ./src/utils/observable.js
  8164. var observable = __nested_webpack_require_314174__(3211);
  8165. ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
  8166. /*
  8167. * speedy-vision.js
  8168. * GPU-accelerated Computer Vision for JavaScript
  8169. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8170. *
  8171. * Licensed under the Apache License, Version 2.0 (the "License");
  8172. * you may not use this file except in compliance with the License.
  8173. * You may obtain a copy of the License at
  8174. *
  8175. * http://www.apache.org/licenses/LICENSE-2.0
  8176. *
  8177. * Unless required by applicable law or agreed to in writing, software
  8178. * distributed under the License is distributed on an "AS IS" BASIS,
  8179. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8180. * See the License for the specific language governing permissions and
  8181. * limitations under the License.
  8182. *
  8183. * speedy-gpu.js
  8184. * GPU-accelerated routines for Computer Vision
  8185. */
  8186. /**
  8187. * GPU-accelerated routines for Computer Vision
  8188. */
  8189. class SpeedyGPU extends observable/* Observable */.c {
  8190. /**
  8191. * Constructor
  8192. */
  8193. constructor() {
  8194. super();
  8195. /** @type {SpeedyGL} cached reference */
  8196. this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
  8197. /** @type {SpeedyProgramCenter} GPU-based programs */
  8198. this._programs = new SpeedyProgramCenter(this);
  8199. /** @type {SpeedyTexturePool} texture pool */
  8200. this._texturePool = new SpeedyTexturePool(this);
  8201. // recreate the state if necessary
  8202. this._speedyGL.subscribe(this._reset, this);
  8203. }
  8204. /**
  8205. * Access point to all GPU programs
  8206. * @returns {SpeedyProgramCenter}
  8207. */
  8208. get programs() {
  8209. return this._programs;
  8210. }
  8211. /**
  8212. * The WebGL Rendering Context
  8213. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  8214. * @returns {WebGL2RenderingContext}
  8215. */
  8216. get gl() {
  8217. return this._speedyGL.gl;
  8218. }
  8219. /**
  8220. * Internal canvas
  8221. * @returns {HTMLCanvasElement}
  8222. */
  8223. get canvas() {
  8224. return this._speedyGL.canvas;
  8225. }
  8226. /**
  8227. * Texture pool
  8228. * @returns {SpeedyTexturePool}
  8229. */
  8230. get texturePool() {
  8231. return this._texturePool;
  8232. }
  8233. /**
  8234. * Renders a texture to the canvas
  8235. * @param {SpeedyTexture} texture
  8236. * @returns {HTMLCanvasElement} returned for convenience
  8237. */
  8238. renderToCanvas(texture) {
  8239. const width = texture.width;
  8240. const height = texture.height;
  8241. const canvas = this.canvas;
  8242. // do we need to resize the canvas?
  8243. if (width > canvas.width || height > canvas.height) {
  8244. utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
  8245. canvas.width = width;
  8246. canvas.height = height;
  8247. }
  8248. // render
  8249. this.programs.utils.renderToCanvas.outputs(width, height, null);
  8250. this.programs.utils.renderToCanvas(texture);
  8251. // done!
  8252. return canvas;
  8253. }
  8254. /**
  8255. * Upload an image to the GPU
  8256. * @param {SpeedyMediaSource} source
  8257. * @param {SpeedyTexture} outputTexture
  8258. * @returns {SpeedyTexture} outputTexture
  8259. */
  8260. upload(source, outputTexture) {
  8261. return outputTexture.upload(source.data, source.width, source.height);
  8262. }
  8263. /**
  8264. * Releases resources
  8265. * @returns {null}
  8266. */
  8267. release() {
  8268. utils/* Utils */.A.assert(!this.isReleased());
  8269. // release internal components
  8270. this._programs = this._programs.release();
  8271. this._texturePool = this._texturePool.release();
  8272. // unsubscribe
  8273. this._speedyGL.unsubscribe(this._reset);
  8274. return null;
  8275. }
  8276. /**
  8277. * Has this SpeedyGPU been released?
  8278. * @returns {boolean}
  8279. */
  8280. isReleased() {
  8281. return this._programs == null;
  8282. }
  8283. /**
  8284. * Lose & restore the WebGL context (useful for testing purposes)
  8285. * @return {SpeedyPromise<void>} resolves as soon as the context is restored
  8286. */
  8287. loseAndRestoreWebGLContext() {
  8288. return this._speedyGL.loseAndRestoreContext().then(() => void 0);
  8289. }
  8290. /**
  8291. * Reset the internal state
  8292. * (called on context reset)
  8293. */
  8294. _reset() {
  8295. if (this.isReleased()) return;
  8296. this._programs = new SpeedyProgramCenter(this);
  8297. this._texturePool = new SpeedyTexturePool(this);
  8298. this._notify();
  8299. }
  8300. }
  8301. ;// CONCATENATED MODULE: ./src/core/speedy-size.js
  8302. /*
  8303. * speedy-vision.js
  8304. * GPU-accelerated Computer Vision for JavaScript
  8305. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8306. *
  8307. * Licensed under the Apache License, Version 2.0 (the "License");
  8308. * you may not use this file except in compliance with the License.
  8309. * You may obtain a copy of the License at
  8310. *
  8311. * http://www.apache.org/licenses/LICENSE-2.0
  8312. *
  8313. * Unless required by applicable law or agreed to in writing, software
  8314. * distributed under the License is distributed on an "AS IS" BASIS,
  8315. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8316. * See the License for the specific language governing permissions and
  8317. * limitations under the License.
  8318. *
  8319. * speedy-size.js
  8320. * Size of a rectangle
  8321. */
  8322. /**
  8323. * Size of a rectangle
  8324. */
  8325. class SpeedySize {
  8326. /**
  8327. * Constructor
  8328. * @param {number} width non-negative number
  8329. * @param {number} height non-negative number
  8330. */
  8331. constructor(width, height) {
  8332. /** @type {number} width */
  8333. this._width = Math.max(0, +width);
  8334. /** @type {number} height */
  8335. this._height = Math.max(0, +height);
  8336. }
  8337. //
  8338. // ===== METHODS =====
  8339. //
  8340. /**
  8341. * Width
  8342. * @returns {number}
  8343. */
  8344. get width() {
  8345. return this._width;
  8346. }
  8347. /**
  8348. * Width
  8349. * @param {number} value
  8350. */
  8351. set width(value) {
  8352. this._width = Math.max(0, +value);
  8353. }
  8354. /**
  8355. * Height
  8356. * @returns {number}
  8357. */
  8358. get height() {
  8359. return this._height;
  8360. }
  8361. /**
  8362. * Height
  8363. * @param {number} value
  8364. */
  8365. set height(value) {
  8366. this._height = Math.max(0, +value);
  8367. }
  8368. /**
  8369. * Convert to string
  8370. * @returns {string}
  8371. */
  8372. toString() {
  8373. return `SpeedySize(${this.width}, ${this.height})`;
  8374. }
  8375. /**
  8376. * Is this size equal to anotherSize?
  8377. * @param {SpeedySize} anotherSize
  8378. * @returns {boolean}
  8379. */
  8380. equals(anotherSize) {
  8381. return this.width === anotherSize.width && this.height === anotherSize.height;
  8382. }
  8383. /**
  8384. * The area of the rectangle
  8385. * @returns {number}
  8386. */
  8387. area() {
  8388. return this.width * this.height;
  8389. }
  8390. }
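/*
 * Usage sketch for SpeedySize (arbitrary values; width and height are clamped to
 * non-negative numbers by the constructor and the setters):
 *
 *   const size = new SpeedySize(640, 480);
 *   console.log(size.toString());                     // "SpeedySize(640, 480)"
 *   console.log(size.area());                         // 307200
 *   size.width = -10;                                 // clamped to 0
 *   console.log(size.equals(new SpeedySize(0, 480))); // true
 */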
  8391. ;// CONCATENATED MODULE: ./src/core/speedy-media.js
  8392. /*
  8393. * speedy-vision.js
  8394. * GPU-accelerated Computer Vision for JavaScript
  8395. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8396. *
  8397. * Licensed under the Apache License, Version 2.0 (the "License");
  8398. * you may not use this file except in compliance with the License.
  8399. * You may obtain a copy of the License at
  8400. *
  8401. * http://www.apache.org/licenses/LICENSE-2.0
  8402. *
  8403. * Unless required by applicable law or agreed to in writing, software
  8404. * distributed under the License is distributed on an "AS IS" BASIS,
  8405. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8406. * See the License for the specific language governing permissions and
  8407. * limitations under the License.
  8408. *
  8409. * speedy-media.js
  8410. * SpeedyMedia implementation
  8411. */
  8412. /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  8413. /**
  8414. * @typedef {object} SpeedyMediaOptions
  8415. * @property {ImageFormat} [format] default is RGBA
  8416. */
  8417. /** A helper used to keep the constructor of SpeedyMedia private */
  8418. const speedy_media_PRIVATE_TOKEN = Symbol();
  8419. /**
  8420. * SpeedyMedia encapsulates a media element
  8421. * (e.g., image, video, canvas)
  8422. */
  8423. class SpeedyMedia {
  8424. /**
  8425. * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
  8426. * @param {symbol} token
  8427. * @param {SpeedyMediaSource} source
  8428. * @param {SpeedyMediaOptions} [options] options object
  8429. */
  8430. constructor(token, source, options = {}) {
  8431. // private constructor
  8432. if (token !== speedy_media_PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  8433. /** @type {SpeedyMediaSource} media source */
  8434. this._source = source;
  8435. /** @type {ImageFormat} format */
  8436. this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
  8437. /** @type {SpeedyMediaOptions} options */
  8438. this._options = Object.freeze(Object.assign(Object.assign({}, options), {}, {
  8439. format: this._format
  8440. }));
  8441. // validate
  8442. if (!source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);else if (this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
  8443. }
  8444. /**
  8445. * Load a media source
  8446. * Will wait until the HTML media source is loaded
  8447. * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
  8448. * @param {SpeedyMediaOptions} [options] options object
  8449. * @param {boolean} [log] show log message?
  8450. * @returns {SpeedyPromise<SpeedyMedia>}
  8451. */
  8452. static load(mediaSource, options = {}, log = true) {
  8453. return SpeedyMediaSource.load(mediaSource).then(source => {
  8454. utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
  8455. // FIXME user could pass an invalid format in options if ImageFormat is made public
  8456. const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
  8457. // show log message
  8458. if (log) utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
  8459. // done!
  8460. return media;
  8461. });
  8462. }
  8463. /**
  8464. * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
  8465. * @returns {SpeedyMediaSourceNativeElement} the media element
  8466. */
  8467. get source() {
  8468. return this._source ? this._source.data : null;
  8469. }
  8470. /**
  8471. * The type of the media attached to this SpeedyMedia object
  8472. * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
  8473. */
  8474. get type() {
  8475. if (this.isReleased()) return 'unknown';
  8476. switch (this._source.type) {
  8477. case types/* MediaType */.zu.Image:
  8478. return 'image';
  8479. case types/* MediaType */.zu.Video:
  8480. return 'video';
  8481. case types/* MediaType */.zu.Canvas:
  8482. return 'canvas';
  8483. case types/* MediaType */.zu.OffscreenCanvas:
  8484. return 'offscreen-canvas';
  8485. case types/* MediaType */.zu.Bitmap:
  8486. return 'bitmap';
  8487. case types/* MediaType */.zu.Data:
  8488. return 'data';
  8489. default:
  8490. // this shouldn't happen
  8491. return 'unknown';
  8492. }
  8493. }
  8494. /**
  8495. * Gets the width of the media
  8496. * @returns {number} media width
  8497. */
  8498. get width() {
  8499. return this._source ? this._source.width : 0;
  8500. }
  8501. /**
  8502. * Gets the height of the media
  8503. * @returns {number} media height
  8504. */
  8505. get height() {
  8506. return this._source ? this._source.height : 0;
  8507. }
  8508. /**
  8509. * The size of this media, in pixels
  8510. * @returns {SpeedySize}
  8511. */
  8512. get size() {
  8513. return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
  8514. }
  8515. /**
  8516. * Returns a read-only object featuring advanced options
  8517. * related to this SpeedyMedia object
  8518. * @returns {SpeedyMediaOptions}
  8519. */
  8520. get options() {
  8521. return this._options;
  8522. }
  8523. /**
  8524. * Releases resources associated with this media
  8525. * @returns {null}
  8526. */
  8527. release() {
  8528. if (!this.isReleased()) {
  8529. utils/* Utils */.A.log('Releasing SpeedyMedia object...');
  8530. this._source = this._source.release();
  8531. }
  8532. return null;
  8533. }
  8534. /**
  8535. * Has this media been released?
  8536. * @returns {boolean}
  8537. */
  8538. isReleased() {
  8539. return this._source == null;
  8540. }
  8541. /**
  8542. * Clones the SpeedyMedia object
  8543. * @returns {SpeedyPromise<SpeedyMedia>} a clone object
  8544. */
  8545. clone() {
  8546. // has the media been released?
  8547. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
  8548. // clone the object
  8549. const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
  8550. // done!
  8551. return speedy_promise/* SpeedyPromise */.i.resolve(clone);
  8552. }
  8553. /**
  8554. * Converts the media to an ImageBitmap
  8555. * @returns {SpeedyPromise<ImageBitmap>}
  8556. */
  8557. toBitmap() {
  8558. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');else if (!this._source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');else if (this._source.type == types/* MediaType */.zu.Bitmap) return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);else return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
  8559. }
  8560. }
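/*
 * Usage sketch for SpeedyMedia.load() (the <video> element is illustrative; images,
 * canvases and other supported media sources work the same way, and options.format
 * defaults to RGBA when omitted):
 *
 *   const video = document.querySelector('video');
 *   SpeedyMedia.load(video).then(media => {
 *     console.log(media.type, media.width, media.height); // e.g. "video" 1280 720
 *     return media.toBitmap();                            // SpeedyPromise<ImageBitmap>
 *   }).then(bitmap => {
 *     console.log(bitmap.width, bitmap.height);
 *   });
 */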
  8561. ;// CONCATENATED MODULE: ./src/core/speedy-platform.js
  8562. /*
  8563. * speedy-vision.js
  8564. * GPU-accelerated Computer Vision for JavaScript
  8565. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8566. *
  8567. * Licensed under the Apache License, Version 2.0 (the "License");
  8568. * you may not use this file except in compliance with the License.
  8569. * You may obtain a copy of the License at
  8570. *
  8571. * http://www.apache.org/licenses/LICENSE-2.0
  8572. *
  8573. * Unless required by applicable law or agreed to in writing, software
  8574. * distributed under the License is distributed on an "AS IS" BASIS,
  8575. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8576. * See the License for the specific language governing permissions and
  8577. * limitations under the License.
  8578. *
  8579. * speedy-platform.js
  8580. * Utilities to query information about the graphics driver
  8581. */
  8582. /**
  8583. * Utilities to query information about the graphics driver. This information
  8584. * may or may not be available, depending on the privacy settings of the web
  8585. * browser. In addition, it may be more or less accurate in different browsers.
  8586. */
  8587. class SpeedyPlatform extends speedy_namespace/* SpeedyNamespace */.Q {
  8588. /**
  8589. * Renderer string of the graphics driver
  8590. * @returns {string}
  8591. */
  8592. static get renderer() {
  8593. return speedy_gl/* SpeedyGL */.c.instance.renderer;
  8594. }
  8595. /**
  8596. * Vendor string of the graphics driver
  8597. * @returns {string}
  8598. */
  8599. static get vendor() {
  8600. return speedy_gl/* SpeedyGL */.c.instance.vendor;
  8601. }
  8602. }
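/*
 * Usage sketch for SpeedyPlatform (both getters are strings that may be generic or
 * inaccurate depending on the browser's privacy settings, as noted above):
 *
 *   console.log(SpeedyPlatform.renderer); // graphics driver renderer string
 *   console.log(SpeedyPlatform.vendor);   // graphics driver vendor string
 */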
  8603. ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
  8604. /*
  8605. * speedy-vision.js
  8606. * GPU-accelerated Computer Vision for JavaScript
  8607. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8608. *
  8609. * Licensed under the Apache License, Version 2.0 (the "License");
  8610. * you may not use this file except in compliance with the License.
  8611. * You may obtain a copy of the License at
  8612. *
  8613. * http://www.apache.org/licenses/LICENSE-2.0
  8614. *
  8615. * Unless required by applicable law or agreed to in writing, software
  8616. * distributed under the License is distributed on an "AS IS" BASIS,
  8617. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8618. * See the License for the specific language governing permissions and
  8619. * limitations under the License.
  8620. *
  8621. * speedy-vector.js
  8622. * Vectors
  8623. */
  8624. /**
  8625. * 2D vector of floating-point numbers
  8626. */
  8627. class SpeedyVector2 {
  8628. /**
  8629. * Create a 2D vector
  8630. * @param {number} x
  8631. * @param {number} y
  8632. */
  8633. constructor(x, y) {
  8634. /** @type {number} x coordinate */
  8635. this._x = +x;
  8636. /** @type {number} y coordinate */
  8637. this._y = +y;
  8638. }
  8639. //
  8640. // ===== METHODS =====
  8641. //
  8642. /**
  8643. * x-coordinate
  8644. * @returns {number}
  8645. */
  8646. get x() {
  8647. return this._x;
  8648. }
  8649. /**
  8650. * x-coordinate
  8651. * @param {number} value
  8652. */
  8653. set x(value) {
  8654. this._x = +value;
  8655. }
  8656. /**
  8657. * y-coordinate
  8658. * @returns {number}
  8659. */
  8660. get y() {
  8661. return this._y;
  8662. }
  8663. /**
  8664. * y-coordinate
  8665. * @param {number} value
  8666. */
  8667. set y(value) {
  8668. this._y = +value;
  8669. }
  8670. /**
  8671. * Convert to string
  8672. * @returns {string}
  8673. */
  8674. toString() {
  8675. return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8676. }
  8677. /**
  8678. * Is this vector equal to v?
  8679. * @param {SpeedyVector2} v
  8680. * @returns {boolean}
  8681. */
  8682. equals(v) {
  8683. return this.x === v.x && this.y === v.y;
  8684. }
  8685. /**
  8686. * Dot product between this vector and another vector
  8687. * @param {SpeedyVector2} v another vector
  8688. * @returns {number}
  8689. */
  8690. dot(v) {
  8691. return this.x * v.x + this.y * v.y;
  8692. }
  8693. /**
  8694. * The distance between this vector and another vector
  8695. * @param {SpeedyVector2} v another vector
  8696. * @returns {number}
  8697. */
  8698. distanceTo(v) {
  8699. const dx = this.x - v.x;
  8700. const dy = this.y - v.y;
  8701. return Math.sqrt(dx * dx + dy * dy);
  8702. }
  8703. /**
  8704. * Euclidean norm
  8705. * @returns {number}
  8706. */
  8707. length() {
  8708. return Math.sqrt(this.x * this.x + this.y * this.y);
  8709. }
  8710. /**
  8711. * Returns a normalized version of this vector
  8712. * @returns {SpeedyVector2}
  8713. */
  8714. normalized() {
  8715. const len = this.length();
  8716. if (len > 0.0) return new SpeedyVector2(this.x / len, this.y / len);else return new SpeedyVector2(0.0, 0.0);
  8717. }
  8718. /**
  8719. * Returns a copy of this vector translated by offset
  8720. * @param {SpeedyVector2} offset
  8721. * @returns {SpeedyVector2}
  8722. */
  8723. plus(offset) {
  8724. return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
  8725. }
  8726. /**
  8727. * Returns a copy of this vector translated by -offset
  8728. * @param {SpeedyVector2} offset
  8729. * @returns {SpeedyVector2}
  8730. */
  8731. minus(offset) {
  8732. return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
  8733. }
  8734. /**
  8735. * Returns a copy of this vector scaled by a scalar
  8736. * @param {number} scalar
  8737. * @returns {SpeedyVector2}
  8738. */
  8739. times(scalar) {
  8740. return new SpeedyVector2(this.x * scalar, this.y * scalar);
  8741. }
  8742. }
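/*
 * Usage sketch for SpeedyVector2 (arbitrary values; all operations below return new
 * vectors and do not mutate their operands):
 *
 *   const v = new SpeedyVector2(3, 4);
 *   console.log(v.length());                    // 5
 *   console.log(v.normalized().toString());     // "SpeedyVector2(0.60000, 0.80000)"
 *   const w = v.plus(new SpeedyVector2(1, -4)); // (4, 0)
 *   console.log(v.dot(w));                      // 12
 *   console.log(v.distanceTo(w));               // ~4.123
 */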
  8743. ;// CONCATENATED MODULE: ./src/core/speedy-point.js
  8744. /*
  8745. * speedy-vision.js
  8746. * GPU-accelerated Computer Vision for JavaScript
  8747. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8748. *
  8749. * Licensed under the Apache License, Version 2.0 (the "License");
  8750. * you may not use this file except in compliance with the License.
  8751. * You may obtain a copy of the License at
  8752. *
  8753. * http://www.apache.org/licenses/LICENSE-2.0
  8754. *
  8755. * Unless required by applicable law or agreed to in writing, software
  8756. * distributed under the License is distributed on an "AS IS" BASIS,
  8757. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8758. * See the License for the specific language governing permissions and
  8759. * limitations under the License.
  8760. *
  8761. * speedy-point.js
  8762. * Points in space
  8763. */
  8764. /**
  8765. * 2D point
  8766. */
  8767. class SpeedyPoint2 {
  8768. /**
  8769. * Create a 2D point
  8770. * @param {number} x
  8771. * @param {number} y
  8772. */
  8773. constructor(x, y) {
  8774. /** @type {number} x coordinate */
  8775. this._x = +x;
  8776. /** @type {number} y coordinate */
  8777. this._y = +y;
  8778. }
  8779. //
  8780. // ===== METHODS =====
  8781. //
  8782. /**
  8783. * x-coordinate
  8784. * @returns {number}
  8785. */
  8786. get x() {
  8787. return this._x;
  8788. }
  8789. /**
  8790. * x-coordinate
  8791. * @param {number} value
  8792. */
  8793. set x(value) {
  8794. this._x = +value;
  8795. }
  8796. /**
  8797. * y-coordinate
  8798. * @returns {number}
  8799. */
  8800. get y() {
  8801. return this._y;
  8802. }
  8803. /**
  8804. * y-coordinate
  8805. * @param {number} value
  8806. */
  8807. set y(value) {
  8808. this._y = +value;
  8809. }
  8810. /**
  8811. * Convert to string
  8812. * @returns {string}
  8813. */
  8814. toString() {
  8815. return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8816. }
  8817. /**
  8818. * Add a vector to this point
  8819. * @param {SpeedyVector2} v
  8820. * @returns {SpeedyPoint2}
  8821. */
  8822. plus(v) {
  8823. return new SpeedyPoint2(this.x + v.x, this.y + v.y);
  8824. }
  8825. /**
  8826. * Subtracts a point p from this point
  8827. * @param {SpeedyPoint2} p
  8828. * @returns {SpeedyVector2}
  8829. */
  8830. minus(p) {
  8831. return new SpeedyVector2(this.x - p.x, this.y - p.y);
  8832. }
  8833. /**
  8834. * Is this point equal to p?
  8835. * @param {SpeedyPoint2} p
  8836. * @returns {boolean}
  8837. */
  8838. equals(p) {
  8839. return this.x === p.x && this.y === p.y;
  8840. }
  8841. }
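/*
 * Usage sketch for SpeedyPoint2 (arbitrary values): adding a vector to a point yields
 * a point, while subtracting two points yields a vector.
 *
 *   const p = new SpeedyPoint2(10, 20);
 *   const q = p.plus(new SpeedyVector2(5, -5)); // SpeedyPoint2(15, 15)
 *   const d = q.minus(p);                       // SpeedyVector2(5, -5)
 *   console.log(p.equals(q));                   // false
 */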
  8842. // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
  8843. var speedy_matrix_expr = __nested_webpack_require_314174__(6306);
  8844. // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
  8845. var speedy_matrix_wasm = __nested_webpack_require_314174__(6465);
  8846. // EXTERNAL MODULE: ./src/core/speedy-matrix.js
  8847. var speedy_matrix = __nested_webpack_require_314174__(4188);
  8848. ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
  8849. /*
  8850. * speedy-vision.js
  8851. * GPU-accelerated Computer Vision for JavaScript
  8852. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8853. *
  8854. * Licensed under the Apache License, Version 2.0 (the "License");
  8855. * you may not use this file except in compliance with the License.
  8856. * You may obtain a copy of the License at
  8857. *
  8858. * http://www.apache.org/licenses/LICENSE-2.0
  8859. *
  8860. * Unless required by applicable law or agreed to in writing, software
  8861. * distributed under the License is distributed on an "AS IS" BASIS,
  8862. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8863. * See the License for the specific language governing permissions and
  8864. * limitations under the License.
  8865. *
  8866. * speedy-matrix-factory.js
  8867. * A factory of matrices
  8868. */
  8869. /**
  8870. * Matrix routines
  8871. */
  8872. class SpeedyMatrixFactory extends Function {
  8873. /**
  8874. * Constructor
  8875. */
  8876. constructor() {
  8877. // This factory can be invoked as a function
  8878. super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
  8879. return this.bind(this);
  8880. }
  8881. /**
  8882. * @private
  8883. *
8884. * Create a new matrix with the specified size and entries
  8885. * @param {number} rows
  8886. * @param {number} [columns]
  8887. * @param {number[]} [entries] in column-major format
  8888. * @returns {SpeedyMatrix}
  8889. */
  8890. _create(rows, columns = rows, entries = []) {
  8891. return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
  8892. }
  8893. /**
  8894. * @private
  8895. *
  8896. * Evaluate an expression synchronously and store the result in a new matrix
  8897. * @param {SpeedyMatrixExpr} expr matrix expression
  8898. * @returns {SpeedyMatrix}
  8899. */
  8900. _from(expr) {
  8901. return speedy_matrix.SpeedyMatrix.From(expr);
  8902. }
  8903. /**
  8904. * Create a new matrix filled with zeros with the specified size
  8905. * @param {number} rows
  8906. * @param {number} [columns]
  8907. * @returns {SpeedyMatrix}
  8908. */
  8909. Zeros(rows, columns = rows) {
  8910. return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
  8911. }
  8912. /**
  8913. * Create a new matrix filled with ones with the specified size
  8914. * @param {number} rows
  8915. * @param {number} [columns]
  8916. * @returns {SpeedyMatrix}
  8917. */
  8918. Ones(rows, columns = rows) {
  8919. return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
  8920. }
  8921. /**
  8922. * Create an identity matrix with the specified size
  8923. * @param {number} rows
  8924. * @param {number} [columns]
  8925. * @returns {SpeedyMatrix}
  8926. */
  8927. Eye(rows, columns = rows) {
  8928. return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
  8929. }
  8930. /**
  8931. * Returns a promise that resolves immediately if the WebAssembly routines
  8932. * are ready to be used, or as soon as they do become ready
  8933. * @returns {SpeedyPromise<void>}
  8934. */
  8935. ready() {
  8936. return speedy_matrix.SpeedyMatrix.ready();
  8937. }
  8938. /**
  8939. * QR decomposition
  8940. * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
  8941. * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
  8942. * @param {SpeedyMatrix} mat is m x n, input
  8943. * @param {object} [options]
  8944. * @param {'reduced'|'full'} [options.mode]
  8945. * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
  8946. */
  8947. qr(Q, R, mat, {
  8948. mode = 'reduced'
  8949. } = {}) {
  8950. const A = mat,
  8951. m = mat.rows,
  8952. n = mat.columns;
  8953. // validate shapes & mode
  8954. if (mode == 'reduced') {
  8955. if (Q.rows != m || Q.columns != n || R.rows != n || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
  8956. } else if (mode == 'full') {
  8957. if (Q.rows != m || Q.columns != m || R.rows != m || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
  8958. } else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
  8959. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  8960. wasm,
  8961. memory
  8962. }) => {
  8963. // allocate matrices
  8964. const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
  8965. const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
  8966. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  8967. // copy input matrices to WASM memory
  8968. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  8969. // run the WASM routine
  8970. if (mode == 'reduced') wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);else wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
  8971. // copy output matrices from WASM memory
  8972. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
  8973. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
  8974. // deallocate matrices
  8975. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  8976. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
  8977. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
  8978. // done!
  8979. return [Q, R];
  8980. });
  8981. }
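/*
 * Usage sketch for qr(): reduced QR of a 3x2 matrix. The factory instance is built
 * here only for illustration; in practice the library exposes a ready-made instance
 * (assumed below to behave exactly like `new SpeedyMatrixFactory()`). Entries are
 * given in column-major order, and Q/R must be pre-allocated with matching shapes.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const A = Matrix(3, 2, [1, 1, 0,  0, 1, 1]); // columns (1,1,0) and (0,1,1)
 *   const Q = Matrix.Zeros(3, 2);
 *   const R = Matrix.Zeros(2, 2);
 *   Matrix.qr(Q, R, A, { mode: 'reduced' }).then(([q, r]) => {
 *     // q is 3x2 with orthonormal columns, r is 2x2 upper triangular
 *   });
 */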
  8982. /**
  8983. * Solve a possibly overdetermined system of linear
  8984. * equations Ax = b for x using ordinary least squares
  8985. * @param {SpeedyMatrix} solution n x 1, output
  8986. * @param {SpeedyMatrix} A m x n, m >= n, input
8987. * @param {SpeedyMatrix} b m x 1, input
  8988. * @param {object} [options]
  8989. * @param {'qr'} [options.method] method of resolution
  8990. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  8991. */
  8992. ols(solution, A, b, {
  8993. method = 'qr'
  8994. } = {}) {
  8995. const m = A.rows,
  8996. n = A.columns;
  8997. const x = solution;
  8998. // validate shapes
  8999. if (m < n || n == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9000. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9001. wasm,
  9002. memory
  9003. }) => {
  9004. // allocate matrices
  9005. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  9006. const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
  9007. const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
  9008. // copy input matrices to WASM memory
  9009. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  9010. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
  9011. // run the WASM routine
  9012. switch (method) {
  9013. case 'qr':
  9014. wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
  9015. break;
  9016. default:
  9017. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9018. }
  9019. // copy output matrix from WASM memory
  9020. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
  9021. // deallocate matrices
  9022. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
  9023. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
  9024. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  9025. // done!
  9026. return solution;
  9027. });
  9028. }
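/*
 * Usage sketch for ols(): least-squares fit of a line y = a*x + c through the points
 * (0,1), (1,3), (2,5). A is 3x2 in column-major order (x values, then a column of
 * ones), b holds the y values, and the 2x1 solution is [a, c] = [2, 1]. The factory
 * instance is constructed here only for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const A = Matrix(3, 2, [0, 1, 2,  1, 1, 1]);
 *   const b = Matrix(3, 1, [1, 3, 5]);
 *   const x = Matrix.Zeros(2, 1);
 *   Matrix.ols(x, A, b).then(solution => {
 *     // solution holds [2, 1] (up to floating-point error)
 *   });
 */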
  9029. /**
  9030. * Solve a system of linear equations Ax = b for x
  9031. * @param {SpeedyMatrix} solution m x 1, output
  9032. * @param {SpeedyMatrix} A m x m, input
9033. * @param {SpeedyMatrix} b m x 1, input
  9034. * @param {object} [options]
  9035. * @param {'qr'} [options.method] method of resolution
  9036. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9037. */
  9038. solve(solution, A, b, {
  9039. method = 'qr'
  9040. } = {}) {
  9041. const m = A.rows,
  9042. n = A.columns;
  9043. const x = solution;
  9044. // validate shapes
9045. if (m != n) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over- or underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9046. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9047. wasm,
  9048. memory
  9049. }) => {
  9050. // select method
  9051. switch (method) {
  9052. case 'qr':
  9053. return this.ols(x, A, b, {
  9054. method
  9055. });
  9056. /*case 'lu':
  9057. break;*/
  9058. default:
  9059. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9060. }
  9061. });
  9062. }
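/*
 * Usage sketch for solve(): a square 2x2 system. A = diag(2, 4) in column-major
 * order and b = (6, 8), so the solution is x = (3, 2). The factory instance is
 * constructed here only for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const A = Matrix(2, 2, [2, 0,  0, 4]);
 *   const b = Matrix(2, 1, [6, 8]);
 *   const x = Matrix.Zeros(2, 1);
 *   Matrix.solve(x, A, b).then(solution => {
 *     // solution holds [3, 2] (up to floating-point error)
 *   });
 */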
  9063. /**
  9064. * Compute a perspective transformation using 4 correspondences of points
  9065. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9066. * @param {SpeedyMatrix} src 2x4 input points - source coordinates
  9067. * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
  9068. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9069. */
  9070. perspective(homography, src, dest) {
  9071. // validate shapes
  9072. if (src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
  9073. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9074. wasm,
  9075. memory
  9076. }) => {
  9077. // allocate matrices
  9078. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9079. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9080. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9081. // copy input matrices to WASM memory
  9082. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9083. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9084. // run the WASM routine
  9085. wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
  9086. // copy output matrix from WASM memory
  9087. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9088. // deallocate matrices
  9089. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9090. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9091. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9092. // done!
  9093. return homography;
  9094. });
  9095. }
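/*
 * Usage sketch for perspective(): map the corners of the unit square to a square
 * scaled by 2 using four point correspondences. src and dest are 2x4, column-major,
 * one point per column. The factory instance is constructed here only for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const src  = Matrix(2, 4, [0, 0,  1, 0,  1, 1,  0, 1]);
 *   const dest = Matrix(2, 4, [0, 0,  2, 0,  2, 2,  0, 2]);
 *   const homography = Matrix.Zeros(3, 3);
 *   Matrix.perspective(homography, src, dest).then(H => {
 *     // H is the 3x3 homography mapping src onto dest (here, a uniform 2x scaling)
 *   });
 */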
  9096. /**
  9097. * Compute a perspective transformation using n >= 4 correspondences of points
  9098. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9099. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9100. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9101. * @param {object} [options]
  9102. * @param {'default'|'pransac'} [options.method] method of computation
  9103. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9104. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9105. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9106. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9107. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9108. */
  9109. findHomography(homography, src, dest, {
  9110. method = 'default',
  9111. mask = null,
  9112. reprojectionError = 3,
  9113. numberOfHypotheses = 512,
  9114. bundleSize = 128
  9115. } = {}) {
  9116. // validate shapes
  9117. if (src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9118. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9119. wasm,
  9120. memory
  9121. }) => {
  9122. // allocate matrices
  9123. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9124. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9125. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9126. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9127. // copy input matrices to WASM memory
  9128. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9129. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9130. // run the WASM routine
  9131. switch (method) {
  9132. case 'pransac':
  9133. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9134. wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9135. break;
  9136. case 'default':
  9137. case 'dlt':
  9138. // obsolete
  9139. wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
  9140. break;
  9141. default:
  9142. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
  9143. }
  9144. // copy output matrices from WASM memory
  9145. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9146. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9147. // deallocate matrices
  9148. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9149. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9150. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9151. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9152. // done!
  9153. return homography;
  9154. });
  9155. }
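/*
 * Usage sketch for findHomography() with the robust 'pransac' method. srcPoints and
 * destPoints are placeholders for 2 x n (n >= 4) matrices of matched coordinates
 * built elsewhere; the optional mask receives one 0/1 entry per correspondence
 * marking inliers. The factory instance is constructed here only for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const homography = Matrix.Zeros(3, 3);
 *   const mask = Matrix.Zeros(1, srcPoints.columns);
 *   Matrix.findHomography(homography, srcPoints, destPoints, {
 *     method: 'pransac',
 *     mask: mask,
 *     reprojectionError: 3
 *   }).then(H => {
 *     // H is the estimated 3x3 homography; mask flags the inlier correspondences
 *   });
 */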
  9156. /**
  9157. * Apply a perspective transformation to a set of 2D points
  9158. * @param {SpeedyMatrix} dest 2 x n output matrix
  9159. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9160. * @param {SpeedyMatrix} transform 3x3 homography matrix
  9161. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9162. */
  9163. applyPerspectiveTransform(dest, src, transform) {
  9164. // validate shapes
  9165. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 3 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
  9166. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9167. wasm,
  9168. memory
  9169. }) => {
  9170. // allocate matrices
  9171. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9172. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9173. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9174. // copy input matrices to WASM memory
  9175. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9176. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9177. // run the WASM routine
  9178. wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
  9179. // copy output matrix from WASM memory
  9180. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9181. // deallocate matrices
  9182. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9183. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9184. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9185. // done!
  9186. return dest;
  9187. });
  9188. }
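/*
 * Usage sketch for applyPerspectiveTransform(): push a 2 x n set of points through a
 * homography. The identity matrix is used here only to show the shapes involved, so
 * the output equals the input. The factory instance is constructed for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const points = Matrix(2, 4, [0, 0,  1, 0,  1, 1,  0, 1]);
 *   const mapped = Matrix.Zeros(2, 4);
 *   Matrix.applyPerspectiveTransform(mapped, points, Matrix.Eye(3)).then(out => {
 *     // out contains the transformed points (identical to `points` for the identity)
 *   });
 */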
  9189. /**
  9190. * Compute an affine transform using 3 correspondences of points
  9191. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9192. * @param {SpeedyMatrix} src 2x3 input points - source coordinates
  9193. * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
9194. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to the affine transform
  9195. */
  9196. affine(transform, src, dest) {
  9197. // validate shapes
  9198. if (src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
  9199. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9200. wasm,
  9201. memory
  9202. }) => {
  9203. // allocate matrices
  9204. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9205. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9206. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9207. // copy input matrices to WASM memory
  9208. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9209. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9210. // run the WASM routine
  9211. wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
  9212. // copy output matrix from WASM memory
  9213. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9214. // deallocate matrices
  9215. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9216. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9217. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9218. // done!
  9219. return transform;
  9220. });
  9221. }
  9222. /**
  9223. * Compute an affine transformation using n >= 3 correspondences of points
  9224. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9225. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9226. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9227. * @param {object} [options]
  9228. * @param {'default'|'pransac'} [options.method] method of computation
  9229. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9230. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9231. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9232. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9233. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
  9234. */
  9235. findAffineTransform(transform, src, dest, {
  9236. method = 'default',
  9237. mask = null,
  9238. reprojectionError = 3,
  9239. numberOfHypotheses = 512,
  9240. bundleSize = 128
  9241. } = {}) {
  9242. // validate shapes
  9243. if (src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9244. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9245. wasm,
  9246. memory
  9247. }) => {
  9248. // allocate matrices
  9249. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9250. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9251. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9252. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9253. // copy input matrices to WASM memory
  9254. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9255. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9256. // run the WASM routine
  9257. switch (method) {
  9258. case 'pransac':
  9259. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9260. wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9261. break;
  9262. case 'default':
  9263. wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
  9264. break;
  9265. default:
  9266. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
  9267. }
  9268. // copy output matrices from WASM memory
  9269. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9270. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9271. // deallocate matrices
  9272. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9273. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9274. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9275. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9276. // done!
  9277. return transform;
  9278. });
  9279. }
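/*
 * Usage sketch for findAffineTransform(): recover a pure translation of (+10, +5)
 * from three correspondences. src and dest are 2x3, column-major; the expected 2x3
 * output is [[1, 0, 10], [0, 1, 5]]. The factory instance is constructed here only
 * for illustration.
 *
 *   const Matrix = new SpeedyMatrixFactory();
 *   const src  = Matrix(2, 3, [0, 0,  1, 0,  0, 1]);
 *   const dest = Matrix(2, 3, [10, 5,  11, 5,  10, 6]);
 *   const transform = Matrix.Zeros(2, 3);
 *   Matrix.findAffineTransform(transform, src, dest).then(T => {
 *     // T is the estimated 2x3 affine transform (a translation in this example)
 *   });
 */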
  9280. /**
  9281. * Apply an affine transformation to a set of 2D points
  9282. * @param {SpeedyMatrix} dest 2 x n output matrix
  9283. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9284. * @param {SpeedyMatrix} transform 2x3 affine transform
  9285. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9286. */
  9287. applyAffineTransform(dest, src, transform) {
  9288. // validate shapes
  9289. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
  9290. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9291. wasm,
  9292. memory
  9293. }) => {
  9294. // allocate matrices
  9295. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9296. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9297. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9298. // copy input matrices to WASM memory
  9299. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9300. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9301. // run the WASM routine
  9302. wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
  9303. // copy output matrix from WASM memory
  9304. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9305. // deallocate matrices
  9306. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9307. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9308. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9309. // done!
  9310. return dest;
  9311. });
  9312. }
  9313. }
  9314. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
  9315. /*
  9316. * speedy-vision.js
  9317. * GPU-accelerated Computer Vision for JavaScript
  9318. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9319. *
  9320. * Licensed under the Apache License, Version 2.0 (the "License");
  9321. * you may not use this file except in compliance with the License.
  9322. * You may obtain a copy of the License at
  9323. *
  9324. * http://www.apache.org/licenses/LICENSE-2.0
  9325. *
  9326. * Unless required by applicable law or agreed to in writing, software
  9327. * distributed under the License is distributed on an "AS IS" BASIS,
  9328. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9329. * See the License for the specific language governing permissions and
  9330. * limitations under the License.
  9331. *
  9332. * pipeline-message.js
  9333. * A message that is shared between nodes of a pipeline
  9334. */
  9335. /**
  9336. * Types of messages
  9337. * @enum {Symbol}
  9338. */
  9339. const SpeedyPipelineMessageType = Object.freeze({
  9340. Nothing: Symbol('Nothing'),
  9341. Image: Symbol('Image'),
  9342. Keypoints: Symbol('Keypoints'),
  9343. Vector2: Symbol('Vector2'),
  9344. LSHTables: Symbol('LSHTables'),
  9345. KeypointMatches: Symbol('KeypointMatches')
  9346. });
  9347. /**
  9348. * Diagnostic data
  9349. * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
  9350. */
  9351. /**
  9352. * A message that is shared between nodes of a pipeline
  9353. * @abstract
  9354. */
  9355. class SpeedyPipelineMessage {
  9356. /**
  9357. * Constructor
  9358. * @param {SpeedyPipelineMessageType} type message type
  9359. */
  9360. constructor(type) {
  9361. /** @type {SpeedyPipelineMessageType} message type */
  9362. this._type = type;
  9363. }
  9364. /**
  9365. * Message type
  9366. * @returns {SpeedyPipelineMessageType}
  9367. */
  9368. get type() {
  9369. return this._type;
  9370. }
  9371. /**
  9372. * Checks if the type of this message is equal to parameter type
  9373. * @param {SpeedyPipelineMessageType} type
  9374. * @returns {boolean}
  9375. */
  9376. hasType(type) {
  9377. return this._type === type;
  9378. }
  9379. /**
  9380. * Is this an empty message?
  9381. * @returns {boolean}
  9382. */
  9383. isEmpty() {
  9384. return this.hasType(SpeedyPipelineMessageType.Nothing);
  9385. }
  9386. /**
  9387. * Convert to string
  9388. * @returns {string}
  9389. */
  9390. toString() {
  9391. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this.type);
  9392. return `message of type ${type}`;
  9393. }
  9394. /**
  9395. * Inspect this message for debugging purposes
  9396. * @param {SpeedyGPU} gpu
  9397. * @returns {SpeedyPipelineMessageDiagnosticData}
  9398. */
  9399. inspect(gpu) {
  9400. throw new utils_errors/* AbstractMethodError */.aQ();
  9401. }
  9402. /**
  9403. * Set parameters
  9404. * @abstract
  9405. * @param {...any} args
  9406. * @returns {SpeedyPipelineMessage} this message
  9407. */
  9408. set(...args) {
  9409. throw new utils_errors/* AbstractMethodError */.aQ();
  9410. }
  9411. /**
  9412. * Create a message of the specified type
  9413. * @param {SpeedyPipelineMessageType} type
  9414. * @returns {SpeedyPipelineMessage}
  9415. */
  9416. static create(type) {
  9417. return createMessage(type);
  9418. }
  9419. }
  9420. /**
  9421. * An empty message carrying nothing
  9422. */
  9423. class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage {
  9424. /**
  9425. * Constructor
  9426. */
  9427. constructor() {
  9428. super(SpeedyPipelineMessageType.Nothing);
  9429. }
  9430. /**
  9431. * Set parameters
  9432. * @returns {SpeedyPipelineMessage} this message
  9433. */
  9434. set() {
  9435. return this;
  9436. }
  9437. /**
  9438. * Inspect this message for debugging purposes
  9439. * @param {SpeedyGPU} gpu
  9440. * @returns {SpeedyPipelineMessageDiagnosticData}
  9441. */
  9442. inspect(gpu) {
  9443. return {
  9444. type: this.constructor.name
  9445. };
  9446. }
  9447. }
  9448. /**
  9449. * A message transporting an image
  9450. */
  9451. class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage {
  9452. /**
  9453. * Constructor
  9454. */
  9455. constructor() {
  9456. super(SpeedyPipelineMessageType.Image);
  9457. /** @type {SpeedyDrawableTexture} the image we carry */
  9458. this._image = null;
  9459. /** @type {ImageFormat} image format */
  9460. this._format = types/* ImageFormat */.f5.RGBA;
  9461. }
  9462. /**
  9463. * Set parameters
  9464. * @param {SpeedyDrawableTexture} image the image we carry
  9465. * @param {ImageFormat} [format] image format
  9466. * @returns {SpeedyPipelineMessage} this message
  9467. */
  9468. set(image, format = types/* ImageFormat */.f5.RGBA) {
  9469. // set parameters
  9470. this._image = image;
  9471. this._format = format;
  9472. // done!
  9473. return this;
  9474. }
  9475. /**
  9476. * Inspect this message for debugging purposes
  9477. * @param {SpeedyGPU} gpu
  9478. * @returns {SpeedyPipelineMessageDiagnosticData}
  9479. */
  9480. inspect(gpu) {
  9481. const formatName = Object.keys(types/* ImageFormat */.f5).find(format => types/* ImageFormat */.f5[format] === this.format);
  9482. return {
  9483. type: this.constructor.name,
  9484. format: String(formatName),
  9485. imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
  9486. image: this.image ? '<image data>' /* possibly MBs of data */ : '',
  9487. hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
  9488. };
  9489. }
  9490. /**
  9491. * The image we carry
  9492. * @returns {SpeedyDrawableTexture}
  9493. */
  9494. get image() {
  9495. return this._image;
  9496. }
  9497. /**
  9498. * Image format
  9499. * @returns {ImageFormat}
  9500. */
  9501. get format() {
  9502. return this._format;
  9503. }
  9504. }
  9505. /**
  9506. * A message transporting keypoints
  9507. */
  9508. class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage {
  9509. /**
  9510. * Constructor
  9511. */
  9512. constructor() {
  9513. super(SpeedyPipelineMessageType.Keypoints);
  9514. /** @type {SpeedyDrawableTexture} encoded keypoints */
  9515. this._encodedKeypoints = null;
  9516. /** @type {number} descriptor size in bytes */
  9517. this._descriptorSize = 0;
  9518. /** @type {number} extra size in bytes */
  9519. this._extraSize = 0;
  9520. /** @type {number} encoder length */
  9521. this._encoderLength = 1;
  9522. }
  9523. /**
  9524. * Set parameters
  9525. * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
  9526. * @param {number} descriptorSize in bytes
  9527. * @param {number} extraSize in bytes
  9528. * @param {number} encoderLength positive integer
  9529. * @returns {SpeedyPipelineMessage} this message
  9530. */
  9531. set(encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  9532. // set parameters
  9533. this._encodedKeypoints = encodedKeypoints;
  9534. this._descriptorSize = descriptorSize | 0;
  9535. this._extraSize = extraSize | 0;
  9536. this._encoderLength = encoderLength | 0;
  9537. // validate
  9538. utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
  9539. utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
  9540. utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
  9541. // done!
  9542. return this;
  9543. }
  9544. /**
  9545. * Inspect this message for debugging purposes
  9546. * @param {SpeedyGPU} gpu
  9547. * @returns {SpeedyPipelineMessageDiagnosticData}
  9548. */
  9549. inspect(gpu) {
  9550. return {
  9551. type: this.constructor.name,
  9552. descriptorSize: this.descriptorSize,
  9553. extraSize: this.extraSize,
  9554. encoderLength: this.encoderLength,
  9555. encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
  9556. encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : ''
  9557. };
  9558. }
  9559. /**
  9560. * Encoded keypoints
  9561. * @returns {SpeedyDrawableTexture}
  9562. */
  9563. get encodedKeypoints() {
  9564. return this._encodedKeypoints;
  9565. }
  9566. /**
  9567. * Descriptor size, in bytes
  9568. * @returns {number}
  9569. */
  9570. get descriptorSize() {
  9571. return this._descriptorSize;
  9572. }
  9573. /**
  9574. * Extra size, in bytes
  9575. * @returns {number}
  9576. */
  9577. get extraSize() {
  9578. return this._extraSize;
  9579. }
  9580. /**
  9581. * Encoder length
  9582. * @returns {number}
  9583. */
  9584. get encoderLength() {
  9585. return this._encoderLength;
  9586. }
  9587. }
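// --- Editor's illustrative sketch (not part of speedy-vision): filling a keypoints message.
// --- set() asserts that encoderLength equals the texture width and that the texture is square,
// --- so a valid call passes the texture's own width. `tex` and the 32-byte descriptor size
// --- are assumptions for illustration only.
function _sketchSetKeypointsMessage(tex) {
    const msg = new SpeedyPipelineMessageWithKeypoints();
    return msg.set(tex, 32, 0, tex.width); // descriptorSize = 32 bytes, no extra data
}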
9588. /**
  9589. * A message transporting a set of 2D vectors
  9590. */
  9591. class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage {
  9592. /**
  9593. * Constructor
  9594. */
  9595. constructor() {
  9596. super(SpeedyPipelineMessageType.Vector2);
  9597. /** @type {SpeedyDrawableTexture} the set of vectors */
  9598. this._vectors = null;
  9599. }
  9600. /**
  9601. * Set parameters
  9602. * @param {SpeedyDrawableTexture} vectors the set of vectors
  9603. * @returns {SpeedyPipelineMessage} this message
  9604. */
  9605. set(vectors) {
  9606. // set parameters
  9607. this._vectors = vectors;
  9608. // done!
  9609. return this;
  9610. }
  9611. /**
  9612. * Inspect this message for debugging purposes
  9613. * @param {SpeedyGPU} gpu
  9614. * @returns {SpeedyPipelineMessageDiagnosticData}
  9615. */
  9616. inspect(gpu) {
  9617. return {
  9618. type: this.constructor.name,
  9619. vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
  9620. vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
  9621. };
  9622. }
  9623. /**
  9624. * The set of vectors
  9625. * @returns {SpeedyDrawableTexture}
  9626. */
  9627. get vectors() {
  9628. return this._vectors;
  9629. }
  9630. }
  9631. /**
  9632. * A message transporting LSH tables
  9633. */
  9634. class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage {
  9635. /**
  9636. * Constructor
  9637. */
  9638. constructor() {
  9639. super(SpeedyPipelineMessageType.LSHTables);
  9640. /** @type {SpeedyLSH} LSH data structure */
  9641. this._lsh = null;
  9642. }
  9643. /**
  9644. * Set parameters
  9645. * @param {SpeedyLSH} lsh
  9646. * @returns {SpeedyPipelineMessage} this message
  9647. */
  9648. set(lsh) {
  9649. // set parameters
  9650. this._lsh = lsh;
  9651. // done!
  9652. return this;
  9653. }
  9654. /**
  9655. * Inspect this message for debugging purposes
  9656. * @param {SpeedyGPU} gpu
  9657. * @returns {SpeedyPipelineMessageDiagnosticData}
  9658. */
  9659. inspect(gpu) {
  9660. return {
  9661. type: this.constructor.name,
  9662. lsh: '<LSH tables>'
  9663. };
  9664. }
  9665. /**
  9666. * LSH data structure
  9667. * @returns {SpeedyLSH}
  9668. */
  9669. get lsh() {
  9670. return this._lsh;
  9671. }
  9672. }
9673. /**
  9674. * A message transporting a set of keypoint matches
  9675. */
  9676. class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage {
  9677. /**
  9678. * Constructor
  9679. */
  9680. constructor() {
  9681. super(SpeedyPipelineMessageType.KeypointMatches);
  9682. /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
  9683. this._encodedMatches = null;
  9684. /** @type {number} number of matches per keypoint */
  9685. this._matchesPerKeypoint = 1;
  9686. }
  9687. /**
  9688. * Set parameters
  9689. * @param {SpeedyDrawableTexture} encodedMatches
  9690. * @param {number} matchesPerKeypoint
  9691. * @returns {SpeedyPipelineMessage} this message
  9692. */
  9693. set(encodedMatches, matchesPerKeypoint) {
  9694. // set parameters
  9695. this._encodedMatches = encodedMatches;
  9696. this._matchesPerKeypoint = matchesPerKeypoint | 0;
  9697. // validate
  9698. utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
  9699. // done!
  9700. return this;
  9701. }
  9702. /**
  9703. * Inspect this message for debugging purposes
  9704. * @param {SpeedyGPU} gpu
  9705. * @returns {SpeedyPipelineMessageDiagnosticData}
  9706. */
  9707. inspect(gpu) {
  9708. return {
  9709. type: this.constructor.name,
  9710. matchesPerKeypoint: this.matchesPerKeypoint,
  9711. encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
  9712. encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
  9713. };
  9714. }
  9715. /**
  9716. * The matches
  9717. * @returns {SpeedyDrawableTexture}
  9718. */
  9719. get encodedMatches() {
  9720. return this._encodedMatches;
  9721. }
  9722. /**
  9723. * Number of matches per keypoint
  9724. * @returns {number}
  9725. */
  9726. get matchesPerKeypoint() {
  9727. return this._matchesPerKeypoint;
  9728. }
  9729. }
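// --- Editor's illustrative sketch (not part of speedy-vision): a matches message in which
// --- each keypoint carries its 2 nearest matches (one pixel encodes one match).
// --- `matchesTexture` is an assumed texture name; the function is never invoked.
function _sketchSetMatchesMessage(matchesTexture) {
    return new SpeedyPipelineMessageWithKeypointMatches().set(matchesTexture, 2);
}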
  9730. //
  9731. // Utilities
  9732. //
  9733. /** Map message type to message class */
  9734. const MESSAGE_CLASS = Object.freeze({
  9735. [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
  9736. [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
  9737. [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
  9738. [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
  9739. [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
  9740. [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches
  9741. });
  9742. /**
  9743. * Create a message of the specified type
  9744. * @param {SpeedyPipelineMessageType} type
  9745. * @returns {SpeedyPipelineMessage}
  9746. */
  9747. function createMessage(type) {
  9748. //return Reflect.construct(MESSAGE_CLASS[type], []);
  9749. return new MESSAGE_CLASS[
9750. // error TS2538: Type 'Symbol' cannot be used as an index type.
9751. // hence the cast to any below
  9752. /** @type {any} */
  9753. type]();
  9754. }
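// --- Editor's illustrative sketch (not part of speedy-vision): createMessage() picks the
// --- concrete class from MESSAGE_CLASS, so the two calls below build equivalent messages.
// --- Unused function; shown only to make the factory explicit.
function _sketchCreateMessage() {
    const a = createMessage(SpeedyPipelineMessageType.Image);
    const b = new SpeedyPipelineMessageWithImage();
    return a.hasType(SpeedyPipelineMessageType.Image) && b.hasType(SpeedyPipelineMessageType.Image);
}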
  9755. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
  9756. /*
  9757. * speedy-vision.js
  9758. * GPU-accelerated Computer Vision for JavaScript
  9759. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9760. *
  9761. * Licensed under the Apache License, Version 2.0 (the "License");
  9762. * you may not use this file except in compliance with the License.
  9763. * You may obtain a copy of the License at
  9764. *
  9765. * http://www.apache.org/licenses/LICENSE-2.0
  9766. *
  9767. * Unless required by applicable law or agreed to in writing, software
  9768. * distributed under the License is distributed on an "AS IS" BASIS,
  9769. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9770. * See the License for the specific language governing permissions and
  9771. * limitations under the License.
  9772. *
  9773. * pipeline-portspec.js
  9774. * Specification (requirements) of a port of a node of a pipeline
  9775. */
  9776. /**
  9777. * A message constraint is a message validation predicate
  9778. * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
  9779. */
  9780. /**
  9781. * A validation predicate that validates all messages
  9782. * @type {SpeedyPipelineMessageConstraint}
  9783. */
  9784. const always = message => true;
  9785. /**
  9786. * Specification (requirements) of a port of a node of a pipeline
  9787. */
  9788. class SpeedyPipelinePortSpec {
  9789. /**
  9790. * Constructor
  9791. * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
  9792. * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
  9793. */
  9794. constructor(expectedMessageType, messageConstraint = always) {
  9795. /** @type {SpeedyPipelineMessageType} expected message type */
  9796. this._expectedMessageType = expectedMessageType;
  9797. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  9798. this._isValidMessage = typeof messageConstraint === 'function' ? messageConstraint : always;
  9799. // expect a valid type
  9800. utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
  9801. }
  9802. /**
  9803. * Checks if two specs have the same expected type
  9804. * @param {SpeedyPipelinePortSpec} spec
  9805. * @returns {boolean}
  9806. */
  9807. isCompatibleWith(spec) {
  9808. return this._expectedMessageType == spec._expectedMessageType;
  9809. }
  9810. /**
  9811. * Is the given message accepted by a port that abides by this specification?
  9812. * @param {SpeedyPipelineMessage} message
  9813. * @returns {boolean}
  9814. */
  9815. accepts(message) {
  9816. return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
  9817. }
  9818. /**
  9819. * Convert to string
  9820. * @returns {string}
  9821. */
  9822. toString() {
  9823. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this._expectedMessageType);
  9824. return `Port expects ${type} satisfying ${this._isValidMessage}`;
  9825. }
  9826. /**
  9827. * Expected message type
  9828. * @returns {SpeedyPipelineMessageType}
  9829. */
  9830. get expectedMessageType() {
  9831. return this._expectedMessageType;
  9832. }
  9833. }
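// --- Editor's illustrative sketch (not part of speedy-vision): a port spec that only accepts
// --- image messages in RGBA format. The constraint is a predicate that receives the message
// --- and returns a boolean; accepts() checks the message type before calling it.
function _sketchImagePortSpec() {
    return new SpeedyPipelinePortSpec(
        SpeedyPipelineMessageType.Image,
        message => message.format === types/* ImageFormat */.f5.RGBA
    );
}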
  9834. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
  9835. /*
  9836. * speedy-vision.js
  9837. * GPU-accelerated Computer Vision for JavaScript
  9838. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9839. *
  9840. * Licensed under the Apache License, Version 2.0 (the "License");
  9841. * you may not use this file except in compliance with the License.
  9842. * You may obtain a copy of the License at
  9843. *
  9844. * http://www.apache.org/licenses/LICENSE-2.0
  9845. *
  9846. * Unless required by applicable law or agreed to in writing, software
  9847. * distributed under the License is distributed on an "AS IS" BASIS,
  9848. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9849. * See the License for the specific language governing permissions and
  9850. * limitations under the License.
  9851. *
  9852. * pipeline-port.js
  9853. * Port of a node of a pipeline
  9854. */
  9855. // Constants
  9856. const DEFAULT_INPUT_PORT_NAME = 'in';
  9857. const DEFAULT_OUTPUT_PORT_NAME = 'out';
  9858. const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
  9859. const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
  9860. /**
  9861. * Diagnostic data
  9862. * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
  9863. */
  9864. /**
  9865. * Port of a node of a pipeline
  9866. * @abstract
  9867. */
  9868. class SpeedyPipelinePort {
  9869. /**
  9870. * Constructor
  9871. * @param {string} name the name of this port
  9872. * @param {SpeedyPipelinePortSpec} spec port specification
  9873. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9874. */
  9875. constructor(name, spec, node) {
  9876. /** @type {string} the name of this port */
  9877. this._name = String(name);
  9878. /** @type {SpeedyPipelinePortSpec} the specification of this port */
  9879. this._spec = spec;
  9880. /** @type {SpeedyPipelineNode} the node to which this port belongs */
  9881. this._node = node;
  9882. /** @type {SpeedyPipelineMessage} the message located in this port */
  9883. this._message = EMPTY_MESSAGE;
  9884. // check if we've got an acceptable port name
  9885. utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
  9886. }
  9887. /**
  9888. * The name of this port
  9889. * @returns {string}
  9890. */
  9891. get name() {
  9892. return this._name;
  9893. }
  9894. /**
  9895. * The node to which this port belongs
  9896. * @returns {SpeedyPipelineNode}
  9897. */
  9898. get node() {
  9899. return this._node;
  9900. }
  9901. /**
  9902. * Connect this port to another
  9903. * @abstract
  9904. * @param {SpeedyPipelinePort} port
  9905. */
  9906. connectTo(port) {
  9907. throw new utils_errors/* AbstractMethodError */.aQ();
  9908. }
  9909. /**
  9910. * Is this an input port?
  9911. * @abstract
  9912. * @returns {boolean}
  9913. */
  9914. isInputPort() {
  9915. throw new utils_errors/* AbstractMethodError */.aQ();
  9916. }
  9917. /**
  9918. * Is this an output port?
  9919. * @returns {boolean}
  9920. */
  9921. isOutputPort() {
  9922. return !this.isInputPort();
  9923. }
  9924. /**
  9925. * Clear the message stored in this port
  9926. */
  9927. clearMessage() {
  9928. this._message = EMPTY_MESSAGE;
  9929. }
  9930. /**
  9931. * Is there a valid message located in this port?
  9932. * @returns {boolean}
  9933. */
  9934. hasMessage() {
  9935. return !this._message.isEmpty();
  9936. }
  9937. /**
  9938. * Read the message that is in this port
  9939. * @returns {SpeedyPipelineMessage}
  9940. */
  9941. read() {
  9942. if (this._message.isEmpty()) throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
  9943. return this._message;
  9944. }
  9945. /**
  9946. * Write a message to this port
  9947. * @param {SpeedyPipelineMessage} message
  9948. */
  9949. write(message) {
  9950. throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
  9951. }
  9952. /**
  9953. * Inspect this port for debugging purposes
  9954. * @param {SpeedyGPU} gpu
  9955. * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
  9956. */
  9957. inspect(gpu) {
  9958. return this._message.inspect(gpu);
  9959. }
  9960. /**
  9961. * Default port name
  9962. * @abstract
  9963. * @returns {string}
  9964. */
  9965. static get DEFAULT_NAME() {
  9966. throw new utils_errors/* AbstractMethodError */.aQ();
  9967. }
  9968. }
  9969. /**
  9970. * Output port
  9971. */
  9972. class SpeedyPipelineOutputPort extends SpeedyPipelinePort {
  9973. /**
  9974. * Constructor
  9975. * @param {string} name the name of this port
  9976. * @param {SpeedyPipelinePortSpec} spec port specification
  9977. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9978. */
  9979. constructor(name, spec, node) {
  9980. super(name, spec, node);
  9981. /** @type {SpeedyPipelineMessage} cached message */
  9982. this._cachedMessage = null;
  9983. }
  9984. /**
  9985. * Connect this port to another
  9986. * @param {SpeedyPipelineInputPort} port
  9987. */
  9988. connectTo(port) {
  9989. if (!port.isInputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
  9990. port.connectTo(this);
  9991. }
  9992. /**
  9993. * Is this an input port?
  9994. * @returns {boolean}
  9995. */
  9996. isInputPort() {
  9997. return false;
  9998. }
  9999. /**
  10000. * Write a message to this port
  10001. * @param {SpeedyPipelineMessage} message
  10002. */
  10003. write(message) {
  10004. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
  10005. this._message = message;
  10006. }
  10007. /**
  10008. * Write a message to this port using a cached message object
  10009. * @param {...any} args to be passed to SpeedyPipelineMessage.set()
  10010. */
  10011. swrite(...args) {
  10012. if (this._cachedMessage == null) this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
  10013. this.write(this._cachedMessage.set(...args));
  10014. }
  10015. /**
  10016. * Default port name
  10017. * @returns {string}
  10018. */
  10019. static get DEFAULT_NAME() {
  10020. return DEFAULT_OUTPUT_PORT_NAME;
  10021. }
  10022. }
  10023. /**
  10024. * Input port
  10025. */
  10026. class SpeedyPipelineInputPort extends SpeedyPipelinePort {
  10027. /**
  10028. * Constructor
  10029. * @param {string} name the name of this port
  10030. * @param {SpeedyPipelinePortSpec} spec port specification
  10031. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10032. */
  10033. constructor(name, spec, node) {
  10034. super(name, spec, node);
  10035. /** @type {SpeedyPipelineOutputPort|null} incoming link */
  10036. this._incomingLink = null;
  10037. }
  10038. /**
  10039. * Incoming link
  10040. * @returns {SpeedyPipelineOutputPort|null}
  10041. */
  10042. get incomingLink() {
  10043. return this._incomingLink;
  10044. }
  10045. /**
  10046. * Connect this port to another
  10047. * @param {SpeedyPipelineOutputPort} port
  10048. */
  10049. connectTo(port) {
  10050. if (!port.isOutputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);else if (!this._spec.isCompatibleWith(port._spec)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
  10051. this._incomingLink = port;
  10052. }
  10053. /**
  10054. * Unlink this port
  10055. */
  10056. disconnect() {
  10057. this._incomingLink = null;
  10058. }
  10059. /**
  10060. * Is this an input port?
  10061. * @returns {boolean}
  10062. */
  10063. isInputPort() {
  10064. return true;
  10065. }
  10066. /**
  10067. * Receive a message using the incoming link
  10068. * @param {string} [nodeName]
  10069. * @returns {SpeedyPipelineMessage}
  10070. */
  10071. pullMessage(nodeName = '') {
  10072. const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
  10073. if (this._incomingLink == null) throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
  10074. const message = this._incomingLink.read();
  10075. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
  10076. return this._message = message;
  10077. }
  10078. /**
  10079. * Default port name
  10080. * @returns {string}
  10081. */
  10082. static get DEFAULT_NAME() {
  10083. return DEFAULT_INPUT_PORT_NAME;
  10084. }
  10085. }
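// --- Editor's illustrative sketch (not part of speedy-vision): connections are stored on the
// --- input side, so connecting from the output port delegates to the input port and ends with
// --- the input port holding the incoming link. `outputPort` and `inputPort` are assumptions.
function _sketchConnectPorts(outputPort, inputPort) {
    outputPort.connectTo(inputPort);              // delegates to inputPort.connectTo(outputPort)
    return inputPort.incomingLink === outputPort; // true after a successful connection
}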
  10086. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
  10087. /*
  10088. * speedy-vision.js
  10089. * GPU-accelerated Computer Vision for JavaScript
  10090. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10091. *
  10092. * Licensed under the Apache License, Version 2.0 (the "License");
  10093. * you may not use this file except in compliance with the License.
  10094. * You may obtain a copy of the License at
  10095. *
  10096. * http://www.apache.org/licenses/LICENSE-2.0
  10097. *
  10098. * Unless required by applicable law or agreed to in writing, software
  10099. * distributed under the License is distributed on an "AS IS" BASIS,
  10100. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10101. * See the License for the specific language governing permissions and
  10102. * limitations under the License.
  10103. *
  10104. * pipeline-portbuilder.js
  10105. * Builder of a port of a node of a pipeline
  10106. */
  10107. /**
  10108. * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
  10109. */
  10110. /**
  10111. * Builder of a port of a node of a pipeline
  10112. */
  10113. class SpeedyPipelinePortBuilder {
  10114. /**
  10115. * Constructor
  10116. * @param {typeof SpeedyPipelinePort} portClass input or output?
  10117. * @param {string} portName
  10118. */
  10119. constructor(portClass, portName) {
  10120. /** @type {typeof SpeedyPipelinePort} input or output? */
  10121. this._class = portClass;
  10122. /** @type {string} port name */
  10123. this._name = String(portName);
  10124. /** @type {SpeedyPipelineMessageType} accepted message type */
  10125. this._type = SpeedyPipelineMessageType.Nothing;
  10126. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10127. this._messageConstraint = undefined;
  10128. }
  10129. /**
  10130. * Declare that the new port expects a certain type of message
  10131. * @param {SpeedyPipelineMessageType} type expected type
  10132. * @returns {SpeedyPipelinePortBuilder} this builder
  10133. */
  10134. expects(type) {
  10135. utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
  10136. utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
  10137. this._type = type;
  10138. return this;
  10139. }
  10140. /**
  10141. * Declare that the new port expects messages satisfying a constraint
  10142. * @param {SpeedyPipelineMessageConstraint} constraint
  10143. * @returns {SpeedyPipelinePortBuilder} this builder
  10144. */
  10145. satisfying(constraint) {
  10146. utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
  10147. utils/* Utils */.A.assert(this._messageConstraint === undefined);
  10148. utils/* Utils */.A.assert(typeof constraint === 'function');
  10149. this._messageConstraint = constraint;
  10150. return this;
  10151. }
  10152. /**
  10153. * Build a port
  10154. * @param {SpeedyPipelineNode} node the node to which the new port will belong
  10155. * @returns {SpeedyPipelinePort}
  10156. */
  10157. build(node) {
  10158. const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
  10159. return Reflect.construct(this._class, [this._name, spec, node]);
  10160. }
  10161. }
  10162. /**
  10163. * Creates a builder for an input port
  10164. * @param {string} [portName]
  10165. * @returns {SpeedyPipelinePortBuilder}
  10166. */
  10167. function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10168. return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
  10169. }
  10170. /**
  10171. * Creates a builder for an output port
  10172. * @param {string} [portName]
  10173. * @returns {SpeedyPipelinePortBuilder}
  10174. */
  10175. function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10176. return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
  10177. }
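// --- Editor's illustrative sketch (not part of speedy-vision): declaring ports with the
// --- builders above. This mirrors how concrete nodes pass port builders to the
// --- SpeedyPipelineNode constructor defined in the next module.
function _sketchPortBuilders() {
    return [
        InputPort().expects(SpeedyPipelineMessageType.Image),
        OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
    ];
}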
  10178. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
  10179. /*
  10180. * speedy-vision.js
  10181. * GPU-accelerated Computer Vision for JavaScript
  10182. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10183. *
  10184. * Licensed under the Apache License, Version 2.0 (the "License");
  10185. * you may not use this file except in compliance with the License.
  10186. * You may obtain a copy of the License at
  10187. *
  10188. * http://www.apache.org/licenses/LICENSE-2.0
  10189. *
  10190. * Unless required by applicable law or agreed to in writing, software
  10191. * distributed under the License is distributed on an "AS IS" BASIS,
  10192. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10193. * See the License for the specific language governing permissions and
  10194. * limitations under the License.
  10195. *
  10196. * pipeline-node.js
  10197. * Node of a pipeline
  10198. */
  10199. /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
  10200. /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
  10201. /** Generate a random name for a node */
  10202. const generateRandomName = () => Math.random().toString(16).substr(2);
  10203. /** Create an empty input port dictionary */
  10204. const createInputPortDictionary = () => ( /** @type {InputPortDictionary} */Object.create(null));
  10205. /** Create an empty output port dictionary */
  10206. const createOutputPortDictionary = () => ( /** @type {OutputPortDictionary} */Object.create(null));
  10207. /**
  10208. * Map an array of input ports to an InputPortDictionary whose keys are their names
  10209. * @param {SpeedyPipelineInputPort[]} ports
  10210. * @returns {InputPortDictionary}
  10211. */
  10212. function InputPortDictionary(ports) {
  10213. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createInputPortDictionary());
  10214. }
  10215. /**
  10216. * Map an array of output ports to an OutputPortDictionary whose keys are their names
  10217. * @param {SpeedyPipelineOutputPort[]} ports
  10218. * @returns {OutputPortDictionary}
  10219. */
  10220. function OutputPortDictionary(ports) {
  10221. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createOutputPortDictionary());
  10222. }
  10223. /** A flag used for debugging purposes */
  10224. let _texView = false;
  10225. /**
  10226. * Node of a pipeline
  10227. * @abstract
  10228. */
  10229. class SpeedyPipelineNode {
  10230. /**
  10231. * Constructor
  10232. * @param {string} [name] the name of this node
  10233. * @param {number} [texCount] number of work textures
  10234. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10235. */
  10236. constructor(name = generateRandomName(), texCount = 0, portBuilders = []) {
  10237. /** @type {string} the name of this node */
  10238. this._name = String(name);
  10239. /** @type {SpeedyDrawableTexture[]} work texture(s) */
  10240. this._tex = new Array(texCount).fill(null);
  10241. // build the ports
  10242. const ports = portBuilders.map(builder => builder.build(this));
  10243. const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ports.filter(port => port.isInputPort());
  10244. const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ports.filter(port => port.isOutputPort());
  10245. /** @type {InputPortDictionary} input ports */
  10246. this._inputPorts = InputPortDictionary(inputPorts);
  10247. /** @type {OutputPortDictionary} output ports */
  10248. this._outputPorts = OutputPortDictionary(outputPorts);
  10249. // validate
  10250. if (this._name.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);else if (portBuilders.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
  10251. }
  10252. /**
  10253. * The name of this node
  10254. * @returns {string}
  10255. */
  10256. get name() {
  10257. return this._name;
  10258. }
  10259. /**
  10260. * Name and type of this node
  10261. * @returns {string}
  10262. */
  10263. get fullName() {
  10264. return `${this.constructor.name}[${this.name}]`;
  10265. }
  10266. /**
  10267. * Find input port by name
  10268. * @param {string} [portName]
  10269. * @returns {SpeedyPipelineInputPort}
  10270. */
  10271. input(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10272. if (portName in this._inputPorts) return this._inputPorts[portName];
  10273. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
  10274. }
  10275. /**
  10276. * Find output port by name
  10277. * @param {string} [portName]
  10278. * @returns {SpeedyPipelineOutputPort}
  10279. */
  10280. output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10281. if (portName in this._outputPorts) return this._outputPorts[portName];
  10282. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
  10283. }
  10284. /**
  10285. * Get data from the input ports and execute
  10286. * the task that this node is supposed to!
  10287. * @param {SpeedyGPU} gpu
  10288. * @returns {void|SpeedyPromise<void>}
  10289. */
  10290. execute(gpu) {
  10291. let portName;
  10292. // clear output ports
  10293. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10294. // let the input ports receive what is due
  10295. for (portName in this._inputPorts) this._inputPorts[portName].pullMessage(this.fullName);
  10296. // run the task
  10297. const runTask = this._run(gpu);
  10298. if (typeof runTask === 'undefined') return void this._finishExecution(gpu);else return runTask.then(() => this._finishExecution(gpu));
  10299. }
  10300. /**
  10301. * Finish the execution of this node;
  10302. * to be called after execute()
  10303. * @param {SpeedyGPU} gpu
  10304. */
  10305. _finishExecution(gpu) {
  10306. // ensure that no output ports are empty
  10307. for (const portName in this._outputPorts) {
  10308. utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
  10309. }
10310. // log diagnostic data for this node / pipeline
  10311. if (settings/* Settings */.w.logging === 'diagnostic') {
  10312. utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
  10313. // Inspecting the data has performance implications.
  10314. // It is for diagnostic purposes only, not meant to be done in production!
  10315. for (const portName in this._inputPorts) utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
  10316. for (const portName in this._outputPorts) utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
  10317. }
  10318. }
  10319. /**
  10320. * Run the specific task of this node
  10321. * @abstract
  10322. * @param {SpeedyGPU} gpu
  10323. * @returns {void|SpeedyPromise<void>}
  10324. */
  10325. _run(gpu) {
  10326. throw new utils_errors/* AbstractMethodError */.aQ();
  10327. }
  10328. /**
  10329. * Initializes this node
  10330. * @param {SpeedyGPU} gpu
  10331. */
  10332. init(gpu) {
  10333. gpu.subscribe(this._allocateWorkTextures, this, gpu);
  10334. this._allocateWorkTextures(gpu);
  10335. }
  10336. /**
  10337. * Releases this node
  10338. * @param {SpeedyGPU} gpu
  10339. */
  10340. release(gpu) {
  10341. this._deallocateWorkTextures(gpu);
  10342. gpu.unsubscribe(this._allocateWorkTextures, this);
  10343. }
  10344. /**
  10345. * Clear all ports
  10346. */
  10347. clearPorts() {
  10348. let portName;
  10349. for (portName in this._inputPorts) this._inputPorts[portName].clearMessage();
  10350. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10351. }
  10352. /**
  10353. * Find all nodes that feed input to this node
  10354. * @returns {SpeedyPipelineNode[]}
  10355. */
  10356. inputNodes() {
  10357. const nodes = [];
  10358. for (const portName in this._inputPorts) {
  10359. const port = this._inputPorts[portName];
  10360. if (port.incomingLink != null) nodes.push(port.incomingLink.node);
  10361. }
  10362. return nodes;
  10363. }
  10364. /**
  10365. * Is this a source of the pipeline?
  10366. * @returns {boolean}
  10367. */
  10368. isSource() {
  10369. return false;
  10370. }
  10371. /**
  10372. * Is this a sink of the pipeline?
  10373. * @returns {boolean}
  10374. */
  10375. isSink() {
  10376. return false;
  10377. // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
  10378. //return Object.keys(this._outputPorts).length == 0;
  10379. }
  10380. /**
  10381. * Allocate work texture(s)
  10382. * @param {SpeedyGPU} gpu
  10383. */
  10384. _allocateWorkTextures(gpu) {
  10385. for (let j = 0; j < this._tex.length; j++) this._tex[j] = gpu.texturePool.allocate();
  10386. }
  10387. /**
  10388. * Deallocate work texture(s)
  10389. * @param {SpeedyGPU} gpu
  10390. */
  10391. _deallocateWorkTextures(gpu) {
  10392. for (let j = this._tex.length - 1; j >= 0; j--) this._tex[j] = gpu.texturePool.free(this._tex[j]);
  10393. }
  10394. /**
  10395. * Visually inspect a texture for debugging purposes
  10396. * @param {SpeedyGPU} gpu
  10397. * @param {SpeedyDrawableTexture} texture
  10398. */
  10399. _visualize(gpu, texture) {
  10400. const canvas = gpu.renderToCanvas(texture);
  10401. if (!_texView) {
  10402. document.body.appendChild(canvas);
  10403. _texView = true;
  10404. }
  10405. }
  10406. }
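// --- Editor's illustrative sketch (not part of speedy-vision): a minimal pass-through node
// --- built on the classes above. It reads an image message from its input port and writes the
// --- same texture and format to its output port via swrite(). Never instantiated here.
class _SketchPassthroughNode extends SpeedyPipelineNode {
    constructor(name = undefined) {
        super(name, 0, [
            InputPort().expects(SpeedyPipelineMessageType.Image),
            OutputPort().expects(SpeedyPipelineMessageType.Image)
        ]);
    }
    _run(gpu) {
        const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ (this.input().read());
        this.output().swrite(image, format);
    }
}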
  10407. /**
  10408. * Source node (a node with no input ports)
  10409. * @abstract
  10410. */
  10411. class SpeedyPipelineSourceNode extends SpeedyPipelineNode {
  10412. /**
  10413. * Constructor
  10414. * @param {string} [name] the name of this node
  10415. * @param {number} [texCount] number of work textures
  10416. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10417. */
  10418. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10419. super(name, texCount, portBuilders);
  10420. utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
  10421. }
  10422. /**
  10423. * Is this a source of the pipeline?
  10424. * @returns {boolean}
  10425. */
  10426. isSource() {
  10427. return true;
  10428. }
  10429. }
  10430. /**
  10431. * Sink node (a node with no output ports)
  10432. * @abstract
  10433. */
  10434. class SpeedyPipelineSinkNode extends SpeedyPipelineNode {
  10435. /**
  10436. * Constructor
  10437. * @param {string} [name] the name of this node
  10438. * @param {number} [texCount] number of work textures
  10439. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10440. */
  10441. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10442. super(name, texCount, portBuilders);
  10443. utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
  10444. }
  10445. /**
  10446. * Export data from this node to the user
  10447. * @abstract
  10448. * @returns {SpeedyPromise<any>}
  10449. */
  10450. export() {
  10451. throw new utils_errors/* AbstractMethodError */.aQ();
  10452. }
  10453. /**
  10454. * Is this a sink of the pipeline?
  10455. * @returns {boolean}
  10456. */
  10457. isSink() {
  10458. return true;
  10459. }
  10460. }
  10461. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
  10462. /*
  10463. * speedy-vision.js
  10464. * GPU-accelerated Computer Vision for JavaScript
  10465. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10466. *
  10467. * Licensed under the Apache License, Version 2.0 (the "License");
  10468. * you may not use this file except in compliance with the License.
  10469. * You may obtain a copy of the License at
  10470. *
  10471. * http://www.apache.org/licenses/LICENSE-2.0
  10472. *
  10473. * Unless required by applicable law or agreed to in writing, software
  10474. * distributed under the License is distributed on an "AS IS" BASIS,
  10475. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10476. * See the License for the specific language governing permissions and
  10477. * limitations under the License.
  10478. *
10479. * speedy-keypoint-match.js
  10480. * A match between two keypoint descriptors
  10481. */
  10482. // Constants
  10483. const MATCH_NOT_FOUND = -1;
  10484. /**
  10485. * A match between two keypoint descriptors
  10486. */
  10487. class SpeedyKeypointMatch {
  10488. /**
  10489. * Constructor
  10490. * @param {number} index index of the stored keypoint, a non-negative integer
  10491. * @param {number} distance a measure of the quality of the match, a non-negative number
  10492. */
  10493. constructor(index, distance) {
  10494. const isValid = distance < globals.MATCH_MAX_DISTANCE;
  10495. /** @type {number} index of the stored keypoint */
  10496. this._index = isValid ? index | 0 : MATCH_NOT_FOUND;
  10497. /** @type {number} a measure of the quality of the match */
  10498. this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
  10499. // done!
  10500. return Object.freeze(this);
  10501. }
  10502. /**
  10503. * The index of the stored keypoint
  10504. * @returns {number}
  10505. */
  10506. get index() {
  10507. return this._index;
  10508. }
  10509. /**
  10510. * A measure of the quality of the match (lower values indicate better matches)
  10511. * @returns {number}
  10512. */
  10513. get distance() {
  10514. return this._distance;
  10515. }
  10516. /**
  10517. * A string representation of the keypoint match
  10518. * @returns {string}
  10519. */
  10520. toString() {
  10521. return `SpeedyKeypointMatch(${this.index},${this.distance})`;
  10522. }
  10523. }
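// --- Editor's illustrative sketch (not part of speedy-vision): a distance at or beyond
// --- globals.MATCH_MAX_DISTANCE yields an invalid match (index MATCH_NOT_FOUND, infinite
// --- distance). The numeric values below are made up for illustration.
function _sketchKeypointMatch() {
    const good = new SpeedyKeypointMatch(7, 12); // valid only if 12 < globals.MATCH_MAX_DISTANCE
    const bad = new SpeedyKeypointMatch(7, globals.MATCH_MAX_DISTANCE);
    return [good.index, bad.index]; // e.g. [7, -1]
}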
  10524. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
  10525. /*
  10526. * speedy-vision.js
  10527. * GPU-accelerated Computer Vision for JavaScript
  10528. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10529. *
  10530. * Licensed under the Apache License, Version 2.0 (the "License");
  10531. * you may not use this file except in compliance with the License.
  10532. * You may obtain a copy of the License at
  10533. *
  10534. * http://www.apache.org/licenses/LICENSE-2.0
  10535. *
  10536. * Unless required by applicable law or agreed to in writing, software
  10537. * distributed under the License is distributed on an "AS IS" BASIS,
  10538. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10539. * See the License for the specific language governing permissions and
  10540. * limitations under the License.
  10541. *
  10542. * speedy-keypoint.js
  10543. * Keypoint class
  10544. */
  10545. /**
  10546. * Represents a keypoint
  10547. */
  10548. class SpeedyKeypoint {
  10549. /**
  10550. * Constructor
  10551. * @param {number} x X position
  10552. * @param {number} y Y position
  10553. * @param {number} [lod] Level-of-detail
  10554. * @param {number} [rotation] Rotation in radians
  10555. * @param {number} [score] Cornerness measure
  10556. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10557. */
  10558. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null) {
  10559. /** @type {SpeedyPoint2} keypoint position */
  10560. this._position = new SpeedyPoint2(+x, +y);
  10561. /** @type {number} level of detail */
  10562. this._lod = +lod;
  10563. /** @type {number} rotation in radians */
  10564. this._rotation = +rotation;
  10565. /** @type {number} a cornerness measure */
  10566. this._score = +score;
  10567. /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
  10568. this._descriptor = descriptor;
  10569. }
  10570. /**
  10571. * Converts this keypoint to a descriptive string
  10572. * @returns {string}
  10573. */
  10574. toString() {
  10575. return `SpeedyKeypoint(${this.x},${this.y})`;
  10576. }
  10577. /**
  10578. * The position of this keypoint
  10579. * @returns {SpeedyPoint2}
  10580. */
  10581. get position() {
  10582. return this._position;
  10583. }
  10584. /**
  10585. * The x-position of this keypoint
  10586. * @returns {number}
  10587. */
  10588. get x() {
  10589. return this._position.x;
  10590. }
  10591. /**
  10592. * The x-position of this keypoint
  10593. * @param {number} value
  10594. */
  10595. set x(value) {
  10596. this._position.x = +value;
  10597. }
  10598. /**
  10599. * The y-position of this keypoint
  10600. * @returns {number}
  10601. */
  10602. get y() {
  10603. return this._position.y;
  10604. }
  10605. /**
  10606. * The y-position of this keypoint
  10607. * @param {number} value
  10608. */
  10609. set y(value) {
  10610. this._position.y = +value;
  10611. }
  10612. /**
  10613. * The pyramid level-of-detail from which this keypoint was extracted
  10614. * @returns {number}
  10615. */
  10616. get lod() {
  10617. return this._lod;
  10618. }
  10619. /**
  10620. * Scale: 2^lod
  10621. * @returns {number}
  10622. */
  10623. get scale() {
  10624. return Math.pow(2, this._lod);
  10625. }
  10626. /**
  10627. * The orientation of the keypoint, in radians
  10628. * @returns {number} Angle in radians
  10629. */
  10630. get rotation() {
  10631. return this._rotation;
  10632. }
  10633. /**
  10634. * Score: a cornerness measure
  10635. * @returns {number} Score
  10636. */
  10637. get score() {
  10638. return this._score;
  10639. }
  10640. /**
  10641. * Keypoint descriptor
  10642. * @return {SpeedyKeypointDescriptor|null}
  10643. */
  10644. get descriptor() {
  10645. return this._descriptor;
  10646. }
  10647. }
  10648. /**
  10649. * Represents a tracked keypoint
  10650. */
  10651. class SpeedyTrackedKeypoint extends SpeedyKeypoint {
  10652. /**
  10653. * Constructor
  10654. * @param {number} x X position
  10655. * @param {number} y Y position
  10656. * @param {number} [lod] Level-of-detail
  10657. * @param {number} [rotation] Rotation in radians
  10658. * @param {number} [score] Cornerness measure
  10659. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10660. * @param {SpeedyVector2} [flow] flow vector
  10661. */
  10662. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0, 0)) {
  10663. super(x, y, lod, rotation, score, descriptor);
  10664. /** @type {SpeedyVector2} flow vector */
  10665. this._flow = flow;
  10666. }
  10667. /**
  10668. * Flow vector
  10669. * @returns {SpeedyVector2}
  10670. */
  10671. get flow() {
  10672. return this._flow;
  10673. }
  10674. }
  10675. /**
  10676. * Represents a matched keypoint
  10677. */
  10678. class SpeedyMatchedKeypoint extends SpeedyKeypoint {
  10679. /**
  10680. * Constructor
  10681. * @param {number} x X position
  10682. * @param {number} y Y position
  10683. * @param {number} [lod] Level-of-detail
  10684. * @param {number} [rotation] Rotation in radians
  10685. * @param {number} [score] Cornerness measure
  10686. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10687. * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
  10688. */
  10689. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = []) {
  10690. super(x, y, lod, rotation, score, descriptor);
  10691. /** @type {SpeedyKeypointMatch[]} keypoint matches */
  10692. this._matches = matches;
  10693. }
  10694. /**
  10695. * Keypoint matches
  10696. * @returns {SpeedyKeypointMatch[]}
  10697. */
  10698. get matches() {
  10699. return this._matches;
  10700. }
  10701. }
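// --- Editor's illustrative sketch (not part of speedy-vision): constructing keypoints. The
// --- scale getter is 2^lod, so a keypoint at lod 1 reports scale 2. Coordinates and the flow
// --- vector are arbitrary example values.
function _sketchKeypoints() {
    const kp = new SpeedyKeypoint(10, 20, 1.0, 0.0, 0.5);
    const tracked = new SpeedyTrackedKeypoint(10, 20, 0, 0, 0, null, new SpeedyVector2(1, -1));
    return [kp.scale, tracked.flow.x]; // [2, 1]
}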
  10702. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
  10703. /*
  10704. * speedy-vision.js
  10705. * GPU-accelerated Computer Vision for JavaScript
  10706. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10707. *
  10708. * Licensed under the Apache License, Version 2.0 (the "License");
  10709. * you may not use this file except in compliance with the License.
  10710. * You may obtain a copy of the License at
  10711. *
  10712. * http://www.apache.org/licenses/LICENSE-2.0
  10713. *
  10714. * Unless required by applicable law or agreed to in writing, software
  10715. * distributed under the License is distributed on an "AS IS" BASIS,
  10716. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10717. * See the License for the specific language governing permissions and
  10718. * limitations under the License.
  10719. *
  10720. * pipeline.js
  10721. * A pipeline is a network of nodes in which data flows to a sink
  10722. */
  10723. /**
  10724. * A dictionary indexed by the names of the sink nodes
  10725. * @typedef {Object<string,any>} SpeedyPipelineOutput
  10726. */
  10727. /** @type {SpeedyGPU} shared GPU programs & textures */
  10728. let gpu = null;
  10729. /** @type {number} gpu reference count */
  10730. let referenceCount = 0;
  10731. /**
  10732. * A pipeline is a network of nodes in which data flows to a sink
  10733. */
  10734. class SpeedyPipeline {
  10735. /**
  10736. * Constructor
  10737. */
  10738. constructor() {
  10739. /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
  10740. this._nodes = [];
  10741. /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
  10742. this._sequence = [];
  10743. /** @type {boolean} are we running the pipeline at this moment? */
  10744. this._busy = false;
  10745. }
  10746. /**
  10747. * Find a node by its name
  10748. * @template T extends SpeedyPipelineNode
  10749. * @param {string} name
  10750. * @returns {T|null}
  10751. */
  10752. node(name) {
  10753. for (let i = 0, n = this._nodes.length; i < n; i++) {
  10754. if (this._nodes[i].name === name) return this._nodes[i];
  10755. }
  10756. return null;
  10757. }
  10758. /**
  10759. * Initialize the pipeline
  10760. * @param {...SpeedyPipelineNode} nodes
  10761. * @returns {SpeedyPipeline} this pipeline
  10762. */
  10763. init(...nodes) {
  10764. // validate
  10765. if (this._nodes.length > 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);else if (nodes.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
  10766. // create a GPU instance and increase the reference count
  10767. if (0 == referenceCount++) {
  10768. utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
  10769. gpu = new SpeedyGPU();
  10770. }
  10771. // add nodes to the network
  10772. for (let i = 0; i < nodes.length; i++) {
  10773. const node = nodes[i];
  10774. if (!this._nodes.includes(node)) this._nodes.push(node);
  10775. }
  10776. // generate the sequence of nodes
  10777. this._sequence = SpeedyPipeline._tsort(this._nodes);
  10778. SpeedyPipeline._validateSequence(this._sequence);
  10779. // initialize nodes
  10780. for (let i = 0; i < this._sequence.length; i++) this._sequence[i].init(gpu);
  10781. // done!
  10782. return this;
  10783. }
  10784. /**
  10785. * Release the resources associated with this pipeline
  10786. * @returns {null}
  10787. */
  10788. release() {
  10789. if (this._nodes.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
  10790. // release nodes
  10791. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].release(gpu);
  10792. this._sequence.length = 0;
  10793. this._nodes.length = 0;
  10794. // decrease reference count and release GPU if necessary
  10795. if (0 == --referenceCount) gpu = gpu.release();
  10796. // done!
  10797. return null;
  10798. }
  10799. /**
  10800. * Run the pipeline
  10801. * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
  10802. */
  10803. run() {
  10804. utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
  10805. // is the pipeline busy?
  10806. if (this._busy) {
  10807. // if so, we need to wait 'til it finishes
  10808. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  10809. setTimeout(() => this.run().then(resolve, reject), 0);
  10810. });
  10811. } else {
  10812. // the pipeline is now busy and won't accept concurrent tasks
  10813. // (we allocate textures using a single pool)
  10814. this._busy = true;
  10815. }
  10816. // find the sinks
  10817. const sinks = /** @type {SpeedyPipelineSinkNode[]} */this._sequence.filter(node => node.isSink());
  10818. // create output template
  10819. const template = SpeedyPipeline._createOutputTemplate(sinks);
  10820. // diagnostic log
  10821. if (settings/* Settings */.w.logging === 'diagnostic') utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
  10822. // run the pipeline
  10823. return SpeedyPipeline._runSequence(this._sequence).then(() =>
  10824. // export results
  10825. speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
  10826. // aggregate results by the names of the sinks
  10827. results.reduce((obj, val, idx) => (obj[sinks[idx].name] = val, obj), template))).finally(() => {
  10828. // clear all ports
  10829. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].clearPorts();
  10830. // the pipeline is no longer busy
  10831. this._busy = false;
  10832. // diagnostic log
  10833. if (settings/* Settings */.w.logging === 'diagnostic') {
  10834. utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
  10835. Object.keys(template).forEach(entry => {
  10836. utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
  10837. });
  10838. }
  10839. }).turbocharge();
  10840. }
  10841. /**
  10842. * @internal
  10843. *
  10844. * GPU instance
  10845. * @returns {SpeedyGPU}
  10846. */
  10847. get _gpu() {
  10848. return gpu;
  10849. }
  10850. /**
  10851. * Execute the tasks of a sequence of nodes
  10852. * @param {SpeedyPipelineNode[]} sequence sequence of nodes
  10853. * @param {number} [i] in [0,n)
  10854. * @param {number} [n] number of nodes
  10855. * @returns {SpeedyPromise<void>}
  10856. */
  10857. static _runSequence(sequence, i = 0, n = sequence.length) {
  10858. for (; i < n; i++) {
  10859. const runTask = sequence[i].execute(gpu);
  10860. // this call greatly improves performance when downloading pixel data using PBOs
  10861. gpu.gl.flush();
  10862. if (typeof runTask !== 'undefined') return runTask.then(() => SpeedyPipeline._runSequence(sequence, i + 1, n));
  10863. }
  10864. return speedy_promise/* SpeedyPromise */.i.resolve();
  10865. }
  10866. /**
  10867. * Topological sorting
  10868. * @param {SpeedyPipelineNode[]} nodes
  10869. * @returns {SpeedyPipelineNode[]}
  10870. */
  10871. static _tsort(nodes) {
  10872. /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
  10873. const outlinks = SpeedyPipeline._outlinks(nodes);
  10874. const stack = nodes.map(node => ( /** @type {StackNode} */[node, false]));
  10875. const trash = new Set();
  10876. const sorted = new Array(nodes.length);
  10877. let j = sorted.length;
  10878. while (stack.length > 0) {
  10879. const [node, done] = stack.pop();
  10880. if (!done) {
  10881. if (!trash.has(node)) {
  10882. const outnodes = outlinks.get(node);
  10883. trash.add(node);
  10884. stack.push([node, true]);
  10885. stack.push(...outnodes.map(node => ( /** @type {StackNode} */[node, false])));
  10886. if (outnodes.some(node => trash.has(node) && !sorted.includes(node))) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
  10887. }
  10888. } else sorted[--j] = node;
  10889. }
  10890. return sorted;
  10891. }
  10892. /**
  10893. * Figure out the outgoing links of all nodes
  10894. * @param {SpeedyPipelineNode[]} nodes
  10895. * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
  10896. */
  10897. static _outlinks(nodes) {
  10898. const outlinks = new Map();
  10899. for (let k = 0; k < nodes.length; k++) outlinks.set(nodes[k], []);
  10900. for (let i = 0; i < nodes.length; i++) {
  10901. const to = nodes[i];
  10902. const inputs = to.inputNodes();
  10903. for (let j = 0; j < inputs.length; j++) {
  10904. const from = inputs[j];
  10905. const links = outlinks.get(from);
  10906. if (!links) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
  10907. if (!links.includes(to)) links.push(to);
  10908. }
  10909. }
  10910. return outlinks;
  10911. }
  10912. /**
  10913. * Generate the output template by aggregating the names of the sinks
  10914. * @param {SpeedyPipelineNode[]} [sinks]
  10915. * @returns {SpeedyPipelineOutput}
  10916. */
  10917. static _createOutputTemplate(sinks = []) {
  10918. const template = Object.create(null);
  10919. for (let i = sinks.length - 1; i >= 0; i--) template[sinks[i].name] = null;
  10920. return template;
  10921. }
  10922. /**
  10923. * Validate a sequence of nodes
  10924. * @param {SpeedyPipelineNode[]} sequence
  10925. */
  10926. static _validateSequence(sequence) {
  10927. if (sequence.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);else if (!sequence[0].isSource()) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);else if (!sequence.find(node => node.isSink())) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
  10928. }
  10929. }
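// --- Editor's illustrative sketch (not part of speedy-vision): wiring and running a pipeline
// --- with the image source and sink nodes defined below. `media` is assumed to be a
// --- SpeedyMedia instance loaded by the caller; results are keyed by the sink node's name.
function _sketchRunPipeline(media) {
    const source = new SpeedyPipelineNodeImageSource('source');
    const sink = new SpeedyPipelineNodeImageSink('image');
    source.media = media;
    source.output().connectTo(sink.input());
    const pipeline = new SpeedyPipeline().init(source, sink);
    return pipeline.run().then(result => {
        pipeline.release();
        return result.image;
    });
}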
  10930. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
  10931. /*
  10932. * speedy-vision.js
  10933. * GPU-accelerated Computer Vision for JavaScript
  10934. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10935. *
  10936. * Licensed under the Apache License, Version 2.0 (the "License");
  10937. * you may not use this file except in compliance with the License.
  10938. * You may obtain a copy of the License at
  10939. *
  10940. * http://www.apache.org/licenses/LICENSE-2.0
  10941. *
  10942. * Unless required by applicable law or agreed to in writing, software
  10943. * distributed under the License is distributed on an "AS IS" BASIS,
  10944. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10945. * See the License for the specific language governing permissions and
  10946. * limitations under the License.
  10947. *
10948. * source.js
  10949. * Gets an image into a pipeline
  10950. */
  10951. // Constants
  10952. const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
  10953. /**
  10954. * Gets an image into a pipeline
  10955. */
  10956. class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode {
  10957. /**
  10958. * Constructor
  10959. * @param {string} [name] name of the node
  10960. */
  10961. constructor(name = undefined) {
  10962. super(name, UPLOAD_BUFFER_SIZE, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  10963. /** @type {SpeedyMedia|null} source media */
  10964. this._media = null;
  10965. /** @type {number} texture index */
  10966. this._textureIndex = 0;
  10967. }
  10968. /**
  10969. * Source media
  10970. * @returns {SpeedyMedia|null}
  10971. */
  10972. get media() {
  10973. return this._media;
  10974. }
  10975. /**
  10976. * Source media
  10977. * @param {SpeedyMedia|null} media
  10978. */
  10979. set media(media) {
  10980. if (media !== null && !(media instanceof SpeedyMedia)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
  10981. this._media = media;
  10982. }
  10983. /**
  10984. * Run the specific task of this node
  10985. * @param {SpeedyGPU} gpu
  10986. * @returns {void|SpeedyPromise<void>}
  10987. */
  10988. _run(gpu) {
  10989. if (this._media == null) throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
  10990. // use round-robin to mitigate WebGL's implicit synchronization
  10991. // and maybe minimize texture upload times
  10992. this._textureIndex = (this._textureIndex + 1) % this._tex.length;
  10993. // upload texture
  10994. const outputTexture = this._tex[this._textureIndex];
  10995. gpu.upload(this._media._source, outputTexture);
  10996. this.output().swrite(outputTexture, this._media._format);
  10997. }
  10998. }
  10999. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
  11000. /*
  11001. * speedy-vision.js
  11002. * GPU-accelerated Computer Vision for JavaScript
  11003. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11004. *
  11005. * Licensed under the Apache License, Version 2.0 (the "License");
  11006. * you may not use this file except in compliance with the License.
  11007. * You may obtain a copy of the License at
  11008. *
  11009. * http://www.apache.org/licenses/LICENSE-2.0
  11010. *
  11011. * Unless required by applicable law or agreed to in writing, software
  11012. * distributed under the License is distributed on an "AS IS" BASIS,
  11013. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11014. * See the License for the specific language governing permissions and
  11015. * limitations under the License.
  11016. *
11017. * sink.js
  11018. * Gets an image out of a pipeline
  11019. */
  11020. /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
  11021. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
  11022. const DEFAULT_MEDIA_TYPE = "bitmap";
  11023. /**
  11024. * Gets an image out of a pipeline
  11025. */
  11026. class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode {
  11027. /**
  11028. * Constructor
  11029. * @param {string} [name] name of the node
  11030. */
  11031. constructor(name = 'image') {
  11032. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11033. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
  11034. this._mediaType = DEFAULT_MEDIA_TYPE;
11035. /** @type {ImageBitmap|null} output bitmap */
11036. this._bitmap = null;
11037. /** @type {ImageData|null} output pixel data */
11038. this._data = null;
  11039. /** @type {ImageFormat} output format */
  11040. this._format = types/* ImageFormat */.f5.RGBA;
  11041. /** @type {SpeedyTextureReader} texture reader */
  11042. this._textureReader = new SpeedyTextureReader(1);
  11043. }
  11044. /**
  11045. * The media type that is exported from this node
  11046. * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
  11047. */
  11048. get mediaType() {
  11049. return this._mediaType;
  11050. }
  11051. /**
  11052. * The media type that is exported from this node
  11053. * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
  11054. */
  11055. set mediaType(value) {
  11056. if (value != 'bitmap' && value != 'data') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
  11057. this._mediaType = value;
  11058. }
  11059. /**
  11060. * Initializes this node
  11061. * @param {SpeedyGPU} gpu
  11062. */
  11063. init(gpu) {
  11064. super.init(gpu);
  11065. this._textureReader.init(gpu);
  11066. }
  11067. /**
  11068. * Releases this node
  11069. * @param {SpeedyGPU} gpu
  11070. */
  11071. release(gpu) {
  11072. this._textureReader.release(gpu);
  11073. super.release(gpu);
  11074. }
  11075. /**
  11076. * Export data from this node to the user
  11077. * @returns {SpeedyPromise<SpeedyMedia>}
  11078. */
  11079. export() {
  11080. const bitmapOrData = this._mediaType != 'data' ? this._bitmap : this._data;
  11081. utils/* Utils */.A.assert(bitmapOrData != null);
  11082. return SpeedyMedia.load(bitmapOrData, {
  11083. format: this._format
  11084. }, false);
  11085. }
  11086. /**
  11087. * Run the specific task of this node
  11088. * @param {SpeedyGPU} gpu
  11089. * @returns {void|SpeedyPromise<void>}
  11090. */
  11091. _run(gpu) {
  11092. const {
  11093. image,
  11094. format
  11095. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11096. if (this._mediaType != 'data') {
  11097. /* Create an ImageBitmap (default) */
  11098. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  11099. const canvas = gpu.renderToCanvas(image);
  11100. createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
  11101. this._bitmap = bitmap;
  11102. this._format = format;
  11103. this._data = null;
  11104. resolve();
  11105. });
  11106. });
  11107. } else {
  11108. /* Create an ImageData */
  11109. return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
  11110. const dataArray = new Uint8ClampedArray(pixels.buffer);
  11111. this._data = new ImageData(dataArray, image.width, image.height);
  11112. this._format = format;
  11113. this._bitmap = null;
  11114. });
  11115. }
  11116. }
  11117. }
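/*
 * Usage sketch. The sink defined above exports either an ImageBitmap (default)
 * or an ImageData, depending on mediaType:
 *
 *   const sink = new SpeedyPipelineNodeImageSink('image');
 *   sink.mediaType = 'data';   // 'bitmap' (default) or 'data'; anything else throws
 *   // after the pipeline runs, export() wraps the bitmap/data in a SpeedyMedia;
 *   // with 'data', pixels are read back via SpeedyTextureReader.readPixelsAsync()
 *   // and packed into an ImageData of the input image's dimensions
 */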
  11118. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
  11119. /*
  11120. * speedy-vision.js
  11121. * GPU-accelerated Computer Vision for JavaScript
  11122. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11123. *
  11124. * Licensed under the Apache License, Version 2.0 (the "License");
  11125. * you may not use this file except in compliance with the License.
  11126. * You may obtain a copy of the License at
  11127. *
  11128. * http://www.apache.org/licenses/LICENSE-2.0
  11129. *
  11130. * Unless required by applicable law or agreed to in writing, software
  11131. * distributed under the License is distributed on an "AS IS" BASIS,
  11132. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11133. * See the License for the specific language governing permissions and
  11134. * limitations under the License.
  11135. *
  11136. * multiplexer.js
  11137. * Image multiplexer
  11138. */
  11139. /** @type {string[]} the names of the input ports indexed by their number */
  11140. const INPUT_PORT = ['in0', 'in1'];
  11141. /**
  11142. * Image multiplexer
  11143. */
  11144. class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode {
  11145. /**
  11146. * Constructor
  11147. * @param {string} [name] name of the node
  11148. */
  11149. constructor(name = undefined) {
  11150. super(name, 0, [...INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image)), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11151. /** @type {number} which port should be linked to the output? */
  11152. this._port = 0;
  11153. }
  11154. /**
  11155. * The number of the port that should be linked to the output
  11156. * @returns {number}
  11157. */
  11158. get port() {
  11159. return this._port;
  11160. }
  11161. /**
  11162. * The number of the port that should be linked to the output
  11163. * @param {number} port
  11164. */
  11165. set port(port) {
  11166. if (port < 0 || port >= INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  11167. this._port = port | 0;
  11168. }
  11169. /**
  11170. * Run the specific task of this node
  11171. * @param {SpeedyGPU} gpu
  11172. * @returns {void|SpeedyPromise<void>}
  11173. */
  11174. _run(gpu) {
  11175. const message = this.input(INPUT_PORT[this._port]).read();
  11176. this.output().write(message);
  11177. }
  11178. }
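/*
 * Usage sketch. The multiplexer defined above has two input ports ('in0', 'in1')
 * and forwards exactly one of them to its output:
 *
 *   const mux = new SpeedyPipelineNodeImageMultiplexer();
 *   mux.port = 1;   // forward 'in1'; values outside [0, 1] throw IllegalArgumentError
 */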
  11179. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
  11180. /*
  11181. * speedy-vision.js
  11182. * GPU-accelerated Computer Vision for JavaScript
  11183. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11184. *
  11185. * Licensed under the Apache License, Version 2.0 (the "License");
  11186. * you may not use this file except in compliance with the License.
  11187. * You may obtain a copy of the License at
  11188. *
  11189. * http://www.apache.org/licenses/LICENSE-2.0
  11190. *
  11191. * Unless required by applicable law or agreed to in writing, software
  11192. * distributed under the License is distributed on an "AS IS" BASIS,
  11193. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11194. * See the License for the specific language governing permissions and
  11195. * limitations under the License.
  11196. *
  11197. * buffer.js
  11198. * Image Buffer
  11199. */
  11200. /**
  11201. * Image Buffer: a node with memory.
  11202. * At time t, it outputs the image received at time t-1
  11203. */
  11204. class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode {
  11205. /**
  11206. * Constructor
  11207. * @param {string} [name] name of the node
  11208. */
  11209. constructor(name = undefined) {
  11210. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11211. /** @type {number} current page: 0 or 1 */
  11212. this._pageIndex = 0;
  11213. /** @type {boolean} first run? */
  11214. this._initialized = false;
  11215. /** @type {ImageFormat} previous image format */
  11216. this._previousFormat = types/* ImageFormat */.f5.RGBA;
  11217. /** @type {boolean} frozen buffer? */
  11218. this._frozen = false;
  11219. }
  11220. /**
  11221. * A frozen buffer discards the input, effectively increasing the buffering time
  11222. * @returns {boolean}
  11223. */
  11224. get frozen() {
  11225. return this._frozen;
  11226. }
  11227. /**
  11228. * A frozen buffer discards the input, effectively increasing the buffering time
  11229. * @param {boolean} value
  11230. */
  11231. set frozen(value) {
  11232. this._frozen = Boolean(value);
  11233. }
  11234. /**
  11235. * Releases this node
  11236. * @param {SpeedyGPU} gpu
  11237. */
  11238. release(gpu) {
  11239. this._initialized = false;
  11240. super.release(gpu);
  11241. }
  11242. /**
  11243. * Run the specific task of this node
  11244. * @param {SpeedyGPU} gpu
  11245. * @returns {void|SpeedyPromise<void>}
  11246. */
  11247. _run(gpu) {
  11248. const {
  11249. image,
  11250. format
  11251. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11252. const previousFormat = this._previousFormat;
  11253. const page = this._tex;
  11254. const previousInputTexture = page[1 - this._pageIndex];
  11255. const outputTexture = page[this._pageIndex];
  11256. // can't store pyramids
  11257. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
  11258. // bufferize
  11259. if (!this._frozen || !this._initialized) {
  11260. // store input
  11261. this._previousFormat = format;
  11262. previousInputTexture.resize(image.width, image.height);
  11263. image.copyTo(previousInputTexture);
  11264. // page flipping
  11265. this._pageIndex = 1 - this._pageIndex;
  11266. }
  11267. // first run?
  11268. if (!this._initialized) {
  11269. this._initialized = true;
  11270. this.output().swrite(previousInputTexture, format);
  11271. return;
  11272. }
  11273. // done!
  11274. this.output().swrite(outputTexture, previousFormat);
  11275. }
  11276. }
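/*
 * Usage sketch. The buffer defined above delays the image stream by one frame:
 * on the first run it passes a copy of the input through; afterwards, the image
 * written at time t is the one received at time t-1 (page flipping between two textures).
 *
 *   const buffer = new SpeedyPipelineNodeImageBuffer();
 *   buffer.frozen = true;   // stop accepting new input, keep serving the stored frame
 *   buffer.frozen = false;  // resume buffering on the next run
 *   // note: pyramids (textures with mipmaps) cannot be buffered - NotSupportedError
 */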
  11277. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
  11278. /*
  11279. * speedy-vision.js
  11280. * GPU-accelerated Computer Vision for JavaScript
  11281. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11282. *
  11283. * Licensed under the Apache License, Version 2.0 (the "License");
  11284. * you may not use this file except in compliance with the License.
  11285. * You may obtain a copy of the License at
  11286. *
  11287. * http://www.apache.org/licenses/LICENSE-2.0
  11288. *
  11289. * Unless required by applicable law or agreed to in writing, software
  11290. * distributed under the License is distributed on an "AS IS" BASIS,
  11291. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11292. * See the License for the specific language governing permissions and
  11293. * limitations under the License.
  11294. *
  11295. * pyramid.js
  11296. * Generate pyramid
  11297. */
  11298. // Constants
  11299. const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; //14; // supposing image size <= 8K = 2^13 (downto 1)
  11300. const MAX_TEXTURES = 2 * MAX_LEVELS; //MAX_LEVELS;
  11301. /**
  11302. * Generate pyramid
  11303. */
  11304. class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode {
  11305. /**
  11306. * Constructor
  11307. * @param {string} [name] name of the node
  11308. */
  11309. constructor(name = undefined) {
  11310. super(name, MAX_TEXTURES + 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11311. }
  11312. /**
  11313. * Run the specific task of this node
  11314. * @param {SpeedyGPU} gpu
  11315. * @returns {void|SpeedyPromise<void>}
  11316. */
  11317. _run(gpu) {
  11318. const {
  11319. image,
  11320. format
  11321. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11322. const outputTexture = this._tex[0];
  11323. const pyramids = gpu.programs.pyramids;
  11324. let width = image.width,
  11325. height = image.height;
  11326. // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  11327. const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
  11328. // get work textures
  11329. const mip = new Array(MAX_TEXTURES + 1);
  11330. for (let i = MAX_TEXTURES; i >= 1; i--) mip[i - 1] = this._tex[i];
  11331. // get a copy of the input image
  11332. mip[0].resize(width, height);
  11333. image.copyTo(mip[0]);
  11334. // generate gaussian pyramid
  11335. const numLevels = Math.min(mipLevels, MAX_LEVELS);
  11336. for (let level = 1; level < numLevels; level++) {
11337. // use max(1, floor(size / 2^lod)), in accordance with
  11338. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  11339. const halfWidth = Math.max(1, width >>> 1);
  11340. const halfHeight = Math.max(1, height >>> 1);
  11341. // reduce operation
  11342. const tmp = level - 1 + MAX_LEVELS;
  11343. pyramids.smoothX.outputs(width, height, mip[tmp])(mip[level - 1]);
  11344. pyramids.smoothY.outputs(width, height, mip[level - 1])(mip[tmp]);
  11345. pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level])(mip[level - 1]);
  11346. /*
  11347. (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
  11348. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
  11349. */
  11350. // flush
  11351. gpu.gl.flush();
  11352. // next level
  11353. width = halfWidth;
  11354. height = halfHeight;
  11355. /*
  11356. // debug: view pyramid
  11357. const view = mip[level-1];
  11358. const canvas = gpu.renderToCanvas(view);
  11359. if(!window._ww) document.body.appendChild(canvas);
  11360. window._ww = 1;
  11361. */
  11362. }
  11363. // copy to output & set mipmap
  11364. outputTexture.resize(image.width, image.height);
  11365. outputTexture.clear();
  11366. image.copyTo(outputTexture);
  11367. outputTexture.generateMipmaps(mip.slice(0, numLevels));
  11368. // done!
  11369. this.output().swrite(outputTexture, format);
  11370. }
  11371. }
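/*
 * Worked example for the level count used above: mipLevels = 1 + floor(log2(max(width, height))).
 * For a 640x480 input, mipLevels = 1 + floor(log2(640)) = 1 + 9 = 10, so the node
 * generates numLevels = min(10, MAX_LEVELS) Gaussian pyramid levels, halving the
 * dimensions (never below 1 pixel) at each level before attaching the levels to the
 * output texture as custom mipmaps via generateMipmaps().
 */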
  11372. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
  11373. /*
  11374. * speedy-vision.js
  11375. * GPU-accelerated Computer Vision for JavaScript
  11376. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11377. *
  11378. * Licensed under the Apache License, Version 2.0 (the "License");
  11379. * you may not use this file except in compliance with the License.
  11380. * You may obtain a copy of the License at
  11381. *
  11382. * http://www.apache.org/licenses/LICENSE-2.0
  11383. *
  11384. * Unless required by applicable law or agreed to in writing, software
  11385. * distributed under the License is distributed on an "AS IS" BASIS,
  11386. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11387. * See the License for the specific language governing permissions and
  11388. * limitations under the License.
  11389. *
  11390. * mixer.js
  11391. * Image Mixer
  11392. */
  11393. /**
  11394. * Image Mixer
  11395. */
  11396. class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode {
  11397. /**
  11398. * Constructor
  11399. * @param {string} [name] name of the node
  11400. */
  11401. constructor(name = undefined) {
  11402. super(name, 1, [InputPort('in0').expects(SpeedyPipelineMessageType.Image), InputPort('in1').expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11403. /** @type {number} alpha coefficient (applied to image0) */
  11404. this._alpha = 0.5;
  11405. /** @type {number} beta coefficient (applied to image1) */
  11406. this._beta = 0.5;
  11407. /** @type {number} gamma coefficient (brightness control) */
  11408. this._gamma = 0.0;
  11409. }
  11410. /**
  11411. * Alpha coefficient (applied to image0)
  11412. * @returns {number}
  11413. */
  11414. get alpha() {
  11415. return this._alpha;
  11416. }
  11417. /**
  11418. * Alpha coefficient (applied to image0)
  11419. * @param {number} value
  11420. */
  11421. set alpha(value) {
  11422. this._alpha = +value;
  11423. }
  11424. /**
  11425. * Beta coefficient (applied to image1)
  11426. * @returns {number}
  11427. */
  11428. get beta() {
  11429. return this._beta;
  11430. }
  11431. /**
  11432. * Beta coefficient (applied to image1)
  11433. * @param {number} value
  11434. */
  11435. set beta(value) {
  11436. this._beta = +value;
  11437. }
  11438. /**
  11439. * Gamma coefficient (brightness control)
  11440. * @returns {number}
  11441. */
  11442. get gamma() {
  11443. return this._gamma;
  11444. }
  11445. /**
  11446. * Gamma coefficient (brightness control)
  11447. * @param {number} value
  11448. */
  11449. set gamma(value) {
  11450. this._gamma = +value;
  11451. }
  11452. /**
  11453. * Run the specific task of this node
  11454. * @param {SpeedyGPU} gpu
  11455. * @returns {void|SpeedyPromise<void>}
  11456. */
  11457. _run(gpu) {
  11458. const in0 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in0').read();
  11459. const in1 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in1').read();
  11460. const image0 = in0.image,
  11461. image1 = in1.image;
  11462. const format0 = in0.format,
  11463. format1 = in1.format;
  11464. const width = Math.max(image0.width, image1.width);
  11465. const height = Math.max(image0.height, image1.height);
  11466. const alpha = this._alpha,
  11467. beta = this._beta,
  11468. gamma = this._gamma;
  11469. const outputTexture = this._tex[0];
  11470. if (format0 != format1) throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
  11471. gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
  11472. gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
  11473. this.output().swrite(outputTexture, format0);
  11474. }
  11475. }
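/*
 * Usage sketch. Per the coefficient descriptions above, the additiveMix program
 * is expected to blend the two inputs roughly as alpha * in0 + beta * in1 + gamma
 * (gamma acting as a brightness offset); both inputs must share the same format.
 *
 *   const mixer = new SpeedyPipelineNodeImageMixer();
 *   mixer.alpha = 0.7;   // weight of 'in0'
 *   mixer.beta  = 0.3;   // weight of 'in1'
 *   mixer.gamma = 0.0;   // brightness offset
 */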
  11476. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
  11477. /*
  11478. * speedy-vision.js
  11479. * GPU-accelerated Computer Vision for JavaScript
  11480. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11481. *
  11482. * Licensed under the Apache License, Version 2.0 (the "License");
  11483. * you may not use this file except in compliance with the License.
  11484. * You may obtain a copy of the License at
  11485. *
  11486. * http://www.apache.org/licenses/LICENSE-2.0
  11487. *
  11488. * Unless required by applicable law or agreed to in writing, software
  11489. * distributed under the License is distributed on an "AS IS" BASIS,
  11490. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11491. * See the License for the specific language governing permissions and
  11492. * limitations under the License.
  11493. *
  11494. * portal.js
  11495. * Image Portals
  11496. */
  11497. /**
  11498. * A sink of an Image Portal
  11499. * This is not a pipeline sink - it doesn't export any data!
  11500. */
  11501. class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode {
  11502. /**
  11503. * Constructor
  11504. * @param {string} [name] name of the node
  11505. */
  11506. constructor(name = undefined) {
  11507. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11508. /** @type {ImageFormat} stored image format */
  11509. this._format = types/* ImageFormat */.f5.RGBA;
  11510. /** @type {boolean} is this node initialized? */
  11511. this._initialized = false;
  11512. }
  11513. /**
  11514. * Stored image
  11515. * @returns {SpeedyTexture}
  11516. */
  11517. get image() {
  11518. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11519. return this._tex[0];
  11520. }
  11521. /**
  11522. * Stored image format
  11523. * @returns {ImageFormat}
  11524. */
  11525. get format() {
  11526. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11527. return this._format;
  11528. }
  11529. /**
  11530. * Initializes this node
  11531. * @param {SpeedyGPU} gpu
  11532. */
  11533. init(gpu) {
  11534. super.init(gpu);
  11535. this._tex[0].resize(1, 1).clear(); // initial texture
  11536. this._format = types/* ImageFormat */.f5.RGBA;
  11537. this._initialized = true;
  11538. }
  11539. /**
  11540. * Releases this node
  11541. * @param {SpeedyGPU} gpu
  11542. */
  11543. release(gpu) {
  11544. this._initialized = false;
  11545. super.release(gpu);
  11546. }
  11547. /**
  11548. * Run the specific task of this node
  11549. * @param {SpeedyGPU} gpu
  11550. * @returns {void|SpeedyPromise<void>}
  11551. */
  11552. _run(gpu) {
  11553. const {
  11554. image,
  11555. format
  11556. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11557. const tex = this._tex[0];
  11558. // can't store pyramids
  11559. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
  11560. // copy input
  11561. this._format = format;
  11562. tex.resize(image.width, image.height);
  11563. image.copyTo(tex);
  11564. }
  11565. }
  11566. /**
  11567. * A source of an Image Portal
  11568. */
  11569. class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode {
  11570. /**
  11571. * Constructor
  11572. * @param {string} [name] name of the node
  11573. */
  11574. constructor(name = undefined) {
  11575. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11576. /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
  11577. this._source = null;
  11578. }
  11579. /**
  11580. * Data source
  11581. * @returns {SpeedyPipelineNodeImagePortalSink|null}
  11582. */
  11583. get source() {
  11584. return this._source;
  11585. }
  11586. /**
  11587. * Data source
  11588. * @param {SpeedyPipelineNodeImagePortalSink|null} node
  11589. */
  11590. set source(node) {
  11591. if (node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  11592. this._source = node;
  11593. }
  11594. /**
  11595. * Run the specific task of this node
  11596. * @param {SpeedyGPU} gpu
  11597. * @returns {void|SpeedyPromise<void>}
  11598. */
  11599. _run(gpu) {
  11600. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  11601. this.output().swrite(this._source.image, this._source.format);
  11602. }
  11603. }
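/*
 * Usage sketch. Portals carry an image from a portal sink to a portal source
 * without a regular link between them: the sink stores the image, the source
 * replays it. The source must be pointed at a sink via its 'source' property:
 *
 *   const portalSink = new SpeedyPipelineNodeImagePortalSink('portal');
 *   const portalSource = new SpeedyPipelineNodeImagePortalSource();
 *   portalSource.source = portalSink;   // anything else throws IllegalArgumentError
 *   // reading portalSink.image / portalSink.format before init() throws IllegalOperationError
 */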
  11604. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
  11605. /*
  11606. * speedy-vision.js
  11607. * GPU-accelerated Computer Vision for JavaScript
  11608. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11609. *
  11610. * Licensed under the Apache License, Version 2.0 (the "License");
  11611. * you may not use this file except in compliance with the License.
  11612. * You may obtain a copy of the License at
  11613. *
  11614. * http://www.apache.org/licenses/LICENSE-2.0
  11615. *
  11616. * Unless required by applicable law or agreed to in writing, software
  11617. * distributed under the License is distributed on an "AS IS" BASIS,
  11618. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11619. * See the License for the specific language governing permissions and
  11620. * limitations under the License.
  11621. *
  11622. * image-factory.js
  11623. * Image-related nodes
  11624. */
  11625. /**
  11626. * Portal nodes
  11627. */
  11628. class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11629. /**
  11630. * Create an image portal source
  11631. * @param {string} [name] name of the node
  11632. * @returns {SpeedyPipelineNodeImagePortalSource}
  11633. */
  11634. static Source(name = undefined) {
  11635. return new SpeedyPipelineNodeImagePortalSource(name);
  11636. }
  11637. /**
  11638. * Create an image portal sink
  11639. * @param {string} [name] name of the node
  11640. * @returns {SpeedyPipelineNodeImagePortalSink}
  11641. */
  11642. static Sink(name = undefined) {
  11643. return new SpeedyPipelineNodeImagePortalSink(name);
  11644. }
  11645. }
  11646. /**
  11647. * Image nodes
  11648. */
  11649. class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11650. /**
  11651. * Create an image source
  11652. * @param {string} [name] name of the node
  11653. * @returns {SpeedyPipelineNodeImageSource}
  11654. */
  11655. static Source(name = undefined) {
  11656. return new SpeedyPipelineNodeImageSource(name);
  11657. }
  11658. /**
  11659. * Create an image sink
  11660. * @param {string} [name] name of the node
  11661. * @returns {SpeedyPipelineNodeImageSink}
  11662. */
  11663. static Sink(name = undefined) {
  11664. return new SpeedyPipelineNodeImageSink(name);
  11665. }
  11666. /**
  11667. * Create an image multiplexer
  11668. * @param {string} [name] name of the node
  11669. * @returns {SpeedyPipelineNodeImageMultiplexer}
  11670. */
  11671. static Multiplexer(name = undefined) {
  11672. return new SpeedyPipelineNodeImageMultiplexer(name);
  11673. }
  11674. /**
  11675. * Create an image buffer
  11676. * @param {string} [name] name of the node
  11677. * @returns {SpeedyPipelineNodeImageBuffer}
  11678. */
  11679. static Buffer(name = undefined) {
  11680. return new SpeedyPipelineNodeImageBuffer(name);
  11681. }
  11682. /**
  11683. * Image Pyramid
  11684. * @param {string} [name] name of the node
  11685. * @returns {SpeedyPipelineNodeImagePyramid}
  11686. */
  11687. static Pyramid(name = undefined) {
  11688. return new SpeedyPipelineNodeImagePyramid(name);
  11689. }
  11690. /**
  11691. * Image Mixer (blending)
  11692. * @param {string} [name] name of the node
  11693. * @returns {SpeedyPipelineNodeImageMixer}
  11694. */
  11695. static Mixer(name = undefined) {
  11696. return new SpeedyPipelineNodeImageMixer(name);
  11697. }
  11698. /**
  11699. * Image Portals
  11700. * @returns {typeof SpeedyPipelineImagePortalFactory}
  11701. */
  11702. static get Portal() {
  11703. return SpeedyPipelineImagePortalFactory;
  11704. }
  11705. }
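/*
 * Usage sketch (assumption: node wiring and pipeline execution -- e.g.
 * source.output().connectTo(sink.input()), pipeline.init(), pipeline.run() --
 * live elsewhere in this bundle). The factory above is the intended entry point
 * for creating image nodes:
 *
 *   const source = SpeedyPipelineImageFactory.Source();
 *   const pyramid = SpeedyPipelineImageFactory.Pyramid();
 *   const sink = SpeedyPipelineImageFactory.Sink();          // named 'image' by default
 *   const portalSink = SpeedyPipelineImageFactory.Portal.Sink();
 */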
  11706. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
  11707. /*
  11708. * speedy-vision.js
  11709. * GPU-accelerated Computer Vision for JavaScript
  11710. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11711. *
  11712. * Licensed under the Apache License, Version 2.0 (the "License");
  11713. * you may not use this file except in compliance with the License.
  11714. * You may obtain a copy of the License at
  11715. *
  11716. * http://www.apache.org/licenses/LICENSE-2.0
  11717. *
  11718. * Unless required by applicable law or agreed to in writing, software
  11719. * distributed under the License is distributed on an "AS IS" BASIS,
  11720. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11721. * See the License for the specific language governing permissions and
  11722. * limitations under the License.
  11723. *
  11724. * greyscale.js
  11725. * Convert an image to greyscale
  11726. */
  11727. /**
  11728. * Convert an image to greyscale
  11729. */
  11730. class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode {
  11731. /**
  11732. * Constructor
  11733. * @param {string} [name] name of the node
  11734. */
  11735. constructor(name = undefined) {
  11736. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11737. }
  11738. /**
  11739. * Run the specific task of this node
  11740. * @param {SpeedyGPU} gpu
  11741. * @returns {void|SpeedyPromise<void>}
  11742. */
  11743. _run(gpu) {
  11744. const {
  11745. image,
  11746. format
  11747. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11748. const width = image.width,
  11749. height = image.height;
  11750. const outputTexture = this._tex[0];
  11751. const filters = gpu.programs.filters;
  11752. filters.rgb2grey.outputs(width, height, outputTexture);
  11753. filters.rgb2grey(image);
  11754. this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
  11755. }
  11756. }
  11757. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
  11758. /*
  11759. * speedy-vision.js
  11760. * GPU-accelerated Computer Vision for JavaScript
  11761. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11762. *
  11763. * Licensed under the Apache License, Version 2.0 (the "License");
  11764. * you may not use this file except in compliance with the License.
  11765. * You may obtain a copy of the License at
  11766. *
  11767. * http://www.apache.org/licenses/LICENSE-2.0
  11768. *
  11769. * Unless required by applicable law or agreed to in writing, software
  11770. * distributed under the License is distributed on an "AS IS" BASIS,
  11771. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11772. * See the License for the specific language governing permissions and
  11773. * limitations under the License.
  11774. *
  11775. * gaussian-blur.js
  11776. * Gaussian Blur
  11777. */
  11778. /**
  11779. * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
  11780. * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
11781. * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. For
11782. * ksize = 3 this would give sigma = 0.5, so we clamp: sigma = max(1, (ksize - 1) / 4).
  11783. */
  11784. const DEFAULT_KERNEL = Object.freeze({
  11785. 3: [0.27901008925473514, 0.44197982149052983, 0.27901008925473514],
  11786. // 1D convolution (sigma = 1)
  11787. 5: [0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021],
  11788. // 1D convolution (separable kernel)
  11789. 7: [0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274],
  11790. 9: [0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988],
  11791. 11: [0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346],
  11792. 13: [0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363],
  11793. 15: [0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383]
  11794. //3: [ 0.25, 0.5, 0.25 ],
  11795. //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
  11796. });
  11797. /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
  11798. const DEFAULT_SIGMA = new SpeedyVector2(0, 0);
  11799. /** convolution programs (x-axis) */
  11800. const CONVOLUTION_X = Object.freeze({
  11801. 3: 'convolution3x',
  11802. 5: 'convolution5x',
  11803. 7: 'convolution7x',
  11804. 9: 'convolution9x',
  11805. 11: 'convolution11x',
  11806. 13: 'convolution13x',
  11807. 15: 'convolution15x'
  11808. });
  11809. /** convolution programs (y-axis) */
  11810. const CONVOLUTION_Y = Object.freeze({
  11811. 3: 'convolution3y',
  11812. 5: 'convolution5y',
  11813. 7: 'convolution7y',
  11814. 9: 'convolution9y',
  11815. 11: 'convolution11y',
  11816. 13: 'convolution13y',
  11817. 15: 'convolution15y'
  11818. });
  11819. /**
  11820. * @typedef {object} SeparableConvolutionKernel
  11821. * @property {number[]} x
  11822. * @property {number[]} y
  11823. */
  11824. /**
  11825. * Gaussian Blur
  11826. */
  11827. class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode {
  11828. /**
  11829. * Constructor
  11830. * @param {string} [name] name of the node
  11831. */
  11832. constructor(name = undefined) {
  11833. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11834. /** @type {SpeedySize} size of the kernel */
  11835. this._kernelSize = new SpeedySize(5, 5);
  11836. /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
  11837. this._sigma = DEFAULT_SIGMA;
  11838. /** @type {SeparableConvolutionKernel} convolution kernel */
  11839. this._kernel = {
  11840. x: DEFAULT_KERNEL[this._kernelSize.width],
  11841. y: DEFAULT_KERNEL[this._kernelSize.height]
  11842. };
  11843. }
  11844. /**
  11845. * Size of the kernel
  11846. * @returns {SpeedySize}
  11847. */
  11848. get kernelSize() {
  11849. return this._kernelSize;
  11850. }
  11851. /**
  11852. * Size of the kernel
  11853. * @param {SpeedySize} kernelSize
  11854. */
  11855. set kernelSize(kernelSize) {
  11856. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  11857. const kw = kernelSize.width,
  11858. kh = kernelSize.height;
  11859. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  11860. this._kernelSize = kernelSize;
  11861. this._updateKernel();
  11862. }
  11863. /**
  11864. * Sigma of the Gaussian kernel
  11865. * @returns {SpeedyVector2}
  11866. */
  11867. get sigma() {
  11868. return this._sigma;
  11869. }
  11870. /**
  11871. * Sigma of the Gaussian kernel
  11872. * @param {SpeedyVector2} sigma
  11873. */
  11874. set sigma(sigma) {
  11875. utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
  11876. utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
  11877. this._sigma = sigma;
  11878. this._updateKernel();
  11879. }
  11880. /**
  11881. * Run the specific task of this node
  11882. * @param {SpeedyGPU} gpu
  11883. * @returns {void|SpeedyPromise<void>}
  11884. */
  11885. _run(gpu) {
  11886. const {
  11887. image,
  11888. format
  11889. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11890. const width = image.width,
  11891. height = image.height;
  11892. const kernX = this._kernel.x;
  11893. const kernY = this._kernel.y;
  11894. const convX = CONVOLUTION_X[this._kernelSize.width];
  11895. const convY = CONVOLUTION_Y[this._kernelSize.height];
  11896. const tex = this._tex[0];
  11897. const outputTexture = this._tex[1];
  11898. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  11899. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  11900. this.output().swrite(outputTexture, format);
  11901. }
  11902. /**
  11903. * Update the internal kernel to match
  11904. * sigma and kernelSize
  11905. */
  11906. _updateKernel() {
  11907. if (this._sigma.x == DEFAULT_SIGMA.x) this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];else this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
  11908. if (this._sigma.y == DEFAULT_SIGMA.y) this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];else this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
  11909. }
  11910. }
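/*
 * Usage sketch. Kernel sizes 3x3 to 15x15 (odd only) are supported. With the
 * default sigma (0, 0), the precomputed DEFAULT_KERNEL is used, which follows
 * sigma = max(1, (ksize - 1) / 4); e.g. a 7x7 kernel implies sigma = 1.5.
 *
 *   const blur = new SpeedyPipelineNodeGaussianBlur();
 *   blur.kernelSize = new SpeedySize(7, 7);      // even or out-of-range sizes throw
 *   blur.sigma = new SpeedyVector2(2.0, 2.0);    // custom sigma, recomputed via Utils.gaussianKernel()
 */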
  11911. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
  11912. /*
  11913. * speedy-vision.js
  11914. * GPU-accelerated Computer Vision for JavaScript
  11915. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11916. *
  11917. * Licensed under the Apache License, Version 2.0 (the "License");
  11918. * you may not use this file except in compliance with the License.
  11919. * You may obtain a copy of the License at
  11920. *
  11921. * http://www.apache.org/licenses/LICENSE-2.0
  11922. *
  11923. * Unless required by applicable law or agreed to in writing, software
  11924. * distributed under the License is distributed on an "AS IS" BASIS,
  11925. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11926. * See the License for the specific language governing permissions and
  11927. * limitations under the License.
  11928. *
  11929. * simple-blur.js
  11930. * Simple Blur (Box Filter)
  11931. */
  11932. /** 1D convolution filters */
  11933. const BOX_FILTER = Object.freeze({
  11934. 3: new Array(3).fill(1 / 3),
  11935. 5: new Array(5).fill(1 / 5),
  11936. 7: new Array(7).fill(1 / 7),
  11937. 9: new Array(9).fill(1 / 9),
  11938. 11: new Array(11).fill(1 / 11),
  11939. 13: new Array(13).fill(1 / 13),
  11940. 15: new Array(15).fill(1 / 15)
  11941. });
  11942. /** convolution programs (x-axis) */
  11943. const simple_blur_CONVOLUTION_X = Object.freeze({
  11944. 3: 'convolution3x',
  11945. 5: 'convolution5x',
  11946. 7: 'convolution7x',
  11947. 9: 'convolution9x',
  11948. 11: 'convolution11x',
  11949. 13: 'convolution13x',
  11950. 15: 'convolution15x'
  11951. });
  11952. /** convolution programs (y-axis) */
  11953. const simple_blur_CONVOLUTION_Y = Object.freeze({
  11954. 3: 'convolution3y',
  11955. 5: 'convolution5y',
  11956. 7: 'convolution7y',
  11957. 9: 'convolution9y',
  11958. 11: 'convolution11y',
  11959. 13: 'convolution13y',
  11960. 15: 'convolution15y'
  11961. });
  11962. /**
  11963. * @typedef {object} SeparableConvolutionKernel
  11964. * @property {number[]} x
  11965. * @property {number[]} y
  11966. */
  11967. /**
  11968. * Simple Blur (Box Filter)
  11969. */
  11970. class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode {
  11971. /**
  11972. * Constructor
  11973. * @param {string} [name] name of the node
  11974. */
  11975. constructor(name = undefined) {
  11976. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11977. /** @type {SpeedySize} size of the kernel */
  11978. this._kernelSize = new SpeedySize(5, 5);
  11979. /** @type {SeparableConvolutionKernel} convolution kernel */
  11980. this._kernel = {
  11981. x: BOX_FILTER[this._kernelSize.width],
  11982. y: BOX_FILTER[this._kernelSize.height]
  11983. };
  11984. }
  11985. /**
  11986. * Size of the kernel
  11987. * @returns {SpeedySize}
  11988. */
  11989. get kernelSize() {
  11990. return this._kernelSize;
  11991. }
  11992. /**
  11993. * Size of the kernel
  11994. * @param {SpeedySize} kernelSize
  11995. */
  11996. set kernelSize(kernelSize) {
  11997. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  11998. const kw = kernelSize.width,
  11999. kh = kernelSize.height;
  12000. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  12001. this._kernelSize = kernelSize;
  12002. this._kernel.x = BOX_FILTER[this._kernelSize.width];
  12003. this._kernel.y = BOX_FILTER[this._kernelSize.height];
  12004. }
  12005. /**
  12006. * Run the specific task of this node
  12007. * @param {SpeedyGPU} gpu
  12008. * @returns {void|SpeedyPromise<void>}
  12009. */
  12010. _run(gpu) {
  12011. const {
  12012. image,
  12013. format
  12014. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12015. const width = image.width,
  12016. height = image.height;
  12017. const kernX = this._kernel.x;
  12018. const kernY = this._kernel.y;
  12019. const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
  12020. const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
  12021. const tex = this._tex[0];
  12022. const outputTexture = this._tex[1];
  12023. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  12024. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  12025. this.output().swrite(outputTexture, format);
  12026. }
  12027. }
  12028. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
  12029. /*
  12030. * speedy-vision.js
  12031. * GPU-accelerated Computer Vision for JavaScript
  12032. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12033. *
  12034. * Licensed under the Apache License, Version 2.0 (the "License");
  12035. * you may not use this file except in compliance with the License.
  12036. * You may obtain a copy of the License at
  12037. *
  12038. * http://www.apache.org/licenses/LICENSE-2.0
  12039. *
  12040. * Unless required by applicable law or agreed to in writing, software
  12041. * distributed under the License is distributed on an "AS IS" BASIS,
  12042. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12043. * See the License for the specific language governing permissions and
  12044. * limitations under the License.
  12045. *
  12046. * median-blur.js
  12047. * Median Blur
  12048. */
  12049. // Median programs
  12050. const MEDIAN = {
  12051. 3: 'median3',
  12052. 5: 'median5',
  12053. 7: 'median7'
  12054. };
  12055. /**
  12056. * Median Blur
  12057. */
  12058. class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode {
  12059. /**
  12060. * Constructor
  12061. * @param {string} [name] name of the node
  12062. */
  12063. constructor(name = undefined) {
  12064. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12065. /** @type {SpeedySize} size of the kernel (assumed to be square) */
  12066. this._kernelSize = new SpeedySize(5, 5);
  12067. }
  12068. /**
  12069. * Size of the kernel
  12070. * @returns {SpeedySize}
  12071. */
  12072. get kernelSize() {
  12073. return this._kernelSize;
  12074. }
  12075. /**
  12076. * Size of the kernel
  12077. * @param {SpeedySize} kernelSize
  12078. */
  12079. set kernelSize(kernelSize) {
  12080. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  12081. const ksize = kernelSize.width;
  12082. if (!(ksize == 3 || ksize == 5 || ksize == 7)) throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);else if (kernelSize.width != kernelSize.height) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
  12083. this._kernelSize = kernelSize;
  12084. }
  12085. /**
  12086. * Run the specific task of this node
  12087. * @param {SpeedyGPU} gpu
  12088. * @returns {void|SpeedyPromise<void>}
  12089. */
  12090. _run(gpu) {
  12091. const {
  12092. image,
  12093. format
  12094. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12095. const width = image.width,
  12096. height = image.height;
  12097. const ksize = this._kernelSize.width;
  12098. const med = MEDIAN[ksize];
  12099. const outputTexture = this._tex[0];
  12100. gpu.programs.filters[med].outputs(width, height, outputTexture)(image);
  12101. this.output().swrite(outputTexture, format);
  12102. }
  12103. }
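/*
 * Usage sketch. Median blur accepts GREY images only and square kernels of
 * size 3x3, 5x5 or 7x7:
 *
 *   const median = new SpeedyPipelineNodeMedianBlur();
 *   median.kernelSize = new SpeedySize(7, 7);   // non-square or other sizes throw NotSupportedError
 */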
  12104. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
  12105. /*
  12106. * speedy-vision.js
  12107. * GPU-accelerated Computer Vision for JavaScript
  12108. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12109. *
  12110. * Licensed under the Apache License, Version 2.0 (the "License");
  12111. * you may not use this file except in compliance with the License.
  12112. * You may obtain a copy of the License at
  12113. *
  12114. * http://www.apache.org/licenses/LICENSE-2.0
  12115. *
  12116. * Unless required by applicable law or agreed to in writing, software
  12117. * distributed under the License is distributed on an "AS IS" BASIS,
  12118. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12119. * See the License for the specific language governing permissions and
  12120. * limitations under the License.
  12121. *
  12122. * convolution.js
  12123. * Image convolution
  12124. */
  12125. // 2D convolution programs
  12126. const CONVOLUTION = {
  12127. 3: 'convolution3',
  12128. 5: 'convolution5',
  12129. 7: 'convolution7'
  12130. };
  12131. /**
  12132. * Image convolution
  12133. */
  12134. class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode {
  12135. /**
  12136. * Constructor
  12137. * @param {string} [name] name of the node
  12138. */
  12139. constructor(name = undefined) {
  12140. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12141. /** @type {SpeedyMatrix} convolution kernel (square matrix) */
  12142. this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
  12143. }
  12144. /**
  12145. * Convolution kernel
  12146. * @returns {SpeedyMatrix}
  12147. */
  12148. get kernel() {
  12149. return this._kernel;
  12150. }
  12151. /**
  12152. * Convolution kernel
  12153. * @param {SpeedyMatrix} kernel
  12154. */
  12155. set kernel(kernel) {
  12156. if (kernel.rows != kernel.columns) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);else if (!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7)) throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
  12157. this._kernel = kernel;
  12158. }
  12159. /**
  12160. * Run the specific task of this node
  12161. * @param {SpeedyGPU} gpu
  12162. * @returns {void|SpeedyPromise<void>}
  12163. */
  12164. _run(gpu) {
  12165. const {
  12166. image,
  12167. format
  12168. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12169. const width = image.width,
  12170. height = image.height;
  12171. const outputTexture = this._tex[0];
  12172. const ksize = this._kernel.rows;
  12173. const conv = CONVOLUTION[ksize];
  12174. const kernel = this._kernel.read();
  12175. gpu.programs.filters[conv].outputs(width, height, outputTexture)(image, kernel);
  12176. this.output().swrite(outputTexture, format);
  12177. }
  12178. }
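/*
 * Usage sketch. The kernel is a square SpeedyMatrix of size 3x3, 5x5 or 7x7;
 * the default is the 3x3 identity kernel set in the constructor. A hypothetical
 * sharpening kernel, for illustration:
 *
 *   const convolution = new SpeedyPipelineNodeConvolution();
 *   convolution.kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [
 *        0, -1,  0,
 *       -1,  5, -1,
 *        0, -1,  0
 *   ]);
 */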
  12179. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
  12180. /*
  12181. * speedy-vision.js
  12182. * GPU-accelerated Computer Vision for JavaScript
  12183. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12184. *
  12185. * Licensed under the Apache License, Version 2.0 (the "License");
  12186. * you may not use this file except in compliance with the License.
  12187. * You may obtain a copy of the License at
  12188. *
  12189. * http://www.apache.org/licenses/LICENSE-2.0
  12190. *
  12191. * Unless required by applicable law or agreed to in writing, software
  12192. * distributed under the License is distributed on an "AS IS" BASIS,
  12193. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12194. * See the License for the specific language governing permissions and
  12195. * limitations under the License.
  12196. *
  12197. * nightvision.js
  12198. * Nightvision filter
  12199. */
  12200. /**
  12201. * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
  12202. */
  12203. /**
  12204. * Nightvision filter: "see in the dark"
  12205. */
  12206. class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode {
  12207. /**
  12208. * Constructor
  12209. * @param {string} [name] name of the node
  12210. */
  12211. constructor(name = undefined) {
  12212. super(name, 3, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.RGBA || msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12213. /** @type {number} a value typically in [0,1]: larger number => higher contrast */
  12214. this._gain = 0.5;
  12215. /** @type {number} a value typically in [0,1]: controls brightness */
  12216. this._offset = 0.5;
  12217. /** @type {number} gain decay, a value in [0,1] */
  12218. this._decay = 0.0;
  12219. /** @type {NightvisionQualityLevel} quality level */
  12220. this._quality = 'medium';
  12221. }
  12222. /**
  12223. * Gain, a value typically in [0,1]: larger number => higher contrast
  12224. * @returns {number}
  12225. */
  12226. get gain() {
  12227. return this._gain;
  12228. }
  12229. /**
  12230. * Gain, a value typically in [0,1]: larger number => higher contrast
  12231. * @param {number} gain
  12232. */
  12233. set gain(gain) {
  12234. this._gain = +gain;
  12235. }
  12236. /**
  12237. * Offset, a value typically in [0,1] that controls the brightness
  12238. * @returns {number}
  12239. */
  12240. get offset() {
  12241. return this._offset;
  12242. }
  12243. /**
  12244. * Offset, a value typically in [0,1] that controls the brightness
  12245. * @param {number} offset
  12246. */
  12247. set offset(offset) {
  12248. this._offset = +offset;
  12249. }
  12250. /**
  12251. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12252. * @returns {number}
  12253. */
  12254. get decay() {
  12255. return this._decay;
  12256. }
  12257. /**
  12258. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12259. * @param {number} decay
  12260. */
  12261. set decay(decay) {
  12262. this._decay = Math.max(0.0, Math.min(+decay, 1.0));
  12263. }
  12264. /**
  12265. * Quality level of the filter
  12266. * @returns {NightvisionQualityLevel}
  12267. */
  12268. get quality() {
  12269. return this._quality;
  12270. }
  12271. /**
  12272. * Quality level of the filter
  12273. * @param {NightvisionQualityLevel} quality
  12274. */
  12275. set quality(quality) {
  12276. if (quality === 'high' || quality === 'medium' || quality === 'low') this._quality = quality;else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
  12277. }
  12278. /**
  12279. * Run the specific task of this node
  12280. * @param {SpeedyGPU} gpu
  12281. * @returns {void|SpeedyPromise<void>}
  12282. */
  12283. _run(gpu) {
  12284. const {
  12285. image,
  12286. format
  12287. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12288. const width = image.width,
  12289. height = image.height;
  12290. const gain = this._gain;
  12291. const offset = this._offset;
  12292. const decay = this._decay;
  12293. const quality = this._quality;
  12294. const filters = gpu.programs.filters;
  12295. const tmp = this._tex[0];
  12296. const illuminationMap = this._tex[1];
  12297. const outputTexture = this._tex[2];
  12298. // compute illumination map
  12299. if (quality == 'medium') {
  12300. filters.illuminationMapX.outputs(width, height, tmp);
  12301. filters.illuminationMapY.outputs(width, height, illuminationMap);
  12302. filters.illuminationMapX(image);
  12303. filters.illuminationMapY(tmp);
  12304. } else if (quality == 'high') {
  12305. filters.illuminationMapHiX.outputs(width, height, tmp);
  12306. filters.illuminationMapHiY.outputs(width, height, illuminationMap);
  12307. filters.illuminationMapHiX(image);
  12308. filters.illuminationMapHiY(tmp);
  12309. } else if (quality == 'low') {
  12310. filters.illuminationMapLoX.outputs(width, height, tmp);
  12311. filters.illuminationMapLoY.outputs(width, height, illuminationMap);
  12312. filters.illuminationMapLoX(image);
  12313. filters.illuminationMapLoY(tmp);
  12314. }
  12315. // run nightvision
  12316. if (format === types/* ImageFormat */.f5.GREY) {
  12317. filters.nightvisionGreyscale.outputs(width, height, outputTexture);
  12318. filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
  12319. } else if (format === types/* ImageFormat */.f5.RGBA) {
  12320. filters.nightvision.outputs(width, height, outputTexture);
  12321. filters.nightvision(image, illuminationMap, gain, offset, decay);
  12322. }
  12323. // done!
  12324. this.output().swrite(outputTexture, format);
  12325. }
  12326. }
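/*
 * Usage sketch. Nightvision enhances dark regions of RGBA or GREY images using
 * an illumination map whose quality is selectable:
 *
 *   const nightvision = new SpeedyPipelineNodeNightvision();
 *   nightvision.gain = 0.6;       // typically in [0,1]; higher => more contrast
 *   nightvision.offset = 0.5;     // typically in [0,1]; brightness
 *   nightvision.decay = 0.1;      // in [0,1]; gain decay from the image center
 *   nightvision.quality = 'high'; // 'high' | 'medium' | 'low'
 */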
  12327. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
  12328. /*
  12329. * speedy-vision.js
  12330. * GPU-accelerated Computer Vision for JavaScript
  12331. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12332. *
  12333. * Licensed under the Apache License, Version 2.0 (the "License");
  12334. * you may not use this file except in compliance with the License.
  12335. * You may obtain a copy of the License at
  12336. *
  12337. * http://www.apache.org/licenses/LICENSE-2.0
  12338. *
  12339. * Unless required by applicable law or agreed to in writing, software
  12340. * distributed under the License is distributed on an "AS IS" BASIS,
  12341. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12342. * See the License for the specific language governing permissions and
  12343. * limitations under the License.
  12344. *
  12345. * normalize.js
  12346. * Normalize image to a range
  12347. */
  12348. /**
  12349. * Normalize image to a range
  12350. */
  12351. class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode {
  12352. /**
  12353. * Constructor
  12354. * @param {string} [name] name of the node
  12355. */
  12356. constructor(name = undefined) {
  12357. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12358. /** @type {number} a value in [0,255] */
  12359. this._minValue = 0;
  12360. /** @type {number} a value in [0,255] */
  12361. this._maxValue = 255;
  12362. }
  12363. /**
  12364. * Minimum intensity in the output image, a value in [0,255]
  12365. * @returns {number}
  12366. */
  12367. get minValue() {
  12368. return this._minValue;
  12369. }
  12370. /**
  12371. * Minimum intensity in the output image, a value in [0,255]
  12372. * @param {number} minValue
  12373. */
  12374. set minValue(minValue) {
  12375. this._minValue = Math.max(0, Math.min(+minValue, 255));
  12376. }
  12377. /**
  12378. * Maximum intensity in the output image, a value in [0,255]
  12379. * @returns {number}
  12380. */
  12381. get maxValue() {
  12382. return this._maxValue;
  12383. }
  12384. /**
  12385. * Maximum intensity in the output image, a value in [0,255]
  12386. * @param {number} maxValue
  12387. */
  12388. set maxValue(maxValue) {
  12389. this._maxValue = Math.max(0, Math.min(+maxValue, 255));
  12390. }
  12391. /**
  12392. * Run the specific task of this node
  12393. * @param {SpeedyGPU} gpu
  12394. * @returns {void|SpeedyPromise<void>}
  12395. */
  12396. _run(gpu) {
  12397. const {
  12398. image,
  12399. format
  12400. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12401. const width = image.width,
  12402. height = image.height;
  12403. const outputTexture = this._tex[3];
  12404. let minValue = this._minValue;
  12405. let maxValue = this._maxValue;
  12406. if (minValue > maxValue) minValue = maxValue = (minValue + maxValue) / 2;
  12407. const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
  12408. gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
  12409. gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
  12410. this.output().swrite(outputTexture, format);
  12411. }
  12412. /**
  12413. * Scan a single component in all pixels of the image and find the min & max intensities
  12414. * @param {SpeedyGPU} gpu
  12415. * @param {SpeedyTexture} image input image
  12416. * @param {PixelComponent} pixelComponent a single PixelComponent flag
  12417. * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
  12418. */
  12419. _scanMinMax(gpu, image, pixelComponent) {
  12420. const tex = this._tex;
  12421. const program = gpu.programs.utils;
  12422. const width = image.width,
  12423. height = image.height;
  12424. const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
  12425. utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
  12426. program.copyComponents.outputs(width, height, tex[2]);
  12427. program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
  12428. let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
  12429. for (let i = 0; i < numIterations; i++) texture = program.scanMinMax2D(texture, i);
  12430. return texture;
  12431. }
  12432. }
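/*
 * A rough CPU sketch of the per-pixel mapping that the normalizeGreyscale
 * shader above is expected to perform: the observed [min, max] range of the
 * scanned component is stretched linearly onto the user-defined
 * [minValue, maxValue]. This is an illustration only; the actual work is done
 * on GPU textures via scanMinMax2D + normalizeGreyscale.
 */
function normalizeGreyscaleSketch(pixels, minValue = 0, maxValue = 255)
{
    // find the observed intensity range (the GPU does this with scanMinMax2D)
    let lo = 255, hi = 0;
    for (const p of pixels) {
        lo = Math.min(lo, p);
        hi = Math.max(hi, p);
    }
    // degenerate range: this sketch simply outputs a constant image
    const range = hi - lo;
    if (range == 0)
        return pixels.map(() => (minValue + maxValue) / 2);
    // linear mapping [lo, hi] -> [minValue, maxValue]
    return pixels.map(p => minValue + (p - lo) * (maxValue - minValue) / range);
}
// e.g., normalizeGreyscaleSketch([50, 100, 150], 0, 255) => [0, 127.5, 255]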
  12433. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
  12434. /*
  12435. * speedy-vision.js
  12436. * GPU-accelerated Computer Vision for JavaScript
  12437. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12438. *
  12439. * Licensed under the Apache License, Version 2.0 (the "License");
  12440. * you may not use this file except in compliance with the License.
  12441. * You may obtain a copy of the License at
  12442. *
  12443. * http://www.apache.org/licenses/LICENSE-2.0
  12444. *
  12445. * Unless required by applicable law or agreed to in writing, software
  12446. * distributed under the License is distributed on an "AS IS" BASIS,
  12447. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12448. * See the License for the specific language governing permissions and
  12449. * limitations under the License.
  12450. *
  12451. * filter-factory.js
  12452. * Image filters
  12453. */
  12454. /**
  12455. * Image filters
  12456. */
  12457. class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12458. /**
  12459. * Convert image to greyscale
  12460. * @param {string} [name]
  12461. * @returns {SpeedyPipelineNodeGreyscale}
  12462. */
  12463. static Greyscale(name = undefined) {
  12464. return new SpeedyPipelineNodeGreyscale(name);
  12465. }
  12466. /**
  12467. * Gaussian Blur
  12468. * @param {string} [name]
  12469. * @returns {SpeedyPipelineNodeGaussianBlur}
  12470. */
  12471. static GaussianBlur(name = undefined) {
  12472. return new SpeedyPipelineNodeGaussianBlur(name);
  12473. }
  12474. /**
  12475. * Simple Blur (Box Filter)
  12476. * @param {string} [name]
  12477. * @returns {SpeedyPipelineNodeSimpleBlur}
  12478. */
  12479. static SimpleBlur(name = undefined) {
  12480. return new SpeedyPipelineNodeSimpleBlur(name);
  12481. }
  12482. /**
  12483. * Median Blur
  12484. * @param {string} [name]
  12485. * @returns {SpeedyPipelineNodeMedianBlur}
  12486. */
  12487. static MedianBlur(name = undefined) {
  12488. return new SpeedyPipelineNodeMedianBlur(name);
  12489. }
  12490. /**
  12491. * Image Convolution
  12492. * @param {string} [name]
  12493. * @returns {SpeedyPipelineNodeConvolution}
  12494. */
  12495. static Convolution(name = undefined) {
  12496. return new SpeedyPipelineNodeConvolution(name);
  12497. }
  12498. /**
  12499. * Nightvision
  12500. * @param {string} [name]
  12501. * @returns {SpeedyPipelineNodeNightvision}
  12502. */
  12503. static Nightvision(name = undefined) {
  12504. return new SpeedyPipelineNodeNightvision(name);
  12505. }
  12506. /**
  12507. * Normalize image
  12508. * @param {string} [name]
  12509. * @returns {SpeedyPipelineNodeNormalize}
  12510. */
  12511. static Normalize(name = undefined) {
  12512. return new SpeedyPipelineNodeNormalize(name);
  12513. }
  12514. }
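/*
 * Illustrative use of the filter factory above (a sketch, not part of the
 * bundle). Each static method just instantiates the corresponding pipeline
 * node, whose parameters are then set through plain property assignment. In
 * the public API these constructors are presumably reached through a Speedy
 * namespace; here the bundled class is referenced directly, and the wiring of
 * nodes into a pipeline (defined elsewhere in this file) is omitted.
 */
// const grey = SpeedyPipelineFilterFactory.Greyscale('grey');
// const norm = SpeedyPipelineFilterFactory.Normalize('normalize');
// norm.minValue = 16;   // output intensities will span [16, 235]
// norm.maxValue = 235;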
  12515. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
  12516. /*
  12517. * speedy-vision.js
  12518. * GPU-accelerated Computer Vision for JavaScript
  12519. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12520. *
  12521. * Licensed under the Apache License, Version 2.0 (the "License");
  12522. * you may not use this file except in compliance with the License.
  12523. * You may obtain a copy of the License at
  12524. *
  12525. * http://www.apache.org/licenses/LICENSE-2.0
  12526. *
  12527. * Unless required by applicable law or agreed to in writing, software
  12528. * distributed under the License is distributed on an "AS IS" BASIS,
  12529. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12530. * See the License for the specific language governing permissions and
  12531. * limitations under the License.
  12532. *
  12533. * perspective-warp.js
  12534. * Warp an image using a perspective transformation
  12535. */
  12536. // Used when an invalid matrix is provided
  12537. const SINGULAR_MATRIX = [0, 0, 0, 0, 0, 0, 0, 0, 1];
  12538. /**
  12539. * Warp an image using a perspective transformation
  12540. */
  12541. class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode {
  12542. /**
  12543. * Constructor
  12544. * @param {string} [name] name of the node
  12545. */
  12546. constructor(name = undefined) {
  12547. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12548. /** @type {SpeedyMatrix} perspective transformation */
  12549. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  12550. }
  12551. /**
  12552. * Perspective transform, a 3x3 homography matrix
  12553. * @returns {SpeedyMatrix}
  12554. */
  12555. get transform() {
  12556. return this._transform;
  12557. }
  12558. /**
  12559. * Perspective transform, a 3x3 homography matrix
  12560. * @param {SpeedyMatrix} transform
  12561. */
  12562. set transform(transform) {
  12563. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  12564. this._transform = transform;
  12565. }
  12566. /**
  12567. * Run the specific task of this node
  12568. * @param {SpeedyGPU} gpu
  12569. * @returns {void|SpeedyPromise<void>}
  12570. */
  12571. _run(gpu) {
  12572. const {
  12573. image,
  12574. format
  12575. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12576. const width = image.width,
  12577. height = image.height;
  12578. const outputTexture = this._tex[0];
  12579. const homography = this._transform.read();
  12580. const inverseHomography = this._inverse3(homography);
  12581. const isValidHomography = !Number.isNaN(inverseHomography[0]);
  12582. gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
  12583. gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
  12584. this.output().swrite(outputTexture, format);
  12585. }
  12586. /**
  12587. * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
  12588. * @param {number[]} mat 3x3 matrix in column-major format
  12589. * @param {number} [eps] epsilon
  12590. * @returns {number[]} 3x3 inverse matrix in column-major format
  12591. */
  12592. _inverse3(mat, eps = 1e-6) {
  12593. // read the entries of the matrix
  12594. const a11 = mat[0];
  12595. const a21 = mat[1];
  12596. const a31 = mat[2];
  12597. const a12 = mat[3];
  12598. const a22 = mat[4];
  12599. const a32 = mat[5];
  12600. const a13 = mat[6];
  12601. const a23 = mat[7];
  12602. const a33 = mat[8];
  12603. // compute cofactors
  12604. const b1 = a33 * a22 - a32 * a23; // b11
  12605. const b2 = a33 * a12 - a32 * a13; // b21
  12606. const b3 = a23 * a12 - a22 * a13; // b31
  12607. // compute the determinant
  12608. const det = a11 * b1 - a21 * b2 + a31 * b3;
  12609. // set up the inverse
  12610. if (!(Math.abs(det) < eps)) {
  12611. const d = 1.0 / det;
  12612. mat[0] = b1 * d;
  12613. mat[1] = -(a33 * a21 - a31 * a23) * d;
  12614. mat[2] = (a32 * a21 - a31 * a22) * d;
  12615. mat[3] = -b2 * d;
  12616. mat[4] = (a33 * a11 - a31 * a13) * d;
  12617. mat[5] = -(a32 * a11 - a31 * a12) * d;
  12618. mat[6] = b3 * d;
  12619. mat[7] = -(a23 * a11 - a21 * a13) * d;
  12620. mat[8] = (a22 * a11 - a21 * a12) * d;
  12621. } else mat.fill(Number.NaN, 0, 9);
  12622. // done!
  12623. return mat;
  12624. }
  12625. }
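/*
 * Reference sketch of the cofactor-based inversion used by _inverse3() above,
 * written out-of-place for clarity. Entries are in column-major order, i.e.,
 * [a11, a21, a31, a12, a22, a32, a13, a23, a33]. Returns a NaN-filled array
 * when the matrix is near-singular, matching the behavior of the node.
 */
function invert3x3ColumnMajor(m, eps = 1e-6)
{
    const [a11, a21, a31, a12, a22, a32, a13, a23, a33] = m;
    // cofactors of the first column & determinant (expansion along column 1)
    const b1 = a33 * a22 - a32 * a23;
    const b2 = a33 * a12 - a32 * a13;
    const b3 = a23 * a12 - a22 * a13;
    const det = a11 * b1 - a21 * b2 + a31 * b3;
    if (Math.abs(det) < eps)
        return new Array(9).fill(Number.NaN);
    const d = 1.0 / det;
    return [
        b1 * d, -(a33 * a21 - a31 * a23) * d, (a32 * a21 - a31 * a22) * d,
        -b2 * d, (a33 * a11 - a31 * a13) * d, -(a32 * a11 - a31 * a12) * d,
        b3 * d, -(a23 * a11 - a21 * a13) * d, (a22 * a11 - a21 * a12) * d
    ];
}
// sanity check: invert3x3ColumnMajor([2,0,0, 0,4,0, 0,0,1]) => [0.5,0,0, 0,0.25,0, 0,0,1]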
  12626. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
  12627. /*
  12628. * speedy-vision.js
  12629. * GPU-accelerated Computer Vision for JavaScript
  12630. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12631. *
  12632. * Licensed under the Apache License, Version 2.0 (the "License");
  12633. * you may not use this file except in compliance with the License.
  12634. * You may obtain a copy of the License at
  12635. *
  12636. * http://www.apache.org/licenses/LICENSE-2.0
  12637. *
  12638. * Unless required by applicable law or agreed to in writing, software
  12639. * distributed under the License is distributed on an "AS IS" BASIS,
  12640. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12641. * See the License for the specific language governing permissions and
  12642. * limitations under the License.
  12643. *
  12644. * resize.js
  12645. * Resize image
  12646. */
  12647. /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
  12648. /**
  12649. * Resize image
  12650. */
  12651. class SpeedyPipelineNodeResize extends SpeedyPipelineNode {
  12652. /**
  12653. * Constructor
  12654. * @param {string} [name] name of the node
  12655. */
  12656. constructor(name = undefined) {
  12657. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12658. /** @type {SpeedySize} size of the output image, in pixels */
  12659. this._size = new SpeedySize(0, 0);
  12660. /** @type {SpeedyVector2} size of the output relative to the size of the input */
  12661. this._scale = new SpeedyVector2(1, 1);
  12662. /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
  12663. this._method = 'bilinear';
  12664. }
  12665. /**
  12666. * Size of the output image, in pixels (use 0 to use scale)
  12667. * @returns {SpeedySize}
  12668. */
  12669. get size() {
  12670. return this._size;
  12671. }
  12672. /**
  12673. * Size of the output image, in pixels (use 0 to use scale)
  12674. * @param {SpeedySize} size
  12675. */
  12676. set size(size) {
  12677. this._size = size;
  12678. }
  12679. /**
  12680. * Size of the output image relative to the size of the input image
  12681. * @returns {SpeedyVector2}
  12682. */
  12683. get scale() {
  12684. return this._scale;
  12685. }
  12686. /**
  12687. * Size of the output image relative to the size of the input image
  12688. * @param {SpeedyVector2} scale
  12689. */
  12690. set scale(scale) {
  12691. this._scale = scale;
  12692. }
  12693. /**
  12694. * Interpolation method
  12695. * @returns {SpeedyPipelineNodeResizeMethod}
  12696. */
  12697. get method() {
  12698. return this._method;
  12699. }
  12700. /**
  12701. * Interpolation method
  12702. * @param {SpeedyPipelineNodeResizeMethod} method
  12703. */
  12704. set method(method) {
12705. if (method !== 'nearest' && method !== 'bilinear') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);

  12706. this._method = method;
  12707. }
  12708. /**
  12709. * Run the specific task of this node
  12710. * @param {SpeedyGPU} gpu
  12711. * @returns {void|SpeedyPromise<void>}
  12712. */
  12713. _run(gpu) {
  12714. const {
  12715. image,
  12716. format
  12717. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12718. const width = image.width,
  12719. height = image.height;
  12720. const outputTexture = this._tex[0];
  12721. const method = this._method;
  12722. const newWidth = this._size.width || Math.max(1, this._scale.x * width);
  12723. const newHeight = this._size.height || Math.max(1, this._scale.y * height);
  12724. if (method == 'bilinear') {
  12725. gpu.programs.transforms.resizeBilinear.outputs(newWidth, newHeight, outputTexture)(image);
  12726. } else if (method == 'nearest') {
  12727. gpu.programs.transforms.resizeNearest.outputs(newWidth, newHeight, outputTexture)(image);
  12728. }
  12729. this.output().swrite(outputTexture, format);
  12730. }
  12731. }
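/*
 * Sketch of how the Resize node above picks its output dimensions: an explicit
 * size takes precedence, dimension by dimension, otherwise the scale vector is
 * applied to the input dimensions, clamped to at least 1 pixel. This mirrors
 * the computation in _run() for illustration.
 */
function resizedDimensionsSketch(inputWidth, inputHeight, size, scale)
{
    const newWidth = size.width || Math.max(1, scale.x * inputWidth);
    const newHeight = size.height || Math.max(1, scale.y * inputHeight);
    return { newWidth, newHeight };
}
// e.g., resizedDimensionsSketch(640, 480, { width: 0, height: 0 }, { x: 0.5, y: 0.5 })
// => { newWidth: 320, newHeight: 240 }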
  12732. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
  12733. /*
  12734. * speedy-vision.js
  12735. * GPU-accelerated Computer Vision for JavaScript
  12736. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12737. *
  12738. * Licensed under the Apache License, Version 2.0 (the "License");
  12739. * you may not use this file except in compliance with the License.
  12740. * You may obtain a copy of the License at
  12741. *
  12742. * http://www.apache.org/licenses/LICENSE-2.0
  12743. *
  12744. * Unless required by applicable law or agreed to in writing, software
  12745. * distributed under the License is distributed on an "AS IS" BASIS,
  12746. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12747. * See the License for the specific language governing permissions and
  12748. * limitations under the License.
  12749. *
  12750. * transform-factory.js
  12751. * Image transforms
  12752. */
  12753. /**
  12754. * Image transforms
  12755. */
  12756. class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12757. /**
  12758. * Resize image
  12759. * @param {string} [name]
  12760. * @returns {SpeedyPipelineNodeResize}
  12761. */
  12762. static Resize(name = undefined) {
  12763. return new SpeedyPipelineNodeResize(name);
  12764. }
  12765. /**
  12766. * Warp an image using a perspective transformation
  12767. * @param {string} [name]
  12768. * @returns {SpeedyPipelineNodePerspectiveWarp}
  12769. */
  12770. static PerspectiveWarp(name = undefined) {
  12771. return new SpeedyPipelineNodePerspectiveWarp(name);
  12772. }
  12773. }
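/*
 * Illustrative use of the transform factory above (a sketch). The homography
 * is a 3x3 SpeedyMatrix given in column-major order, as in the PerspectiveWarp
 * constructor; a near-singular transform makes the node fall back to
 * SINGULAR_MATRIX instead of throwing at run time.
 */
// const resize = SpeedyPipelineTransformFactory.Resize('halve');
// resize.scale = new SpeedyVector2(0.5, 0.5);
//
// const warp = SpeedyPipelineTransformFactory.PerspectiveWarp('warp');
// warp.transform = speedy_matrix.SpeedyMatrix.Create(3, 3,
//     [1, 0, 0,  0, 1, 0,  20, 30, 1]); // translation homography (tx = 20, ty = 30)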
  12774. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
  12775. /*
  12776. * speedy-vision.js
  12777. * GPU-accelerated Computer Vision for JavaScript
  12778. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12779. *
  12780. * Licensed under the Apache License, Version 2.0 (the "License");
  12781. * you may not use this file except in compliance with the License.
  12782. * You may obtain a copy of the License at
  12783. *
  12784. * http://www.apache.org/licenses/LICENSE-2.0
  12785. *
  12786. * Unless required by applicable law or agreed to in writing, software
  12787. * distributed under the License is distributed on an "AS IS" BASIS,
  12788. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12789. * See the License for the specific language governing permissions and
  12790. * limitations under the License.
  12791. *
  12792. * detector.js
  12793. * Abstract keypoint detectors
  12794. */
  12795. // Constants
  12796. const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
  12797. const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
  12798. const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
  12799. const NUMBER_OF_RGBA16_TEXTURES = 2;
  12800. // legacy constants
  12801. const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
  12802. const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
  12803. const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
  12804. /**
  12805. * Abstract keypoint detector
  12806. * @abstract
  12807. */
  12808. class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode {
  12809. /**
  12810. * Constructor
  12811. * @param {string} [name] name of the node
  12812. * @param {number} [texCount] number of work textures
  12813. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  12814. */
  12815. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  12816. super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
  12817. /** @type {number} encoder capacity */
  12818. this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
  12819. /** @type {GLint} auxiliary storage */
  12820. this._oldWrapS = 0;
12821. /** @type {SpeedyDrawableTexture[]} textures with 8 bytes per pixel */
  12822. this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
  12823. }
  12824. /**
  12825. * Initialize this node
  12826. * @param {SpeedyGPU} gpu
  12827. */
  12828. init(gpu) {
  12829. // initialize
  12830. super.init(gpu);
  12831. // encodeKeypointSkipOffsets() relies on this
  12832. this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
  12833. // allocate RGBA16 textures
  12834. this._allocateTex16(gpu);
  12835. gpu.subscribe(this._allocateTex16, this, gpu);
  12836. }
  12837. /**
  12838. * Release this node
  12839. * @param {SpeedyGPU} gpu
  12840. */
  12841. release(gpu) {
  12842. // deallocate RGBA16 textures
  12843. gpu.unsubscribe(this._allocateTex16, this);
  12844. this._deallocateTex16(gpu);
  12845. // we need to restore the texture parameter because textures come from a pool!
  12846. this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
  12847. // release
  12848. super.release(gpu);
  12849. }
  12850. /**
  12851. * Set a parameter of the special texture
  12852. * @param {GLenum} pname
  12853. * @param {GLint} param new value
  12854. * @returns {GLint} old value of param
  12855. */
  12856. _setupSpecialTexture(pname, param) {
  12857. if (NUMBER_OF_INTERNAL_TEXTURES == 0) return;
  12858. // legacy code
  12859. const texture = this._tex[this._tex.length - 1];
  12860. const gl = texture.gl;
  12861. gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
  12862. const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
  12863. gl.texParameteri(gl.TEXTURE_2D, pname, param);
  12864. gl.bindTexture(gl.TEXTURE_2D, null);
  12865. return oldval;
  12866. }
  12867. /**
  12868. * We can encode up to this many keypoints. If you find a
  12869. * tight bound for this, download times will be faster.
  12870. * @returns {number}
  12871. */
  12872. get capacity() {
  12873. return this._capacity;
  12874. }
  12875. /**
  12876. * We can encode up to this many keypoints. If you find a
  12877. * tight bound for this, download times will be faster.
  12878. * @param {number} capacity
  12879. */
  12880. set capacity(capacity) {
  12881. this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
  12882. }
  12883. /**
  12884. * Create a tiny texture with encoded keypoints out of
  12885. * an encoded corners texture
  12886. * @param {SpeedyGPU} gpu
  12887. * @param {SpeedyTexture} corners input
  12888. * @param {SpeedyDrawableTexture} encodedKeypoints output
  12889. * @param {number} [descriptorSize] in bytes
  12890. * @param {number} [extraSize] in bytes
  12891. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12892. */
  12893. _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12894. const encoderCapacity = this._capacity;
  12895. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
  12896. const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
  12897. const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
  12898. //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
  12899. const maxSize = Math.max(width, height);
  12900. const keypoints = gpu.programs.keypoints;
  12901. // prepare programs
  12902. keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
  12903. keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
  12904. keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12905. // compute lookup table
  12906. let lookupTable = keypoints.initLookupTable(corners);
  12907. for (let b = 1; b < maxSize; b *= 2) lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
  12908. /*
  12909. // debug: view texture
  12910. const lookupView = (keypoints.viewLookupTable.outputs(
  12911. width, height, this._tex[0]
  12912. ))(lookupTable);
  12913. const canvas = gpu.renderToCanvas(lookupView);
  12914. if(!this._ww) document.body.appendChild(canvas);
  12915. this._ww = 1;
  12916. */
  12917. // encode keypoints
  12918. return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
  12919. }
  12920. _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12921. const capacity = this._capacity;
  12922. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12923. const width = corners.width,
  12924. height = corners.height;
  12925. const imageSize = [width, height];
  12926. const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
  12927. const keypoints = gpu.programs.keypoints;
  12928. const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
  12929. // prepare programs
  12930. keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
  12931. keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
  12932. keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
  12933. keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
  12934. // copy the input corners to a special texture
  12935. // that is needed by encodeKeypointSkipOffsets()
  12936. corners = gpu.programs.utils.copy.outputs(width, height, specialTexture)(corners);
  12937. // encode skip offsets
  12938. let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
  12939. for (let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) {
  12940. // to boost performance
  12941. // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
  12942. // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
  12943. offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
  12944. }
  12945. /*
  12946. // debug: view corners
  12947. let cornerview = offsets;
  12948. const canvas = gpu.renderToCanvas(cornerview);
  12949. if(!window._ww) document.body.appendChild(canvas);
  12950. window._ww = 1;
  12951. */
  12952. // encode keypoint positions
  12953. let encodedKps = tex[3].clear();
  12954. for (let j = 0; j < ENCODER_PASSES; j++) encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
  12955. // encode keypoint properties
  12956. return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
  12957. }
  12958. /**
  12959. * Create a tiny texture with zero encoded keypoints
  12960. * @param {SpeedyGPU} gpu
  12961. * @param {SpeedyDrawableTexture} encodedKeypoints output texture
  12962. * @param {number} [descriptorSize] in bytes
  12963. * @param {number} [extraSize] in bytes
  12964. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12965. */
  12966. _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12967. const capacity = 0;
  12968. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12969. const keypoints = gpu.programs.keypoints;
  12970. keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12971. return keypoints.encodeNullKeypoints();
  12972. }
  12973. /**
  12974. * Allocate RGBA16 textures
  12975. * @param {SpeedyGPU} gpu
  12976. */
  12977. _allocateTex16(gpu) {
  12978. const gl = gpu.gl;
  12979. // RGBA16UI is color renderable according to the OpenGL ES 3 spec
  12980. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
  12981. }
  12982. /**
  12983. * Deallocate RGBA16 textures
  12984. * @param {SpeedyGPU} gpu
  12985. */
  12986. _deallocateTex16(gpu) {
  12987. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = this._tex16[i].release();
  12988. }
  12989. /**
  12990. * Compute the length of the keypoint encoder, given its capacity
  12991. * @param {number} encoderCapacity how many keypoints can we fit?
  12992. * @param {number} descriptorSize in bytes
  12993. * @param {number} extraSize in bytes
  12994. */
  12995. static encoderLength(encoderCapacity, descriptorSize, extraSize) {
  12996. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  12997. const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
  12998. return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
  12999. }
  13000. /**
  13001. * The maximum number of keypoints we can store using
  13002. * a particular configuration of a keypoint encoder
  13003. * @param {number} descriptorSize in bytes
  13004. * @param {number} extraSize in bytes
  13005. * @param {number} encoderLength
  13006. */
  13007. static encoderCapacity(descriptorSize, extraSize, encoderLength) {
  13008. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  13009. const numberOfPixels = encoderLength * encoderLength;
  13010. return Math.floor(numberOfPixels / pixelsPerKeypoint);
  13011. }
  13012. }
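/*
 * Worked example of the encoder sizing above. The header of each encoded
 * keypoint occupies globals.MIN_KEYPOINT_SIZE bytes (8 bytes, judging from the
 * keypoint decoder further below, which reads four little-endian uint16 header
 * fields); descriptor and extra bytes are appended to it, and everything is
 * packed into 4-byte RGBA pixels of a roughly square texture.
 *
 * Say MIN_KEYPOINT_SIZE = 8, descriptorSize = 32, extraSize = 0 and a capacity
 * of 8192 keypoints:
 *
 *   pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10
 *   numberOfPixels    = 8192 * 10 = 81920
 *   encoderLength     = max(MIN_ENCODER_LENGTH, ceil(sqrt(81920))) = 287
 *                       (assuming MIN_ENCODER_LENGTH <= 287)
 *
 * Conversely, encoderCapacity(32, 0, 287) = floor(287 * 287 / 10) = 8236 >= 8192.
 */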
  13013. /**
  13014. * Abstract scale-space keypoint detector
  13015. * @abstract
  13016. */
  13017. class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector {
  13018. /**
  13019. * Constructor
  13020. * @param {string} [name] name of the node
  13021. * @param {number} [texCount] number of work textures
  13022. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  13023. */
  13024. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  13025. super(name, texCount, portBuilders);
  13026. /** @type {number} number of pyramid levels */
  13027. this._levels = 1;
  13028. /** @type {number} scale factor between two pyramid levels */
  13029. this._scaleFactor = DEFAULT_SCALE_FACTOR;
  13030. }
  13031. /**
  13032. * Number of pyramid levels
  13033. * @returns {number}
  13034. */
  13035. get levels() {
  13036. return this._levels;
  13037. }
  13038. /**
  13039. * Number of pyramid levels
  13040. * @param {number} levels
  13041. */
  13042. set levels(levels) {
  13043. this._levels = Math.max(1, levels | 0);
  13044. }
  13045. /**
  13046. * Scale factor between two pyramid levels
  13047. * @returns {number}
  13048. */
  13049. get scaleFactor() {
  13050. return this._scaleFactor;
  13051. }
  13052. /**
  13053. * Scale factor between two pyramid levels
  13054. * @param {number} scaleFactor should be greater than 1
  13055. */
  13056. set scaleFactor(scaleFactor) {
  13057. this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
  13058. }
  13059. }
  13060. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
  13061. /*
  13062. * speedy-vision.js
  13063. * GPU-accelerated Computer Vision for JavaScript
  13064. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13065. *
  13066. * Licensed under the Apache License, Version 2.0 (the "License");
  13067. * you may not use this file except in compliance with the License.
  13068. * You may obtain a copy of the License at
  13069. *
  13070. * http://www.apache.org/licenses/LICENSE-2.0
  13071. *
  13072. * Unless required by applicable law or agreed to in writing, software
  13073. * distributed under the License is distributed on an "AS IS" BASIS,
  13074. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13075. * See the License for the specific language governing permissions and
  13076. * limitations under the License.
  13077. *
  13078. * source.js
  13079. * Gets keypoints into the pipeline
  13080. */
  13081. // Constants
  13082. const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
  13083. const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
  13084. const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
  13085. /**
  13086. * Gets keypoints into the pipeline
  13087. */
  13088. class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode {
  13089. /**
  13090. * Constructor
  13091. * @param {string} [name] name of the node
  13092. */
  13093. constructor(name = undefined) {
  13094. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13095. /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
  13096. this._keypoints = [];
  13097. /** @type {Float32Array} upload buffer (UBO) */
  13098. this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
  13099. /** @type {number} maximum number of keypoints */
  13100. this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
  13101. }
  13102. /**
  13103. * Keypoints to be uploaded
  13104. * @returns {SpeedyKeypoint[]}
  13105. */
  13106. get keypoints() {
  13107. return this._keypoints;
  13108. }
  13109. /**
  13110. * Keypoints to be uploaded
  13111. * @param {SpeedyKeypoint[]} keypoints
  13112. */
  13113. set keypoints(keypoints) {
  13114. if (!Array.isArray(keypoints)) throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
  13115. this._keypoints = keypoints;
  13116. }
  13117. /**
  13118. * The maximum number of keypoints we'll accept.
  13119. * This should be a tight bound for better performance.
  13120. * @returns {number}
  13121. */
  13122. get capacity() {
  13123. return this._capacity;
  13124. }
  13125. /**
  13126. * The maximum number of keypoints we'll accept.
  13127. * This should be a tight bound for better performance.
  13128. * @param {number} capacity
  13129. */
  13130. set capacity(capacity) {
  13131. this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
  13132. }
  13133. /**
  13134. * Run the specific task of this node
  13135. * @param {SpeedyGPU} gpu
  13136. * @returns {void|SpeedyPromise<void>}
  13137. */
  13138. _run(gpu) {
  13139. // Orientation, descriptors and extra bytes will be lost
  13140. const descriptorSize = 0,
  13141. extraSize = 0;
  13142. const keypoints = this._keypoints;
  13143. const maxKeypoints = this._capacity;
  13144. const numKeypoints = Math.min(keypoints.length, maxKeypoints);
  13145. const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
  13146. const buffer = this._buffer;
  13147. const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
  13148. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
  13149. uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
  13150. let startIndex = 0,
  13151. encodedKeypoints = uploadKeypoints.clear();
  13152. for (let i = 0; i < numPasses; i++) {
  13153. const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
  13154. const endIndex = startIndex + n;
  13155. uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
  13156. encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
  13157. startIndex = endIndex;
  13158. }
  13159. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13160. }
  13161. /**
  13162. * Create an upload buffer
  13163. * @param {number} bufferSize number of keypoints
  13164. * @returns {Float32Array}
  13165. */
  13166. static _createUploadBuffer(bufferSize) {
  13167. const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
  13168. utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
  13169. return new Float32Array(internalBuffer);
  13170. }
  13171. /**
  13172. * Fill upload buffer with keypoint data
  13173. * @param {Float32Array} buffer
  13174. * @param {SpeedyKeypoint[]} keypoints
  13175. * @param {number} start index, inclusive
  13176. * @param {number} end index, exclusive
  13177. * @returns {Float32Array} buffer
  13178. */
  13179. static _fillUploadBuffer(buffer, keypoints, start, end) {
  13180. const n = end - start;
  13181. for (let i = 0; i < n; i++) {
  13182. const keypoint = keypoints[start + i];
  13183. const hasPos = keypoint.position !== undefined;
  13184. const j = i * 4;
  13185. // Format data as follows:
  13186. // vec4(xpos, ypos, lod, score)
  13187. buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
  13188. buffer[j + 1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
  13189. buffer[j + 2] = +keypoint.lod || 0;
  13190. buffer[j + 3] = +keypoint.score || 0;
  13191. }
  13192. // done!
  13193. return buffer;
  13194. }
  13195. }
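/*
 * Sketch of the upload layout used by the keypoint source above. Each keypoint
 * becomes one vec4(x, y, lod, score) = 16 bytes in a Uniform Buffer Object, so
 * a single pass of uploadKeypoints handles at most BUFFER_SIZE = 1024 keypoints:
 *
 *   1024 keypoints * 16 bytes = 16384 bytes = UBO_MAX_BYTES
 *
 * Larger arrays are split into max(1, ceil(numKeypoints / 1024)) passes.
 * Orientation, descriptors and extra bytes are not uploaded (see _run above).
 */
// const source = new SpeedyPipelineNodeKeypointSource('my-keypoints');
// source.capacity = 2048; // a tight bound keeps the encoder texture small
// source.keypoints = [
//     new SpeedyKeypoint(10, 20, 0, 0, 0, null), // (x, y, lod, rotation, score, descriptor)
//     new SpeedyKeypoint(30, 40, 0, 0, 0, null)
// ];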
  13196. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
  13197. /*
  13198. * speedy-vision.js
  13199. * GPU-accelerated Computer Vision for JavaScript
  13200. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13201. *
  13202. * Licensed under the Apache License, Version 2.0 (the "License");
  13203. * you may not use this file except in compliance with the License.
  13204. * You may obtain a copy of the License at
  13205. *
  13206. * http://www.apache.org/licenses/LICENSE-2.0
  13207. *
  13208. * Unless required by applicable law or agreed to in writing, software
  13209. * distributed under the License is distributed on an "AS IS" BASIS,
  13210. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13211. * See the License for the specific language governing permissions and
  13212. * limitations under the License.
  13213. *
  13214. * speedy-keypoint-descriptor.js
  13215. * Keypoint descriptor
  13216. */
  13217. /**
  13218. * Represents a keypoint descriptor
  13219. */
  13220. class SpeedyKeypointDescriptor {
  13221. /**
  13222. * Constructor
  13223. * @param {Uint8Array} data descriptor bytes
  13224. */
  13225. constructor(data) {
  13226. this._data = data;
  13227. return Object.freeze(this);
  13228. }
  13229. /**
  13230. * Descriptor data
  13231. * @returns {Uint8Array}
  13232. */
  13233. get data() {
  13234. return this._data;
  13235. }
  13236. /**
  13237. * The size of the descriptor, in bytes
  13238. * @returns {number}
  13239. */
  13240. get size() {
  13241. return this._data.byteLength;
  13242. }
  13243. /**
  13244. * A string representation of the keypoint descriptor
  13245. * @returns {string}
  13246. */
  13247. toString() {
  13248. return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
  13249. }
  13250. }
  13251. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
  13252. /*
  13253. * speedy-vision.js
  13254. * GPU-accelerated Computer Vision for JavaScript
  13255. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13256. *
  13257. * Licensed under the Apache License, Version 2.0 (the "License");
  13258. * you may not use this file except in compliance with the License.
  13259. * You may obtain a copy of the License at
  13260. *
  13261. * http://www.apache.org/licenses/LICENSE-2.0
  13262. *
  13263. * Unless required by applicable law or agreed to in writing, software
  13264. * distributed under the License is distributed on an "AS IS" BASIS,
  13265. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13266. * See the License for the specific language governing permissions and
  13267. * limitations under the License.
  13268. *
  13269. * sink.js
  13270. * Gets keypoints out of the pipeline
  13271. */
  13272. /** next power of 2 */
  13273. const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  13274. /** empty array of bytes */
  13275. const ZERO_BYTES = new Uint8Array([]);
  13276. /**
  13277. * Gets keypoints out of the pipeline
  13278. * @template {SpeedyKeypoint} T
  13279. * @abstract
  13280. */
  13281. class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode {
  13282. /**
  13283. * Constructor
  13284. * @param {string} [name] name of the node
  13285. * @param {number} [texCount]
  13286. * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
  13287. */
  13288. constructor(name = 'keypoints', texCount = 0, portBuilders = []) {
  13289. super(name, texCount + 2, portBuilders);
  13290. /** @type {Array<T|null>} keypoints (output) */
  13291. this._keypoints = [];
  13292. /** @type {SpeedyTextureReader} texture reader */
  13293. this._textureReader = new SpeedyTextureReader();
  13294. /** @type {number} page flipping index */
  13295. this._page = 0;
  13296. /** @type {boolean} accelerate GPU-CPU transfers */
  13297. this._turbo = false;
  13298. /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
  13299. this._includeDiscarded = false;
  13300. }
  13301. /**
  13302. * Accelerate GPU-CPU transfers
  13303. * @returns {boolean}
  13304. */
  13305. get turbo() {
  13306. return this._turbo;
  13307. }
  13308. /**
  13309. * Accelerate GPU-CPU transfers
  13310. * @param {boolean} value
  13311. */
  13312. set turbo(value) {
  13313. this._turbo = Boolean(value);
  13314. }
  13315. /**
  13316. * Should discarded keypoints be exported as null or dropped altogether?
  13317. * @returns {boolean}
  13318. */
  13319. get includeDiscarded() {
  13320. return this._includeDiscarded;
  13321. }
  13322. /**
  13323. * Should discarded keypoints be exported as null or dropped altogether?
  13324. * @param {boolean} value
  13325. */
  13326. set includeDiscarded(value) {
  13327. this._includeDiscarded = Boolean(value);
  13328. }
  13329. /**
  13330. * Initializes this node
  13331. * @param {SpeedyGPU} gpu
  13332. */
  13333. init(gpu) {
  13334. super.init(gpu);
  13335. this._textureReader.init(gpu);
  13336. }
  13337. /**
  13338. * Releases this node
  13339. * @param {SpeedyGPU} gpu
  13340. */
  13341. release(gpu) {
  13342. this._textureReader.release(gpu);
  13343. super.release(gpu);
  13344. }
  13345. /**
  13346. * Export data from this node to the user
  13347. * @returns {SpeedyPromise<Array<T|null>>}
  13348. */
  13349. export() {
  13350. return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
  13351. }
  13352. /**
  13353. * Run the specific task of this node
  13354. * @param {SpeedyGPU} gpu
  13355. * @returns {void|SpeedyPromise<void>}
  13356. */
  13357. _run(gpu) {
  13358. const {
  13359. encodedKeypoints,
  13360. descriptorSize,
  13361. extraSize,
  13362. encoderLength
  13363. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13364. return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13365. }
  13366. /**
  13367. * Download and decode keypoints from the GPU
  13368. * @param {SpeedyGPU} gpu
  13369. * @param {SpeedyDrawableTexture} encodedKeypoints
  13370. * @param {number} descriptorSize
  13371. * @param {number} extraSize
  13372. * @param {number} encoderLength
  13373. * @returns {SpeedyPromise<void>}
  13374. */
  13375. _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  13376. const useBufferedDownloads = this._turbo;
  13377. /*
  13378. I have found experimentally that, in Firefox, readPixelsAsync()
  13379. performs MUCH better if the width of the target texture is a power
  13380. of two. I have no idea why this is the case, nor if it's related to
  13381. some interaction with the GL drivers, somehow. This seems to make no
  13382. difference on Chrome, however. In any case, let's convert the input
  13383. texture to POT.
  13384. */
  13385. const encoderWidth = sink_nextPot(encoderLength);
  13386. //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  13387. const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
  13388. //const encoderWidth=encoderLength,encoderHeight=encoderLength;
  13389. // copy the set of keypoints to an internal texture
  13390. const copiedTexture = this._tex[this._tex.length - 1 - this._page];
  13391. gpu.programs.utils.copyKeypoints.outputs(encoderWidth, encoderHeight, copiedTexture)(encodedKeypoints);
  13392. // flip page
  13393. this._page = 1 - this._page;
  13394. // download the internal texture
  13395. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  13396. // decode the keypoints and store them in this._keypoints
  13397. this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
  13398. });
  13399. }
  13400. /**
  13401. * Decode a sequence of keypoints, given a flattened image of encoded pixels
  13402. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  13403. * @param {number} descriptorSize in bytes
  13404. * @param {number} extraSize in bytes
  13405. * @param {number} encoderWidth
  13406. * @param {number} encoderHeight
  13407. * @returns {Array<T|null>} keypoints
  13408. */
  13409. _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight) {
  13410. const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
  13411. const m = globals.LOG2_PYRAMID_MAX_SCALE,
  13412. h = globals.PYRAMID_MAX_LEVELS;
  13413. const piOver255 = Math.PI / 255.0;
  13414. const keypoints = /** @type {Array<T|null>} */[];
  13415. const includeDiscarded = this._includeDiscarded;
  13416. let descriptorBytes = ZERO_BYTES,
  13417. extraBytes = ZERO_BYTES;
  13418. let x, y, z, w, lod, rotation, score;
  13419. let keypoint;
  13420. // validate
  13421. if (descriptorSize % 4 != 0 || extraSize % 4 != 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
  13422. // how many bytes should we read?
  13423. const e2 = encoderWidth * encoderHeight * 4;
  13424. const size = pixels.byteLength;
  13425. if (size != e2) utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
  13426. // copy the data (we use shared buffers when receiving pixels[])
  13427. if (descriptorSize + extraSize > 0) pixels = new Uint8Array(pixels);
  13428. // for each encoded keypoint
  13429. for (let i = 0; i < size; i += bytesPerKeypoint) {
  13430. // extract encoded header
  13431. x = pixels[i + 1] << 8 | pixels[i];
  13432. y = pixels[i + 3] << 8 | pixels[i + 2];
  13433. z = pixels[i + 5] << 8 | pixels[i + 4];
  13434. w = pixels[i + 7] << 8 | pixels[i + 6];
  13435. // the keypoint is "null": we have reached the end of the list
  13436. if (x == 0xFFFF && y == 0xFFFF) break;
  13437. // the header is zero: discard the keypoint
  13438. if (x + y + z + w == 0) {
  13439. if (includeDiscarded) keypoints.push(null);
  13440. continue;
  13441. }
  13442. // extract extra & descriptor bytes
  13443. if (extraSize > 0) {
  13444. extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
  13445. if (extraBytes.byteLength < extraSize) {
  13446. utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
  13447. continue; // something is off here; discard
  13448. }
  13449. }
  13450. if (descriptorSize > 0) {
  13451. descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
  13452. if (descriptorBytes.byteLength < descriptorSize) {
  13453. utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
  13454. continue; // something is off here; discard
  13455. }
  13456. }
  13457. // decode position: convert from fixed-point
  13458. x /= globals.FIX_RESOLUTION;
  13459. y /= globals.FIX_RESOLUTION;
  13460. // decode level-of-detail
  13461. lod = pixels[i + 4] < 255 ? -m + (m + h) * pixels[i + 4] / 255.0 : 0.0;
  13462. // decode orientation
  13463. rotation = (2 * pixels[i + 5] - 255) * piOver255;
  13464. // decode score
  13465. score = utils/* Utils */.A.decodeFloat16(w);
  13466. // create keypoint
  13467. keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
  13468. // register keypoint
  13469. keypoints.push(keypoint);
  13470. }
  13471. // done!
  13472. return keypoints;
  13473. }
  13474. /**
  13475. * Instantiate a new keypoint
  13476. * @param {number} x
  13477. * @param {number} y
  13478. * @param {number} lod
  13479. * @param {number} rotation
  13480. * @param {number} score
  13481. * @param {Uint8Array} descriptorBytes
  13482. * @param {Uint8Array} extraBytes
  13483. * @returns {T}
  13484. */
  13485. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13486. throw new utils_errors/* AbstractMethodError */.aQ();
  13487. }
  13488. /**
  13489. * Allocate extra space
  13490. * @param {SpeedyGPU} gpu
  13491. * @param {SpeedyDrawableTexture} output output texture
  13492. * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
  13493. * @param {number} inputDescriptorSize in bytes, must be positive
  13494. * @param {number} inputExtraSize must be 0
  13495. * @param {number} outputDescriptorSize must be inputDescriptorSize
  13496. * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
  13497. * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
  13498. */
  13499. _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize) {
  13500. utils/* Utils */.A.assert(inputExtraSize === 0);
  13501. utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
  13502. const inputEncoderLength = inputEncodedKeypoints.width;
  13503. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  13504. const outputEncoderCapacity = inputEncoderCapacity;
  13505. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  13506. return gpu.programs.keypoints.allocateExtra.outputs(outputEncoderLength, outputEncoderLength, output)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  13507. }
  13508. }
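/*
 * Sketch of the per-keypoint header decoding performed by _decode() above,
 * factored out for a single keypoint. The constants (FIX_RESOLUTION,
 * LOG2_PYRAMID_MAX_SCALE, PYRAMID_MAX_LEVELS) and the float16 decoder live in
 * the globals/utils modules of this bundle and are taken here as parameters;
 * the zero-header "discarded keypoint" case is omitted for brevity.
 */
function decodeKeypointHeaderSketch(bytes, fixResolution, m, h, decodeFloat16)
{
    // four little-endian uint16 fields: xfix, yfix, (lod | rotation << 8), score
    const xfix = bytes[1] << 8 | bytes[0];
    const yfix = bytes[3] << 8 | bytes[2];
    const w = bytes[7] << 8 | bytes[6];
    if (xfix == 0xFFFF && yfix == 0xFFFF)
        return null; // end-of-list marker
    const x = xfix / fixResolution; // fixed-point -> float
    const y = yfix / fixResolution;
    const lod = bytes[4] < 255 ? -m + (m + h) * bytes[4] / 255.0 : 0.0;
    const rotation = (2 * bytes[5] - 255) * (Math.PI / 255.0); // in [-PI, PI]
    const score = decodeFloat16(w);
    return { x, y, lod, rotation, score };
}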
  13509. /**
  13510. * Gets standard keypoints out of the pipeline
  13511. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
  13512. */
  13513. class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13514. /**
  13515. * Constructor
  13516. * @param {string} [name] name of the node
  13517. */
  13518. constructor(name = 'keypoints') {
  13519. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13520. }
  13521. /**
  13522. * Instantiate a new keypoint
  13523. * @param {number} x
  13524. * @param {number} y
  13525. * @param {number} lod
  13526. * @param {number} rotation
  13527. * @param {number} score
  13528. * @param {Uint8Array} descriptorBytes
  13529. * @param {Uint8Array} extraBytes
  13530. * @returns {SpeedyKeypoint}
  13531. */
  13532. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13533. const descriptorSize = descriptorBytes.byteLength;
  13534. // read descriptor, if any
  13535. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13536. // create keypoint
  13537. return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
  13538. }
  13539. }
  13540. /**
  13541. * Gets tracked keypoints out of the pipeline
  13542. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
  13543. */
  13544. class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13545. /**
  13546. * Constructor
  13547. * @param {string} [name] name of the node
  13548. */
  13549. constructor(name = 'keypoints') {
  13550. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  13551. }
  13552. /**
  13553. * Run the specific task of this node
  13554. * @param {SpeedyGPU} gpu
  13555. * @returns {void|SpeedyPromise<void>}
  13556. */
  13557. _run(gpu) {
  13558. const {
  13559. encodedKeypoints,
  13560. descriptorSize,
  13561. extraSize,
  13562. encoderLength
  13563. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13564. const {
  13565. vectors
  13566. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input('flow').read();
  13567. // allocate extra space
  13568. const newDescriptorSize = descriptorSize;
  13569. const newExtraSize = 4; // 1 pixel per flow vector per keypoint
  13570. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13571. // attach flow vectors
  13572. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13573. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13574. // done!
  13575. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13576. }
  13577. /**
  13578. * Instantiate a new keypoint
  13579. * @param {number} x
  13580. * @param {number} y
  13581. * @param {number} lod
  13582. * @param {number} rotation
  13583. * @param {number} score
  13584. * @param {Uint8Array} descriptorBytes
  13585. * @param {Uint8Array} extraBytes
  13586. * @returns {SpeedyTrackedKeypoint}
  13587. */
  13588. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13589. const descriptorSize = descriptorBytes.byteLength;
  13590. const extraSize = extraBytes.byteLength;
  13591. // read descriptor, if any
  13592. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13593. // read flow vector
  13594. const fx = utils/* Utils */.A.decodeFloat16(extraBytes[1] << 8 | extraBytes[0]);
  13595. const fy = utils/* Utils */.A.decodeFloat16(extraBytes[3] << 8 | extraBytes[2]);
  13596. const flow = new SpeedyVector2(fx, fy);
  13597. // create keypoint
  13598. return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
  13599. }
  13600. }
  13601. /**
  13602. * Gets matched keypoints out of the pipeline
  13603. * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
  13604. */
  13605. class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13606. /**
  13607. * Constructor
  13608. * @param {string} [name] name of the node
  13609. */
  13610. constructor(name = 'keypoints') {
  13611. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)]);
  13612. }
  13613. /**
  13614. * Run the specific task of this node
  13615. * @param {SpeedyGPU} gpu
  13616. * @returns {void|SpeedyPromise<void>}
  13617. */
  13618. _run(gpu) {
  13619. const {
  13620. encodedKeypoints,
  13621. descriptorSize,
  13622. extraSize,
  13623. encoderLength
  13624. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13625. const {
  13626. encodedMatches,
  13627. matchesPerKeypoint
  13628. } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */this.input('matches').read();
  13629. // allocate space for the matches
  13630. const newDescriptorSize = descriptorSize;
  13631. const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
  13632. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13633. // transfer matches to a new texture
  13634. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13635. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13636. // done!
  13637. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13638. }
  13639. /**
  13640. * Instantiate a new keypoint
  13641. * @param {number} x
  13642. * @param {number} y
  13643. * @param {number} lod
  13644. * @param {number} rotation
  13645. * @param {number} score
  13646. * @param {Uint8Array} descriptorBytes
  13647. * @param {Uint8Array} extraBytes
  13648. * @returns {SpeedyMatchedKeypoint}
  13649. */
  13650. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13651. const descriptorSize = descriptorBytes.byteLength;
  13652. const extraSize = extraBytes.byteLength;
  13653. // read descriptor, if any
  13654. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13655. // decode matches
  13656. const matchesPerKeypoint = extraSize / 4;
  13657. const matches = /** @type {SpeedyKeypointMatch[]} */new Array(matchesPerKeypoint);
  13658. for (let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
  13659. const base = matchIndex * 4;
  13660. const u32 = extraBytes[base] | extraBytes[base + 1] << 8 | extraBytes[base + 2] << 16 | extraBytes[base + 3] << 24;
  13661. const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
  13662. matches[matchIndex] = match;
  13663. }
  13664. // done!
  13665. return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
  13666. }
  13667. }
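/*
 * Sketch of the match decoding above: each match is one little-endian uint32
 * in the extra bytes, packing the matched index in the low MATCH_INDEX_BITS
 * bits and the descriptor distance in the remaining high bits. The actual bit
 * counts come from the globals module and are taken here as parameters.
 */
function decodeMatchSketch(extraBytes, matchIndexBits, matchIndexMask)
{
    const u32 = (extraBytes[0] | extraBytes[1] << 8 | extraBytes[2] << 16 | extraBytes[3] << 24) >>> 0;
    return {
        index: u32 & matchIndexMask,      // index of the matched keypoint
        distance: u32 >>> matchIndexBits  // descriptor distance
    };
}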
  13668. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
  13669. /*
  13670. * speedy-vision.js
  13671. * GPU-accelerated Computer Vision for JavaScript
  13672. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13673. *
  13674. * Licensed under the Apache License, Version 2.0 (the "License");
  13675. * you may not use this file except in compliance with the License.
  13676. * You may obtain a copy of the License at
  13677. *
  13678. * http://www.apache.org/licenses/LICENSE-2.0
  13679. *
  13680. * Unless required by applicable law or agreed to in writing, software
  13681. * distributed under the License is distributed on an "AS IS" BASIS,
  13682. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13683. * See the License for the specific language governing permissions and
  13684. * limitations under the License.
  13685. *
  13686. * clipper.js
  13687. * Keypoint clipper
  13688. */
  13689. // Constants
  13690. const LOG2_STRIDE = 5;
  13691. const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
  13692. /**
  13693. * Keypoint clipper: filters the best keypoints from a stream
  13694. */
  13695. class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode {
  13696. /**
  13697. * Constructor
  13698. * @param {string} [name] name of the node
  13699. */
  13700. constructor(name = undefined) {
  13701. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13702. /** @type {number} the maximum number of keypoints in the output */
  13703. this._size = MAX_SIZE;
  13704. }
  13705. /**
  13706. * The maximum number of keypoints in the output
  13707. * @returns {number}
  13708. */
  13709. get size() {
  13710. return this._size;
  13711. }
  13712. /**
  13713. * The maximum number of keypoints in the output
  13714. * @param {number} size
  13715. */
  13716. set size(size) {
  13717. this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
  13718. }
  13719. /**
  13720. * Run the specific task of this node
  13721. * @param {SpeedyGPU} gpu
  13722. * @returns {void|SpeedyPromise<void>}
  13723. */
  13724. _run(gpu) {
  13725. const {
  13726. encodedKeypoints,
  13727. descriptorSize,
  13728. extraSize,
  13729. encoderLength
  13730. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13731. const keypoints = gpu.programs.keypoints;
  13732. const clipValue = this._size;
  13733. const tex = this._tex;
  13734. const outputTexture = this._tex[3];
13735. // find the minimum power of two (pot) such that pot >= capacity
  13736. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13737. //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
  13738. // find the dimensions of the sorting shaders
  13739. const stride = 1 << LOG2_STRIDE; // must be a power of 2
  13740. //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
  13741. const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
  13742. const numberOfPixels = stride * height;
  13743. // find the dimensions of the output texture
  13744. const newCapacity = Math.min(capacity, clipValue);
  13745. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
  13746. // generate permutation of keypoints
  13747. keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
  13748. let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13749. // sort permutation
  13750. const numPasses = Math.ceil(Math.log2(numberOfPixels));
  13751. keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
  13752. for (let i = 1; i <= numPasses; i++) {
  13753. const blockSize = 1 << i; // 2, 4, 8...
  13754. const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
  13755. permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
  13756. }
  13757. // apply permutation
  13758. keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
  13759. keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
  13760. /*
  13761. // debug (read the contents of the permutation)
  13762. const pixels = permutation.inspect(gpu), debug = [];
  13763. for(let i = 0; i < pixels.length; i += 4) {
  13764. let id = pixels[i] | (pixels[i+1] << 8);
  13765. let score = pixels[i+2] / 255.0;
  13766. let valid = pixels[i+3] / 255.0;
  13767. debug.push([ id, valid, score, ].join(', '));
  13768. }
  13769. console.log(debug);
  13770. */
  13771. // done!
  13772. this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
  13773. }
  13774. }
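/*
// Usage sketch (illustration only; not part of the bundle). The factory name
// Speedy.Keypoint.Clipper and the `detector` / `sink` nodes are assumptions about
// the public pipeline API; adjust to your build.
const clipper = Speedy.Keypoint.Clipper();
clipper.size = 800; // keep at most the 800 best keypoints
detector.output().connectTo(clipper.input());
clipper.output().connectTo(sink.input());
*/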
  13775. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
  13776. /*
  13777. * speedy-vision.js
  13778. * GPU-accelerated Computer Vision for JavaScript
  13779. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13780. *
  13781. * Licensed under the Apache License, Version 2.0 (the "License");
  13782. * you may not use this file except in compliance with the License.
  13783. * You may obtain a copy of the License at
  13784. *
  13785. * http://www.apache.org/licenses/LICENSE-2.0
  13786. *
  13787. * Unless required by applicable law or agreed to in writing, software
  13788. * distributed under the License is distributed on an "AS IS" BASIS,
  13789. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13790. * See the License for the specific language governing permissions and
  13791. * limitations under the License.
  13792. *
  13793. * border-clipper.js
  13794. * Keypoint Border Clipper
  13795. */
  13796. /**
13797. * The Border Clipper removes all keypoints that lie within a given border along the edges of an image
  13798. */
  13799. class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode {
  13800. /**
  13801. * Constructor
  13802. * @param {string} [name] name of the node
  13803. */
  13804. constructor(name = undefined) {
  13805. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13806. /** @type {SpeedySize} image size, in pixels */
  13807. this._imageSize = new SpeedySize(0, 0);
  13808. /** @type {SpeedyVector2} border size, in pixels */
  13809. this._borderSize = new SpeedyVector2(0, 0);
  13810. }
  13811. /**
  13812. * Image size, in pixels
  13813. * @returns {SpeedySize}
  13814. */
  13815. get imageSize() {
  13816. return this._imageSize;
  13817. }
  13818. /**
  13819. * Image size, in pixels
  13820. * @param {SpeedySize} imageSize
  13821. */
  13822. set imageSize(imageSize) {
  13823. this._imageSize = imageSize;
  13824. }
  13825. /**
  13826. * Border size, in pixels
  13827. * @returns {SpeedyVector2}
  13828. */
  13829. get borderSize() {
  13830. return this._borderSize;
  13831. }
  13832. /**
  13833. * Border size, in pixels
  13834. * @param {SpeedyVector2} borderSize
  13835. */
  13836. set borderSize(borderSize) {
  13837. this._borderSize = borderSize;
  13838. }
  13839. /**
  13840. * Run the specific task of this node
  13841. * @param {SpeedyGPU} gpu
  13842. * @returns {void|SpeedyPromise<void>}
  13843. */
  13844. _run(gpu) {
  13845. const {
  13846. encodedKeypoints,
  13847. descriptorSize,
  13848. extraSize,
  13849. encoderLength
  13850. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13851. const keypoints = gpu.programs.keypoints;
  13852. const imageSize = this._imageSize;
  13853. const borderSize = this._borderSize;
  13854. const imageWidth = imageSize.width,
  13855. imageHeight = imageSize.height;
  13856. const borderLeft = borderSize.x,
  13857. borderRight = borderSize.x;
  13858. const borderTop = borderSize.y,
  13859. borderBottom = borderSize.y;
  13860. const tex = this._tex;
  13861. // validate
  13862. if (imageWidth == 0 || imageHeight == 0) throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
  13863. // find the capacity of the keypoint stream
  13864. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13865. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  13866. // prepare programs
  13867. keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
  13868. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13869. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  13870. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  13871. // clip keypoints
  13872. let clippedKeypoints = keypoints.clipBorder(imageWidth, imageHeight, borderTop, borderRight, borderBottom, borderLeft, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13873. // sort keypoints
  13874. let sortedKeypoints = keypoints.mixKeypointsInit(clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  13875. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  13876. clippedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13877. /*
  13878. // debug: view keypoints
  13879. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13880. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  13881. */
  13882. // done!
  13883. this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13884. }
  13885. }
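/*
// Usage sketch (illustration only). Speedy.Keypoint.BorderClipper, Speedy.Size and
// Speedy.Vector2 are assumed public factory names; adjust to your build.
const borderClipper = Speedy.Keypoint.BorderClipper();
borderClipper.imageSize = Speedy.Size(640, 480);   // must be set, see the validation above
borderClipper.borderSize = Speedy.Vector2(16, 16); // discard keypoints closer than 16px to the edges
detector.output().connectTo(borderClipper.input());
borderClipper.output().connectTo(sink.input());
*/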
  13886. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
  13887. /*
  13888. * speedy-vision.js
  13889. * GPU-accelerated Computer Vision for JavaScript
  13890. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13891. *
  13892. * Licensed under the Apache License, Version 2.0 (the "License");
  13893. * you may not use this file except in compliance with the License.
  13894. * You may obtain a copy of the License at
  13895. *
  13896. * http://www.apache.org/licenses/LICENSE-2.0
  13897. *
  13898. * Unless required by applicable law or agreed to in writing, software
  13899. * distributed under the License is distributed on an "AS IS" BASIS,
  13900. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13901. * See the License for the specific language governing permissions and
  13902. * limitations under the License.
  13903. *
  13904. * buffer.js
  13905. * Keypoint Buffer
  13906. */
  13907. /**
  13908. * Keypoint Buffer: a node with memory.
  13909. * At time t, it outputs the keypoints received at time t-1
  13910. */
  13911. class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode {
  13912. /**
  13913. * Constructor
  13914. * @param {string} [name] name of the node
  13915. */
  13916. constructor(name = undefined) {
  13917. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13918. /** @type {number} current page: 0 or 1 */
  13919. this._pageIndex = 0;
  13920. /** @type {boolean} first run? */
  13921. this._initialized = false;
  13922. /** @type {number} previous descriptor size, in bytes */
  13923. this._previousDescriptorSize = 0;
  13924. /** @type {number} previous extra size, in bytes */
  13925. this._previousExtraSize = 0;
  13926. /** @type {number} previous encoder length */
  13927. this._previousEncoderLength = 0;
  13928. /** @type {boolean} frozen buffer? */
  13929. this._frozen = false;
  13930. }
  13931. /**
  13932. * A frozen buffer discards the input, effectively increasing the buffering time
  13933. * @returns {boolean}
  13934. */
  13935. get frozen() {
  13936. return this._frozen;
  13937. }
  13938. /**
  13939. * A frozen buffer discards the input, effectively increasing the buffering time
  13940. * @param {boolean} value
  13941. */
  13942. set frozen(value) {
  13943. this._frozen = Boolean(value);
  13944. }
  13945. /**
  13946. * Releases this node
  13947. * @param {SpeedyGPU} gpu
  13948. */
  13949. release(gpu) {
  13950. this._initialized = false;
  13951. super.release(gpu);
  13952. }
  13953. /**
  13954. * Run the specific task of this node
  13955. * @param {SpeedyGPU} gpu
  13956. * @returns {void|SpeedyPromise<void>}
  13957. */
  13958. _run(gpu) {
  13959. const {
  13960. encodedKeypoints,
  13961. descriptorSize,
  13962. extraSize,
  13963. encoderLength
  13964. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13965. const previousDescriptorSize = this._previousDescriptorSize;
  13966. const previousExtraSize = this._previousExtraSize;
  13967. const previousEncoderLength = this._previousEncoderLength;
  13968. const page = this._tex;
  13969. const previousInputTexture = page[1 - this._pageIndex];
  13970. const outputTexture = page[this._pageIndex];
  13971. // bufferize
  13972. if (!this._frozen || !this._initialized) {
  13973. // store input
  13974. this._previousDescriptorSize = descriptorSize;
  13975. this._previousExtraSize = extraSize;
  13976. this._previousEncoderLength = encoderLength;
  13977. previousInputTexture.resize(encoderLength, encoderLength);
  13978. encodedKeypoints.copyTo(previousInputTexture);
  13979. // page flipping
  13980. this._pageIndex = 1 - this._pageIndex;
  13981. }
  13982. // first run?
  13983. if (!this._initialized) {
  13984. this._initialized = true;
  13985. this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
  13986. return;
  13987. }
  13988. // done!
  13989. this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
  13990. }
  13991. }
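/*
// Usage sketch (illustration only). A buffer delays keypoints by one frame, which is
// handy when feeding "previous" keypoints to a tracker. Speedy.Keypoint.Buffer and
// the surrounding nodes are assumed names from the public API.
const buffer = Speedy.Keypoint.Buffer();
buffer.frozen = false; // set to true to keep replaying the stored frame
detector.output().connectTo(buffer.input());
buffer.output().connectTo(tracker.input('previousKeypoints'));
*/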
  13992. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
  13993. /*
  13994. * speedy-vision.js
  13995. * GPU-accelerated Computer Vision for JavaScript
  13996. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13997. *
  13998. * Licensed under the Apache License, Version 2.0 (the "License");
  13999. * you may not use this file except in compliance with the License.
  14000. * You may obtain a copy of the License at
  14001. *
  14002. * http://www.apache.org/licenses/LICENSE-2.0
  14003. *
  14004. * Unless required by applicable law or agreed to in writing, software
  14005. * distributed under the License is distributed on an "AS IS" BASIS,
  14006. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14007. * See the License for the specific language governing permissions and
  14008. * limitations under the License.
  14009. *
  14010. * mixer.js
  14011. * Keypoint Mixer
  14012. */
  14013. /**
  14014. * Keypoint Mixer: merges two sets of keypoints
  14015. */
  14016. class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode {
  14017. /**
  14018. * Constructor
  14019. * @param {string} [name] name of the node
  14020. */
  14021. constructor(name = undefined) {
  14022. super(name, 5, [InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints), InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14023. }
  14024. /**
  14025. * Run the specific task of this node
  14026. * @param {SpeedyGPU} gpu
  14027. * @returns {void|SpeedyPromise<void>}
  14028. */
  14029. _run(gpu) {
  14030. const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in0').read();
  14031. const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in1').read();
  14032. const descriptorSize = kps0.descriptorSize;
  14033. const extraSize = kps0.extraSize;
  14034. const keypoints = gpu.programs.keypoints;
  14035. const tex = this._tex;
  14036. // ensure that the format of kps0 equals the format of kps1
14037. if (!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize)) throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
  14038. // find the capacity of kps0 + kps1
  14039. const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
  14040. const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
  14041. const capacity = cap0 + cap1;
  14042. // find the dimensions of the output texture
  14043. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  14044. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  14045. // prepare programs
  14046. keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
  14047. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14048. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  14049. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  14050. // mix keypoints
  14051. let mixedKeypoints = keypoints.mixKeypointsPreInit(kps0.encodedKeypoints, kps1.encodedKeypoints, kps0.encoderLength, kps1.encoderLength, cap0, cap1, descriptorSize, extraSize, encoderLength);
  14052. let sortedKeypoints = keypoints.mixKeypointsInit(mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14053. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  14054. mixedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14055. /*
  14056. // debug: view keypoints
  14057. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14058. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  14059. */
  14060. this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14061. }
  14062. }
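/*
// Usage sketch (illustration only). Both inputs must share the same descriptor and
// extra sizes, as enforced above. Speedy.Keypoint.Mixer and the detector nodes are
// assumed public factory names.
const mixer = Speedy.Keypoint.Mixer();
fastDetector.output().connectTo(mixer.input('in0'));
harrisDetector.output().connectTo(mixer.input('in1'));
mixer.output().connectTo(sink.input());
*/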
  14063. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
  14064. /*
  14065. * speedy-vision.js
  14066. * GPU-accelerated Computer Vision for JavaScript
  14067. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14068. *
  14069. * Licensed under the Apache License, Version 2.0 (the "License");
  14070. * you may not use this file except in compliance with the License.
  14071. * You may obtain a copy of the License at
  14072. *
  14073. * http://www.apache.org/licenses/LICENSE-2.0
  14074. *
  14075. * Unless required by applicable law or agreed to in writing, software
  14076. * distributed under the License is distributed on an "AS IS" BASIS,
  14077. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14078. * See the License for the specific language governing permissions and
  14079. * limitations under the License.
  14080. *
  14081. * shuffler.js
  14082. * Keypoint Shuffler
  14083. */
  14084. /**
  14085. * The Keypoint Shuffler shuffles a list of keypoints
  14086. */
  14087. class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode {
  14088. /**
  14089. * Constructor
  14090. * @param {string} [name] name of the node
  14091. */
  14092. constructor(name = undefined) {
  14093. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14094. /** @type {number} maximum number of keypoints */
  14095. this._maxKeypoints = Number.NaN;
  14096. }
  14097. /**
  14098. * Maximum number of keypoints (optional)
  14099. * @returns {number}
  14100. */
  14101. get maxKeypoints() {
  14102. return this._maxKeypoints;
  14103. }
  14104. /**
  14105. * Maximum number of keypoints (optional)
  14106. * @param {number} value
  14107. */
  14108. set maxKeypoints(value) {
14109. this._maxKeypoints = Number.isNaN(value) ? Number.NaN : Math.max(0, value | 0);
  14110. }
  14111. /**
  14112. * Run the specific task of this node
  14113. * @param {SpeedyGPU} gpu
  14114. * @returns {void|SpeedyPromise<void>}
  14115. */
  14116. _run(gpu) {
  14117. let {
  14118. encodedKeypoints,
  14119. descriptorSize,
  14120. extraSize,
  14121. encoderLength
  14122. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14123. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14124. const maxKeypoints = this._maxKeypoints;
  14125. // shuffle the keypoints (including nulls)
  14126. const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
  14127. const permutationLength = Math.min(permutationMaxLength, capacity);
  14128. const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
  14129. encodedKeypoints = gpu.programs.keypoints.shuffle.setUBO('Permutation', permutation).outputs(encoderLength, encoderLength, this._tex[0])(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14130. // sort the keypoints
  14131. gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
  14132. gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
  14133. gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
  14134. let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14135. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
  14136. encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14137. // clip the output?
  14138. if (!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
  14139. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
  14140. encodedKeypoints = gpu.programs.keypoints.clip.outputs(newEncoderLength, newEncoderLength, this._tex[5])(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
  14141. encoderLength = newEncoderLength;
  14142. }
  14143. // done!
  14144. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14145. }
  14146. /**
  14147. * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
  14148. * @param {number} n positive integer
  14149. * @param {number} [bufsize] size of the output array
  14150. * @returns {Int32Array} permutation
  14151. */
  14152. _generatePermutation(n, bufsize = n) {
  14153. const array = new Int32Array(bufsize);
  14154. const p = array.subarray(0, n).fill(-1);
  14155. const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
  14156. for (let i = 0, j = 0; i < n; i++) {
  14157. if (p[i] < 0) {
  14158. do {
  14159. p[i] = q[j++];
  14160. } while (p[i] < i);
  14161. p[p[i]] = i;
  14162. }
  14163. }
  14164. return array; // padded with zeros
  14165. }
  14166. }
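/*
// Sanity-check sketch (illustration only): _generatePermutation() produces an
// involution, i.e. p[p[i]] == i for every i, so applying the shuffle twice restores
// the original order. `shuffler` is a hypothetical instance of this node.
const p = shuffler._generatePermutation(8);
console.assert(Array.from(p).every((v, i) => p[v] === i));
*/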
  14167. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
  14168. /*
  14169. * speedy-vision.js
  14170. * GPU-accelerated Computer Vision for JavaScript
  14171. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14172. *
  14173. * Licensed under the Apache License, Version 2.0 (the "License");
  14174. * you may not use this file except in compliance with the License.
  14175. * You may obtain a copy of the License at
  14176. *
  14177. * http://www.apache.org/licenses/LICENSE-2.0
  14178. *
  14179. * Unless required by applicable law or agreed to in writing, software
  14180. * distributed under the License is distributed on an "AS IS" BASIS,
  14181. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14182. * See the License for the specific language governing permissions and
  14183. * limitations under the License.
  14184. *
  14185. * multiplexer.js
  14186. * Keypoint multiplexer
  14187. */
  14188. /** @type {string[]} the names of the input ports indexed by their number */
  14189. const multiplexer_INPUT_PORT = ['in0', 'in1'];
  14190. /**
  14191. * Keypoint multiplexer
  14192. */
  14193. class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode {
  14194. /**
  14195. * Constructor
  14196. * @param {string} [name] name of the node
  14197. */
  14198. constructor(name = undefined) {
  14199. super(name, 0, [...multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints)), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14200. /** @type {number} which port should be linked to the output? */
  14201. this._port = 0;
  14202. }
  14203. /**
  14204. * The number of the port that should be linked to the output
  14205. * @returns {number}
  14206. */
  14207. get port() {
  14208. return this._port;
  14209. }
  14210. /**
  14211. * The number of the port that should be linked to the output
  14212. * @param {number} port
  14213. */
  14214. set port(port) {
  14215. if (port < 0 || port >= multiplexer_INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  14216. this._port = port | 0;
  14217. }
  14218. /**
  14219. * Run the specific task of this node
  14220. * @param {SpeedyGPU} gpu
  14221. * @returns {void|SpeedyPromise<void>}
  14222. */
  14223. _run(gpu) {
  14224. const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
  14225. this.output().write(message);
  14226. }
  14227. }
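/*
// Usage sketch (illustration only). A multiplexer routes one of its two keypoint
// inputs to the output, e.g. fresh detections on the first frame and tracked
// keypoints afterwards. Speedy.Keypoint.Multiplexer is an assumed factory name.
const mux = Speedy.Keypoint.Multiplexer();
detector.output().connectTo(mux.input('in0'));
tracker.output().connectTo(mux.input('in1'));
mux.port = isFirstFrame ? 0 : 1; // select which input port is linked to the output
*/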
  14228. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
  14229. /*
  14230. * speedy-vision.js
  14231. * GPU-accelerated Computer Vision for JavaScript
  14232. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14233. *
  14234. * Licensed under the Apache License, Version 2.0 (the "License");
  14235. * you may not use this file except in compliance with the License.
  14236. * You may obtain a copy of the License at
  14237. *
  14238. * http://www.apache.org/licenses/LICENSE-2.0
  14239. *
  14240. * Unless required by applicable law or agreed to in writing, software
  14241. * distributed under the License is distributed on an "AS IS" BASIS,
  14242. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14243. * See the License for the specific language governing permissions and
  14244. * limitations under the License.
  14245. *
  14246. * transformer.js
  14247. * Apply a transformation matrix to a set of keypoints
  14248. */
  14249. /**
  14250. * Apply a transformation matrix to a set of keypoints
  14251. */
  14252. class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode {
  14253. /**
  14254. * Constructor
  14255. * @param {string} [name] name of the node
  14256. */
  14257. constructor(name = undefined) {
  14258. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14259. /** @type {SpeedyMatrix} transformation matrix */
  14260. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  14261. }
  14262. /**
  14263. * Transformation matrix
  14264. * @returns {SpeedyMatrix}
  14265. */
  14266. get transform() {
  14267. return this._transform;
  14268. }
  14269. /**
  14270. * Transformation matrix. Must be 3x3
  14271. * @param {SpeedyMatrix} transform
  14272. */
  14273. set transform(transform) {
  14274. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  14275. this._transform = transform;
  14276. }
  14277. /**
  14278. * Run the specific task of this node
  14279. * @param {SpeedyGPU} gpu
  14280. * @returns {void|SpeedyPromise<void>}
  14281. */
  14282. _run(gpu) {
  14283. const {
  14284. encodedKeypoints,
  14285. descriptorSize,
  14286. extraSize,
  14287. encoderLength
  14288. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14289. const outputTexture = this._tex[0];
  14290. const homography = this._transform.read();
  14291. // apply homography
  14292. gpu.programs.keypoints.applyHomography.outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14293. // done!
  14294. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  14295. }
  14296. }
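/*
// Usage sketch (illustration only). The transform must be a 3x3 homography.
// Speedy.Keypoint.Transformer and Speedy.Matrix are assumed public factories, and
// the entries below assume column-major storage (check your build).
const transformer = Speedy.Keypoint.Transformer();
transformer.transform = Speedy.Matrix(3, 3, [
    1, 0, 0,
    0, 1, 0,
    20, 10, 1  // translate keypoints by (20, 10)
]);
detector.output().connectTo(transformer.input());
transformer.output().connectTo(sink.input());
*/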
  14297. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
  14298. /*
  14299. * speedy-vision.js
  14300. * GPU-accelerated Computer Vision for JavaScript
  14301. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14302. *
  14303. * Licensed under the Apache License, Version 2.0 (the "License");
  14304. * you may not use this file except in compliance with the License.
  14305. * You may obtain a copy of the License at
  14306. *
  14307. * http://www.apache.org/licenses/LICENSE-2.0
  14308. *
  14309. * Unless required by applicable law or agreed to in writing, software
  14310. * distributed under the License is distributed on an "AS IS" BASIS,
  14311. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14312. * See the License for the specific language governing permissions and
  14313. * limitations under the License.
  14314. *
  14315. * subpixel.js
  14316. * Subpixel refinement of keypoint location
  14317. */
  14318. /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
  14319. /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
  14320. const METHOD2PROGRAM = Object.freeze({
  14321. 'quadratic1d': 'subpixelQuadratic1d',
  14322. 'taylor2d': 'subpixelTaylor2d',
  14323. 'bicubic-upsample': 'subpixelBicubic',
  14324. 'bilinear-upsample': 'subpixelBilinear'
  14325. });
  14326. /**
  14327. * Subpixel refinement of keypoint location
  14328. */
  14329. class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode {
  14330. /**
  14331. * Constructor
  14332. * @param {string} [name] name of the node
  14333. */
  14334. constructor(name = undefined) {
  14335. super(name, 2, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2)]);
  14336. /** @type {SubpixelRefinementMethod} subpixel refinement method */
  14337. this._method = 'quadratic1d';
  14338. /** @type {number} max iterations for the upsampling methods */
  14339. this._maxIterations = 6;
  14340. /** @type {number} convergence threshold for the upsampling methods */
  14341. this._epsilon = 0.1;
  14342. }
  14343. /**
  14344. * Subpixel refinement method
  14345. * @returns {SubpixelRefinementMethod}
  14346. */
  14347. get method() {
  14348. return this._method;
  14349. }
  14350. /**
  14351. * Subpixel refinement method
  14352. * @param {SubpixelRefinementMethod} name
  14353. */
  14354. set method(name) {
  14355. if (!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
  14356. this._method = name;
  14357. }
  14358. /**
  14359. * Max. iterations for the upsampling methods
  14360. * @returns {number}
  14361. */
  14362. get maxIterations() {
  14363. return this._maxIterations;
  14364. }
  14365. /**
  14366. * Max. iterations for the upsampling methods
  14367. * @param {number} value
  14368. */
  14369. set maxIterations(value) {
  14370. this._maxIterations = Math.max(0, +value);
  14371. }
  14372. /**
  14373. * Convergence threshold for the upsampling methods
  14374. * @returns {number}
  14375. */
  14376. get epsilon() {
  14377. return this._epsilon;
  14378. }
  14379. /**
  14380. * Convergence threshold for the upsampling methods
  14381. * @param {number} value
  14382. */
  14383. set epsilon(value) {
  14384. this._epsilon = Math.max(0, +value);
  14385. }
  14386. /**
  14387. * Run the specific task of this node
  14388. * @param {SpeedyGPU} gpu
  14389. * @returns {void|SpeedyPromise<void>}
  14390. */
  14391. _run(gpu) {
  14392. const {
  14393. encodedKeypoints,
  14394. descriptorSize,
  14395. extraSize,
  14396. encoderLength
  14397. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14398. const {
  14399. image,
  14400. format
  14401. } = /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read();
  14402. const tex = this._tex;
  14403. const program = METHOD2PROGRAM[this._method];
  14404. const maxIterations = this._maxIterations;
  14405. const epsilon = this._epsilon;
  14406. // note: if you detected the keypoints using a pyramid,
  14407. // you need to pass that pyramid as input!
  14408. // we'll compute the offsets for each keypoint
  14409. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14410. const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
  14411. const offsets = gpu.programs.keypoints[program].outputs(offsetEncoderLength, offsetEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
  14412. // apply the offsets to the keypoints
  14413. const refinedKeypoints = gpu.programs.keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[1])(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14414. // done!
  14415. this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
  14416. this.output('displacements').swrite(offsets);
  14417. }
  14418. }
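/*
// Usage sketch (illustration only). The 'image' input should be the same (pyramid)
// image used by the detector, as noted in the code above. Speedy.Keypoint.SubpixelRefiner
// is an assumed factory name; adjust to your build.
const refiner = Speedy.Keypoint.SubpixelRefiner();
refiner.method = 'quadratic1d'; // or 'taylor2d', 'bicubic-upsample', 'bilinear-upsample'
greyscale.output().connectTo(refiner.input('image'));
detector.output().connectTo(refiner.input('keypoints'));
refiner.output().connectTo(sink.input());
*/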
  14419. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
  14420. /*
  14421. * speedy-vision.js
  14422. * GPU-accelerated Computer Vision for JavaScript
  14423. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14424. *
  14425. * Licensed under the Apache License, Version 2.0 (the "License");
  14426. * you may not use this file except in compliance with the License.
  14427. * You may obtain a copy of the License at
  14428. *
  14429. * http://www.apache.org/licenses/LICENSE-2.0
  14430. *
  14431. * Unless required by applicable law or agreed to in writing, software
  14432. * distributed under the License is distributed on an "AS IS" BASIS,
  14433. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14434. * See the License for the specific language governing permissions and
  14435. * limitations under the License.
  14436. *
  14437. * fast.js
  14438. * FAST corner detector
  14439. */
  14440. // Constants
  14441. const DEFAULT_THRESHOLD = 20;
  14442. /**
  14443. * FAST corner detector
  14444. */
  14445. class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14446. /**
  14447. * Constructor
  14448. * @param {string} [name] name of the node
  14449. */
  14450. constructor(name = undefined) {
  14451. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14452. /** @type {number} FAST threshold in [0,255] */
  14453. this._threshold = DEFAULT_THRESHOLD;
  14454. }
  14455. /**
  14456. * FAST threshold in [0,255]
  14457. * @returns {number}
  14458. */
  14459. get threshold() {
  14460. return this._threshold;
  14461. }
  14462. /**
  14463. * FAST threshold in [0,255]
  14464. * @param {number} threshold
  14465. */
  14466. set threshold(threshold) {
  14467. this._threshold = Math.max(0, Math.min(threshold | 0, 255));
  14468. }
  14469. /**
  14470. * Run the specific task of this node
  14471. * @param {SpeedyGPU} gpu
  14472. * @returns {void|SpeedyPromise<void>}
  14473. */
  14474. _run(gpu) {
  14475. const {
  14476. image,
  14477. format
  14478. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  14479. const width = image.width,
  14480. height = image.height;
  14481. const tex = this._tex;
  14482. const capacity = this._capacity;
  14483. const threshold = this._threshold;
  14484. const lodStep = Math.log2(this.scaleFactor);
  14485. const levels = this.levels;
  14486. // validate pyramid
  14487. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14488. // skip if the capacity is zero
  14489. if (capacity == 0) {
  14490. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
  14491. const encoderLength = encodedKeypoints.width;
  14492. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14493. return;
  14494. }
  14495. // FAST
  14496. gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
  14497. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
  14498. let corners = tex[1].clear();
  14499. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
  14500. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14501. corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
  14502. //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14503. }
  14504. // Same-scale non-maximum suppression
  14505. // *nicer results inside the loop; faster outside
  14506. // Hard to notice a difference when using FAST
  14507. corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14508. // Multi-scale non-maximum suppression
  14509. // (doesn't seem to remove many keypoints)
  14510. if (levels > 1) {
  14511. corners = gpu.programs.keypoints.nonmaxScaleSimple.outputs(width, height, tex[1])(corners, image, lodStep);
  14512. }
  14513. // encode keypoints
  14514. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
  14515. const encoderLength = encodedKeypoints.width;
  14516. // scale refinement
  14517. if (levels > 1) {
  14518. encodedKeypoints = gpu.programs.keypoints.refineScaleFAST916.outputs(encoderLength, encoderLength, tex[4])(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
  14519. }
  14520. // done!
  14521. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14522. }
  14523. }
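/*
// Usage sketch (illustration only). For multi-scale detection (levels > 1) the input
// image must carry mipmaps, e.g. via a pyramid node. The factory names below
// (Speedy.Keypoint.Detector.FAST, Speedy.Image.Pyramid) are assumptions about the public API.
const pyramid = Speedy.Image.Pyramid();
const fast = Speedy.Keypoint.Detector.FAST();
fast.threshold = 20; // in [0,255]
fast.levels = 3;     // number of pyramid levels
greyscale.output().connectTo(pyramid.input());
pyramid.output().connectTo(fast.input());
fast.output().connectTo(sink.input());
*/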
  14524. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
  14525. /*
  14526. * speedy-vision.js
  14527. * GPU-accelerated Computer Vision for JavaScript
  14528. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14529. *
  14530. * Licensed under the Apache License, Version 2.0 (the "License");
  14531. * you may not use this file except in compliance with the License.
  14532. * You may obtain a copy of the License at
  14533. *
  14534. * http://www.apache.org/licenses/LICENSE-2.0
  14535. *
  14536. * Unless required by applicable law or agreed to in writing, software
  14537. * distributed under the License is distributed on an "AS IS" BASIS,
  14538. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14539. * See the License for the specific language governing permissions and
  14540. * limitations under the License.
  14541. *
  14542. * harris.js
  14543. * Harris corner detector
  14544. */
14545. /** Maps an accepted window size (1, 3, 5 or 7) to the name of the corresponding Harris program */
  14546. const HARRIS = Object.freeze({
  14547. 1: 'harris1',
  14548. 3: 'harris3',
  14549. 5: 'harris5',
  14550. 7: 'harris7'
  14551. });
  14552. /**
  14553. * Harris corner detector
  14554. */
  14555. class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14556. /**
  14557. * Constructor
  14558. * @param {string} [name] name of the node
  14559. */
  14560. constructor(name = undefined) {
  14561. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14562. /** @type {SpeedySize} neighborhood size */
  14563. this._windowSize = new SpeedySize(3, 3);
  14564. /** @type {number} min corner quality in [0,1] */
  14565. this._quality = 0.1;
  14566. }
  14567. /**
  14568. * Minimum corner quality in [0,1] - this is a fraction of
  14569. * the largest min. eigenvalue of the autocorrelation matrix
  14570. * over the entire image
  14571. * @returns {number}
  14572. */
  14573. get quality() {
  14574. return this._quality;
  14575. }
  14576. /**
  14577. * Minimum corner quality in [0,1]
  14578. * @param {number} quality
  14579. */
  14580. set quality(quality) {
  14581. this._quality = Math.max(0.0, Math.min(+quality, 1.0));
  14582. }
  14583. /**
  14584. * Neighborhood size
  14585. * @returns {SpeedySize}
  14586. */
  14587. get windowSize() {
  14588. return this._windowSize;
  14589. }
  14590. /**
  14591. * Neighborhood size
  14592. * @param {SpeedySize} windowSize
  14593. */
  14594. set windowSize(windowSize) {
  14595. const d = windowSize.width;
  14596. if (!(d == windowSize.height && (d == 1 || d == 3 || d == 5 || d == 7))) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
  14597. this._windowSize = windowSize;
  14598. }
  14599. /**
  14600. * Run the specific task of this node
  14601. * @param {SpeedyGPU} gpu
  14602. * @returns {void|SpeedyPromise<void>}
  14603. */
  14604. _run(gpu) {
  14605. const {
  14606. image,
  14607. format
  14608. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  14609. const width = image.width,
  14610. height = image.height;
  14611. const capacity = this._capacity;
  14612. const quality = this._quality;
  14613. const windowSize = this._windowSize.width;
  14614. const levels = this.levels;
  14615. const lodStep = Math.log2(this.scaleFactor);
  14616. const intFactor = levels > 1 ? this.scaleFactor : 1;
  14617. const harris = gpu.programs.keypoints[HARRIS[windowSize]];
  14618. const tex = this._tex;
  14619. // validate pyramid
  14620. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14621. // skip if the capacity is zero
  14622. if (capacity == 0) {
  14623. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
  14624. const encoderLength = encodedKeypoints.width;
  14625. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14626. return;
  14627. }
  14628. // compute corner response map
  14629. harris.outputs(width, height, tex[0], tex[1]);
  14630. gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
  14631. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
  14632. let corners = tex[1].clear();
  14633. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
  14634. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14635. const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
  14636. const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
  14637. corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
  14638. corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14639. }
  14640. // Same-scale non-maximum suppression
  14641. // *performs better inside the loop
  14642. //corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14643. // Multi-scale non-maximum suppression
  14644. // (doesn't seem to remove many keypoints)
  14645. if (levels > 1) {
  14646. const laplacian = gpu.programs.keypoints.laplacian.outputs(width, height, tex[0])(corners, image, lodStep, 0);
  14647. corners = gpu.programs.keypoints.nonmaxScale.outputs(width, height, tex[2])(corners, image, laplacian, lodStep);
  14648. }
  14649. // find the maximum corner response over the entire image
  14650. gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
  14651. numPasses = Math.ceil(Math.log2(Math.max(width, height)));
  14652. let maxScore = corners;
  14653. for (let j = 0; j < numPasses; j++) maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
  14654. // discard corners below a quality level
  14655. corners = gpu.programs.keypoints.harrisScoreCutoff.outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])(corners, maxScore, quality);
  14656. // encode keypoints
  14657. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
  14658. const encoderLength = encodedKeypoints.width;
  14659. // scale refinement
  14660. if (levels > 1) {
  14661. encodedKeypoints = gpu.programs.keypoints.refineScaleLoG.outputs(encoderLength, encoderLength, tex[5])(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
  14662. }
  14663. // done!
  14664. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14665. }
  14666. }
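/*
// Usage sketch (illustration only). Window sizes of 1x1, 3x3, 5x5 or 7x7 are accepted,
// as validated above. Speedy.Keypoint.Detector.Harris and Speedy.Size are assumed
// public factory names.
const harris = Speedy.Keypoint.Detector.Harris();
harris.quality = 0.1; // keep corners scoring at least 10% of the strongest response
harris.windowSize = Speedy.Size(3, 3);
greyscale.output().connectTo(harris.input());
harris.output().connectTo(sink.input());
*/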
  14667. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
  14668. /*
  14669. * speedy-vision.js
  14670. * GPU-accelerated Computer Vision for JavaScript
  14671. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14672. *
  14673. * Licensed under the Apache License, Version 2.0 (the "License");
  14674. * you may not use this file except in compliance with the License.
  14675. * You may obtain a copy of the License at
  14676. *
  14677. * http://www.apache.org/licenses/LICENSE-2.0
  14678. *
  14679. * Unless required by applicable law or agreed to in writing, software
  14680. * distributed under the License is distributed on an "AS IS" BASIS,
  14681. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14682. * See the License for the specific language governing permissions and
  14683. * limitations under the License.
  14684. *
  14685. * descriptor.js
  14686. * Abstract keypoint descriptor
  14687. */
  14688. /**
  14689. * Abstract keypoint descriptor
  14690. * @abstract
  14691. */
  14692. class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode {
  14693. /**
  14694. * Constructor
  14695. * @param {string} [name] name of the node
  14696. * @param {number} [texCount] number of work textures
  14697. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14698. */
  14699. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  14700. super(name, texCount + 1, portBuilders);
  14701. }
  14702. /**
  14703. *
  14704. * Allocate space for keypoint descriptors
  14705. * @param {SpeedyGPU} gpu
  14706. * @param {number} inputDescriptorSize should be 0
  14707. * @param {number} inputExtraSize must be non-negative
  14708. * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
  14709. * @param {number} outputExtraSize must be inputExtraSize
  14710. * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
  14711. * @returns {SpeedyDrawableTexture} encodedKeypoints
  14712. */
  14713. _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints) {
  14714. utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
  14715. utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
  14716. const inputEncoderLength = inputEncodedKeypoints.width;
  14717. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  14718. const outputEncoderCapacity = inputEncoderCapacity;
  14719. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  14720. const tex = this._tex[this._tex.length - 1];
  14721. return gpu.programs.keypoints.allocateDescriptors.outputs(outputEncoderLength, outputEncoderLength, tex)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  14722. }
  14723. }
  14724. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
  14725. /*
  14726. * speedy-vision.js
  14727. * GPU-accelerated Computer Vision for JavaScript
  14728. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14729. *
  14730. * Licensed under the Apache License, Version 2.0 (the "License");
  14731. * you may not use this file except in compliance with the License.
  14732. * You may obtain a copy of the License at
  14733. *
  14734. * http://www.apache.org/licenses/LICENSE-2.0
  14735. *
  14736. * Unless required by applicable law or agreed to in writing, software
  14737. * distributed under the License is distributed on an "AS IS" BASIS,
  14738. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14739. * See the License for the specific language governing permissions and
  14740. * limitations under the License.
  14741. *
  14742. * orb.js
  14743. * ORB descriptors
  14744. */
  14745. // Constants
  14746. const DESCRIPTOR_SIZE = 32; // 256 bits
  14747. /**
  14748. * ORB descriptors
  14749. */
  14750. class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor {
  14751. /**
  14752. * Constructor
  14753. * @param {string} [name] name of the node
  14754. */
  14755. constructor(name = undefined) {
  14756. super(name, 3, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14757. }
  14758. /**
  14759. * Run the specific task of this node
  14760. * @param {SpeedyGPU} gpu
  14761. * @returns {void|SpeedyPromise<void>}
  14762. */
  14763. _run(gpu) {
  14764. const {
  14765. encodedKeypoints,
  14766. descriptorSize,
  14767. extraSize,
  14768. encoderLength
  14769. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14770. const image = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read()).image;
  14771. const tex = this._tex;
  14772. const outputTexture = this._tex[2];
  14773. // compute orientation
  14774. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14775. const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
  14776. const encodedOrientations = gpu.programs.keypoints.orbOrientation.outputs(orientationEncoderLength, orientationEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14777. const orientedKeypoints = gpu.programs.keypoints.transferOrientation.outputs(encoderLength, encoderLength, tex[1])(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14778. // allocate space
  14779. const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
  14780. const newEncoderLength = encodedKps.width;
  14781. // compute descriptors (it's a good idea to blur the image)
  14782. const describedKeypoints = gpu.programs.keypoints.orbDescriptor.outputs(newEncoderLength, newEncoderLength, outputTexture)(image, encodedKps, extraSize, newEncoderLength);
  14783. // done!
  14784. this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
  14785. }
  14786. }
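/*
// Usage sketch (illustration only). ORB computes 32-byte (256-bit) descriptors; as the
// comment above suggests, feeding a blurred image tends to improve them.
// Speedy.Keypoint.Descriptor.ORB and Speedy.Filter.GaussianBlur are assumed factory names.
const blur = Speedy.Filter.GaussianBlur();
const orb = Speedy.Keypoint.Descriptor.ORB();
greyscale.output().connectTo(blur.input());
blur.output().connectTo(orb.input('image'));
detector.output().connectTo(orb.input('keypoints'));
orb.output().connectTo(sink.input());
*/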
  14787. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
  14788. /*
  14789. * speedy-vision.js
  14790. * GPU-accelerated Computer Vision for JavaScript
  14791. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14792. *
  14793. * Licensed under the Apache License, Version 2.0 (the "License");
  14794. * you may not use this file except in compliance with the License.
  14795. * You may obtain a copy of the License at
  14796. *
  14797. * http://www.apache.org/licenses/LICENSE-2.0
  14798. *
  14799. * Unless required by applicable law or agreed to in writing, software
  14800. * distributed under the License is distributed on an "AS IS" BASIS,
  14801. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14802. * See the License for the specific language governing permissions and
  14803. * limitations under the License.
  14804. *
  14805. * lk.js
  14806. * LK optical-flow
  14807. */
  14808. // Constants
  14809. const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
  14810. const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
  14811. const DEFAULT_NUMBER_OF_ITERATIONS = 30;
  14812. const DEFAULT_DISCARD_THRESHOLD = 0.0001;
  14813. const DEFAULT_EPSILON = 0.01;
  14814. const LK_PROGRAM = {
  14815. 3: 'lk3',
  14816. 5: 'lk5',
  14817. 7: 'lk7',
  14818. 9: 'lk9',
  14819. 11: 'lk11',
  14820. 13: 'lk13',
  14821. 15: 'lk15',
  14822. 17: 'lk17',
  14823. 19: 'lk19',
  14824. 21: 'lk21'
  14825. };
  14826. /**
  14827. * LK optical-flow
  14828. */
  14829. class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode {
  14830. /**
  14831. * Constructor
  14832. * @param {string} [name] name of the node
  14833. */
  14834. constructor(name = undefined) {
  14835. super(name, 3, [InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  14836. /** @type {SpeedySize} window size */
  14837. this._windowSize = DEFAULT_WINDOW_SIZE;
  14838. /** @type {number} number of pyramid levels to use */
  14839. this._levels = DEFAULT_DEPTH;
  14840. /** @type {number} minimum acceptable corner response */
  14841. this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
  14842. /** @type {number} number of iterations per pyramid level (termination criteria) */
  14843. this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
  14844. /** @type {number} minimum increment per iteration (termination criteria) */
  14845. this._epsilon = DEFAULT_EPSILON;
  14846. }
  14847. /**
  14848. * Window size (use odd numbers)
  14849. * @returns {SpeedySize}
  14850. */
  14851. get windowSize() {
  14852. return this._windowSize;
  14853. }
  14854. /**
  14855. * Window size (use odd numbers)
  14856. * @param {SpeedySize} windowSize must be a square window
  14857. */
  14858. set windowSize(windowSize) {
  14859. if (windowSize.width != windowSize.height) {
14860. throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
  14861. } else if (!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
  14862. const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a, b) => a - b).map(k => k + 'x' + k).join(', ');
14863. throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
  14864. }
  14865. this._windowSize = windowSize;
  14866. }
  14867. /**
  14868. * Number of pyramid levels to use
  14869. * @returns {number}
  14870. */
  14871. get levels() {
  14872. return this._levels;
  14873. }
  14874. /**
  14875. * Number of pyramid levels to use
  14876. * @param {number} levels
  14877. */
  14878. set levels(levels) {
  14879. utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
  14880. this._levels = levels | 0;
  14881. }
  14882. /**
  14883. * Get the discard threshold, used to discard "bad" keypoints
  14884. * @returns {number}
  14885. */
  14886. get discardThreshold() {
  14887. return this._discardThreshold;
  14888. }
  14889. /**
  14890. * Set the discard threshold, used to discard "bad" keypoints
  14891. * @param {number} value typically 10^(-4) - increase to discard more
  14892. */
  14893. set discardThreshold(value) {
  14894. utils/* Utils */.A.assert(value >= 0);
  14895. this._discardThreshold = +value;
  14896. }
  14897. /**
  14898. * Get the maximum number of iterations of the pyramidal LK algorithm
  14899. * @returns {number}
  14900. */
  14901. get numberOfIterations() {
  14902. return this._numberOfIterations;
  14903. }
  14904. /**
  14905. * Set the maximum number of iterations of the pyramidal LK algorithm
  14906. * @param {number} value
  14907. */
  14908. set numberOfIterations(value) {
  14909. utils/* Utils */.A.assert(value >= 1);
  14910. this._numberOfIterations = value | 0;
  14911. }
  14912. /**
  14913. * Get the accuracy threshold, used to stop LK iterations
  14914. * @returns {number}
  14915. */
  14916. get epsilon() {
  14917. return this._epsilon;
  14918. }
  14919. /**
14920. * Set the accuracy threshold, used to stop LK iterations
  14921. * @param {number} value typically 0.01
  14922. */
  14923. set epsilon(value) {
  14924. utils/* Utils */.A.assert(value >= 0);
  14925. this._epsilon = +value;
  14926. }
  14927. /**
  14928. * Run the specific task of this node
  14929. * @param {SpeedyGPU} gpu
  14930. * @returns {void|SpeedyPromise<void>}
  14931. */
  14932. _run(gpu) {
  14933. const {
  14934. encodedKeypoints,
  14935. descriptorSize,
  14936. extraSize,
  14937. encoderLength
  14938. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('previousKeypoints').read();
  14939. const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('previousImage').read()).image;
  14940. const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('nextImage').read()).image;
  14941. const previousKeypoints = encodedKeypoints;
  14942. const levels = this._levels;
  14943. const windowSize = this._windowSize;
  14944. const wsize = windowSize.width; // square window
  14945. const numberOfIterations = this._numberOfIterations;
  14946. const discardThreshold = this._discardThreshold;
  14947. const epsilon = this._epsilon;
  14948. const keypoints = gpu.programs.keypoints;
  14949. const tex = this._tex;
  14950. // do we need a pyramid?
  14951. if (!(levels == 1 || previousImage.hasMipmaps() && nextImage.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);else if (previousImage.width !== nextImage.width || previousImage.height !== nextImage.height) throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
  14952. // select the appropriate program
  14953. const lk = keypoints[LK_PROGRAM[wsize]];
  14954. // find the dimensions of the flow texture (1 pixel per flow vector)
  14955. const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14956. const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
  14957. lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
  14958. // compute optical-flow
  14959. let flow = lk.clear();
  14960. for (let lod = levels - 1; lod >= 0; lod--) flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
  14961. // transfer optical-flow to nextKeypoints
  14962. keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
  14963. const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
  14964. // done!
  14965. this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
  14966. this.output('flow').swrite(flow);
  14967. }
  14968. }
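/*
 * [Editor's note] A minimal usage sketch for the LK tracker node above; it is
 * not part of the library and is kept inside a comment so the bundle is left
 * unchanged. Property and port names ('previousImage', 'nextImage',
 * 'previousKeypoints', 'flow', windowSize, levels, ...) are taken from this
 * file; the factory path Speedy.Keypoint.Tracker.LK() and the wiring helpers
 * (connectTo(), ...) are assumed from the public API and may differ.
 *
 *   const lk = Speedy.Keypoint.Tracker.LK();
 *   lk.windowSize = Speedy.Size(11, 11); // square, odd, must be a size supported by LK_PROGRAM
 *   lk.levels = 3;                       // levels > 1 requires pyramids (mipmaps) on both input images
 *   lk.numberOfIterations = 30;          // termination criteria
 *   lk.epsilon = 0.01;
 *
 *   previousImage.output().connectTo(lk.input('previousImage'));
 *   nextImage.output().connectTo(lk.input('nextImage'));
 *   previousKeypoints.output().connectTo(lk.input('previousKeypoints'));
 *
 *   lk.output().connectTo(trackedKeypointSink.input()); // tracked keypoints
 *   lk.output('flow').connectTo(flowSink.input());      // per-keypoint flow vectors
 */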
  14969. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
  14970. /*
  14971. * speedy-vision.js
  14972. * GPU-accelerated Computer Vision for JavaScript
  14973. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14974. *
  14975. * Licensed under the Apache License, Version 2.0 (the "License");
  14976. * you may not use this file except in compliance with the License.
  14977. * You may obtain a copy of the License at
  14978. *
  14979. * http://www.apache.org/licenses/LICENSE-2.0
  14980. *
  14981. * Unless required by applicable law or agreed to in writing, software
  14982. * distributed under the License is distributed on an "AS IS" BASIS,
  14983. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14984. * See the License for the specific language governing permissions and
  14985. * limitations under the License.
  14986. *
  14987. * lsh-static-tables.js
  14988. * Static LSH tables
  14989. */
  14990. /**
  14991. * Static LSH tables
  14992. */
  14993. class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode {
  14994. /**
  14995. * Constructor
  14996. * @param {string} [name] name of the node
  14997. */
  14998. constructor(name = undefined) {
  14999. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.LSHTables)]);
  15000. /** @type {SpeedyKeypoint[]} "training" keypoints */
  15001. this._keypoints = [];
  15002. /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
  15003. this._keypointsCopy = [];
  15004. /** @type {number} number of tables in the LSH data structure */
  15005. this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
  15006. /** @type {number} number of bits of a hash */
  15007. this._hashSize = LSH_DEFAULT_HASH_SIZE;
  15008. /** @type {SpeedyLSH|null} LSH data structure */
  15009. this._lsh = null;
  15010. }
  15011. /**
  15012. * "Training" keypoints
  15013. * @returns {SpeedyKeypoint[]}
  15014. */
  15015. get keypoints() {
  15016. return this._keypoints;
  15017. }
  15018. /**
  15019. * "Training" keypoints
  15020. * @param {SpeedyKeypoint[]} keypoints
  15021. */
  15022. set keypoints(keypoints) {
  15023. if (!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint))) throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
  15024. if (this._keypoints !== keypoints) {
  15025. this._keypoints = keypoints; // update internal pointer
  15026. this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
  15027. this._lsh = null; // (re)train the model
  15028. }
  15029. }
  15030. /**
  15031. * Number of tables in the LSH data structure
  15032. * @returns {number}
  15033. */
  15034. get numberOfTables() {
  15035. return this._numberOfTables;
  15036. }
  15037. /**
  15038. * Number of tables in the LSH data structure
  15039. * @param {number} n
  15040. */
  15041. set numberOfTables(n) {
  15042. if (!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
  15043. if (n !== this._numberOfTables) {
  15044. this._numberOfTables = n | 0;
  15045. this._lsh = null; // need to retrain the model
  15046. }
  15047. }
  15048. /**
  15049. * Number of bits of a hash
  15050. * @returns {number}
  15051. */
  15052. get hashSize() {
  15053. return this._hashSize;
  15054. }
  15055. /**
  15056. * Number of bits of a hash
  15057. * @param {number} h
  15058. */
  15059. set hashSize(h) {
  15060. if (!LSH_ACCEPTABLE_HASH_SIZES.includes(h)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
  15061. if (h !== this._hashSize) {
  15062. this._hashSize = h | 0;
  15063. this._lsh = null; // need to retrain the model
  15064. }
  15065. }
  15066. /**
  15067. * Run the specific task of this node
  15068. * @param {SpeedyGPU} gpu
  15069. * @returns {void|SpeedyPromise<void>}
  15070. */
  15071. _run(gpu) {
  15072. // Need to train the model?
  15073. if (this._lsh == null) {
  15074. // internal work textures are only available after initialization,
  15075. // i.e., after calling this._init()
  15076. this._lsh = this._train();
  15077. }
  15078. // Pass it forward
  15079. this.output().swrite(this._lsh);
  15080. }
  15081. /**
  15082. * Train the model
  15083. * @returns {SpeedyLSH}
  15084. */
  15085. _train() {
  15086. const keypoints = this._keypointsCopy;
  15087. const numberOfTables = this._numberOfTables;
  15088. const hashSize = this._hashSize;
  15089. if (keypoints.find(keypoint => keypoint.descriptor == null)) throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
  15090. const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
  15091. const lshTables = this._tex[0];
  15092. const descriptorDB = this._tex[1];
  15093. return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
  15094. }
  15095. }
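/*
 * [Editor's note] A usage sketch for the static LSH tables source node above
 * (not part of the library, kept as a comment). Assigning a new keypoint
 * array, or changing numberOfTables / hashSize, clears the cached SpeedyLSH
 * structure so that it is retrained on the next run. The factory path
 * Speedy.Keypoint.Matcher.StaticLSHTables() and the wiring helper are
 * assumed from the public API.
 *
 *   const lshTables = Speedy.Keypoint.Matcher.StaticLSHTables();
 *   lshTables.keypoints = trainingKeypoints; // SpeedyKeypoint[], all with descriptors
 *   // numberOfTables and hashSize only accept the whitelisted values of
 *   // LSH_ACCEPTABLE_NUMBER_OF_TABLES and LSH_ACCEPTABLE_HASH_SIZES
 *   lshTables.output().connectTo(matcher.input('lsh'));
 */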
  15096. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
  15097. /*
  15098. * speedy-vision.js
  15099. * GPU-accelerated Computer Vision for JavaScript
  15100. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15101. *
  15102. * Licensed under the Apache License, Version 2.0 (the "License");
  15103. * you may not use this file except in compliance with the License.
  15104. * You may obtain a copy of the License at
  15105. *
  15106. * http://www.apache.org/licenses/LICENSE-2.0
  15107. *
  15108. * Unless required by applicable law or agreed to in writing, software
  15109. * distributed under the License is distributed on an "AS IS" BASIS,
  15110. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15111. * See the License for the specific language governing permissions and
  15112. * limitations under the License.
  15113. *
  15114. * lsh-knn.js
  15115. * K approximate nearest neighbors matcher
  15116. */
  15117. /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
  15118. /** @type {number} how many neighbors to search for, by default */
  15119. const DEFAULT_K = 1;
  15120. /** @type {LSHKNNQualityLevel} default quality level */
  15121. const DEFAULT_QUALITY = 'default';
  15122. /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
  15123. const NUMBER_OF_BIT_SWAPS = {
  15124. 'fastest': 0,
  15125. 'default': 1,
  15126. 'demanding': 2
  15127. };
  15128. /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
  15129. const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o, d) => (o[d] = fd(d), o), {}))(d => (fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o, h) => (o[h] = fh(h), o), {}))(h => (fl => [0, 1, 2].reduce((o, l) => (o[l] = fl(l), o), {}))(l => `lshKnn${d}h${h}lv${l}`)));
  15130. /**
  15131. * K approximate nearest neighbors matcher
  15132. */
  15133. class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode {
  15134. /**
  15135. * Constructor
  15136. * @param {string} [name] name of the node
  15137. */
  15138. constructor(name = undefined) {
  15139. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15140. /** @type {number} how many neighbors do you want? */
  15141. this._k = DEFAULT_K;
  15142. /** @type {LSHKNNQualityLevel} quality of the matching */
  15143. this._quality = DEFAULT_QUALITY;
  15144. }
  15145. /**
  15146. * How many neighbors do you want?
  15147. * @returns {number}
  15148. */
  15149. get k() {
  15150. return this._k;
  15151. }
  15152. /**
  15153. * How many neighbors do you want?
  15154. * @param {number} k number of neighbors
  15155. */
  15156. set k(k) {
  15157. this._k = Math.max(1, k | 0);
  15158. }
  15159. /**
  15160. * Quality of the matching
  15161. * @returns {LSHKNNQualityLevel}
  15162. */
  15163. get quality() {
  15164. return this._quality;
  15165. }
  15166. /**
  15167. * Quality of the matching
  15168. * @param {LSHKNNQualityLevel} quality
  15169. */
  15170. set quality(quality) {
  15171. if (!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
  15172. this._quality = quality;
  15173. }
  15174. /**
  15175. * Run the specific task of this node
  15176. * @param {SpeedyGPU} gpu
  15177. * @returns {void|SpeedyPromise<void>}
  15178. */
  15179. _run(gpu) {
  15180. const {
  15181. encodedKeypoints,
  15182. descriptorSize,
  15183. extraSize,
  15184. encoderLength
  15185. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15186. /** @type {SpeedyLSH} */
  15187. const lsh = this.input('lsh').read().lsh;
  15188. const keypoints = gpu.programs.keypoints;
  15189. const tables = lsh.tables;
  15190. const descriptorDB = lsh.descriptorDB;
  15191. const tablesStride = tables.width;
  15192. const descriptorDBStride = descriptorDB.width;
  15193. const tableCount = lsh.tableCount;
  15194. const hashSize = lsh.hashSize;
  15195. const bucketCapacity = lsh.bucketCapacity;
  15196. const bucketsPerTable = lsh.bucketsPerTable;
  15197. const sequences = lsh.sequences;
  15198. const candidatesA = this._tex[0];
  15199. const candidatesB = this._tex[1];
  15200. const candidatesC = this._tex[2];
  15201. const filters = this._tex[3];
  15202. const transferA = this._tex[4];
  15203. const transferB = this._tex[5];
  15204. const level = NUMBER_OF_BIT_SWAPS[this._quality];
  15205. const matchesPerKeypoint = this._k;
  15206. // validate parameters
  15207. if (descriptorSize !== lsh.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
  15208. utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
  15209. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
  15210. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
  15211. // configure the output texture
  15212. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15213. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15214. let encodedMatches = transferB;
  15215. keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
  15216. // prepare the LSH matching
  15217. const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15218. keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
  15219. keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
  15220. const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
  15221. lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
  15222. lshKnn.setUBO('LSHSequences', sequences);
  15223. // match keypoints
  15224. encodedMatches.clear();
  15225. keypoints.lshKnnInitFilters();
  15226. for (let i = 0; i < matchesPerKeypoint; i++) {
  15227. // find the (i+1)-th best match
  15228. let candidates = keypoints.lshKnnInitCandidates();
  15229. for (let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
  15230. candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15231. gpu.gl.flush();
  15232. }
  15233. candidates.copyTo(filters);
  15234. // transfer matches to an encoded matches texture
  15235. encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
  15236. }
  15237. // done
  15238. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15239. /*
  15240. // debug
  15241. let data = filters.inspect32(gpu), debug = [];
  15242. for(let i = 0; i < data.length; i++) {
  15243. const bits = MATCH_INDEX_BITS;
  15244. const mask = (1 << bits) - 1;
  15245. const u32 = data[i];
  15246. const index = u32 & mask, distance = u32 >>> bits;
  15247. //debug.push('|'+[ u32 ].toString());
  15248. debug.push('|'+[ index, distance ].toString());
  15249. }
  15250. console.log(debug.join(','));
  15251. */
  15252. }
  15253. }
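/*
 * [Editor's note] A wiring sketch for the LSH-based approximate KNN matcher
 * above (not part of the library, kept as a comment). It expects described
 * keypoints on the 'keypoints' port and trained LSH tables on the 'lsh' port,
 * and emits k matches per keypoint as a KeypointMatches message. Port and
 * property names come from this file; the wiring helpers are assumed from the
 * public pipeline API.
 *
 *   const knn = Speedy.Keypoint.Matcher.LSHKNN();
 *   knn.k = 2;               // e.g., 2 neighbors for a ratio test
 *   knn.quality = 'default'; // 'fastest' | 'default' | 'demanding'
 *
 *   describedKeypoints.output().connectTo(knn.input('keypoints'));
 *   lshTables.output().connectTo(knn.input('lsh'));
 *   // knn.output() is typically consumed by a sink of matched keypoints
 */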
  15254. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
  15255. /*
  15256. * speedy-vision.js
  15257. * GPU-accelerated Computer Vision for JavaScript
  15258. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15259. *
  15260. * Licensed under the Apache License, Version 2.0 (the "License");
  15261. * you may not use this file except in compliance with the License.
  15262. * You may obtain a copy of the License at
  15263. *
  15264. * http://www.apache.org/licenses/LICENSE-2.0
  15265. *
  15266. * Unless required by applicable law or agreed to in writing, software
  15267. * distributed under the License is distributed on an "AS IS" BASIS,
  15268. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15269. * See the License for the specific language governing permissions and
  15270. * limitations under the License.
  15271. *
  15272. * bf-knn.js
  15273. * Brute Force KNN Keypoint Matcher
  15274. */
  15275. /** @type {Object<number,string>} program name indexed by descriptor size */
  15276. const PROGRAM_NAME = {
  15277. 32: 'bfMatcher32',
  15278. 64: 'bfMatcher64'
  15279. };
  15280. /**
  15281. * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
  15282. * invoking this (use a database of 50 keypoints or so - your mileage may vary)
  15283. */
  15284. class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode {
  15285. /**
  15286. * Constructor
  15287. * @param {string} [name] name of the node
  15288. */
  15289. constructor(name = undefined) {
  15290. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15291. /** @type {number} number of matches per keypoint (the "k" of knn) */
  15292. this._matchesPerKeypoint = 1;
  15293. }
  15294. /**
  15295. * Number of matches per keypoint
  15296. * @returns {number}
  15297. */
  15298. get k() {
  15299. return this._matchesPerKeypoint;
  15300. }
  15301. /**
  15302. * Number of matches per keypoint
  15303. * @param {number} value
  15304. */
  15305. set k(value) {
  15306. this._matchesPerKeypoint = Math.max(1, value | 0);
  15307. }
  15308. /**
  15309. * Run the specific task of this node
  15310. * @param {SpeedyGPU} gpu
  15311. * @returns {void|SpeedyPromise<void>}
  15312. */
  15313. _run(gpu) {
  15314. const {
  15315. encodedKeypoints,
  15316. descriptorSize,
  15317. extraSize,
  15318. encoderLength
  15319. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15320. const database = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('database').read();
  15321. const candidatesA = this._tex[0];
  15322. const candidatesB = this._tex[1];
  15323. const candidatesC = this._tex[2];
  15324. const encodedFiltersA = this._tex[3];
  15325. const encodedMatchesA = this._tex[4];
  15326. const encodedMatchesB = this._tex[5];
  15327. const matchesPerKeypoint = this._matchesPerKeypoint;
  15328. const keypoints = gpu.programs.keypoints;
  15329. // validate parameters
  15330. if (descriptorSize !== database.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);else if (!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
  15331. // prepare the brute force matching
  15332. const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
  15333. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15334. const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
  15335. const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
  15336. const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
  15337. const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15338. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15339. keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
  15340. keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
  15341. keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
  15342. bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
  15343. // match keypoints
  15344. let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
  15345. let encodedFilters = keypoints.bfMatcherInitFilters();
  15346. for (let k = 0; k < matchesPerKeypoint; k++) {
  15347. let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
  15348. // find the (k+1)-th best match
  15349. for (let passId = 0; passId < numberOfPasses; passId++) {
  15350. encodedPartialMatches = bfMatcher(encodedPartialMatches, encodedFilters, partialMatcherLength, database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength, encodedKeypoints, descriptorSize, extraSize, encoderLength, passId);
  15351. gpu.gl.flush();
  15352. }
  15353. //gpu.gl.flush();
  15354. // copy the (k+1)-th best match to the filter
  15355. if (matchesPerKeypoint > 1) encodedPartialMatches.copyTo(encodedFilters);
  15356. // aggregate matches
  15357. encodedMatches = keypoints.bfMatcherTransfer(encodedMatches, encodedPartialMatches, matchesPerKeypoint, k);
  15358. }
  15359. // done!
  15360. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15361. }
  15362. }
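/*
 * [Editor's note] A wiring sketch for the brute-force KNN matcher above (not
 * part of the library, kept as a comment). Every query keypoint is compared
 * against every database keypoint (32- or 64-byte descriptors only), so the
 * class comment recommends clipping the database to a small set first. The
 * clipper property name and the wiring helpers are assumptions about the
 * public API.
 *
 *   const bfknn = Speedy.Keypoint.Matcher.BFKNN();
 *   bfknn.k = 1; // best match only
 *
 *   const clipper = Speedy.Keypoint.Clipper();
 *   clipper.size = 50; // property name assumed; see the class comment above
 *
 *   databaseKeypoints.output().connectTo(clipper.input());
 *   clipper.output().connectTo(bfknn.input('database'));
 *   queryKeypoints.output().connectTo(bfknn.input('keypoints'));
 */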
  15363. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
  15364. /*
  15365. * speedy-vision.js
  15366. * GPU-accelerated Computer Vision for JavaScript
  15367. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15368. *
  15369. * Licensed under the Apache License, Version 2.0 (the "License");
  15370. * you may not use this file except in compliance with the License.
  15371. * You may obtain a copy of the License at
  15372. *
  15373. * http://www.apache.org/licenses/LICENSE-2.0
  15374. *
  15375. * Unless required by applicable law or agreed to in writing, software
  15376. * distributed under the License is distributed on an "AS IS" BASIS,
  15377. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15378. * See the License for the specific language governing permissions and
  15379. * limitations under the License.
  15380. *
  15381. * distance-filter.js
  15382. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15383. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15384. */
  15385. /**
  15386. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15387. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15388. *
  15389. * The pairs of keypoints are provided as two separate sets, "in" and
  15390. * "reference". Keypoints that are kept will have their data extracted
  15391. * from the "in" set.
  15392. */
  15393. class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode {
  15394. /**
  15395. * Constructor
  15396. * @param {string} [name] name of the node
  15397. */
  15398. constructor(name = undefined) {
  15399. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15400. /** @type {number} maximum accepted distance */
  15401. this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
  15402. }
  15403. /**
  15404. * Maximum accepted distance
  15405. * @returns {number}
  15406. */
  15407. get threshold() {
  15408. return this._threshold;
  15409. }
  15410. /**
  15411. * Maximum accepted distance
  15412. * @param {number} value
  15413. */
  15414. set threshold(value) {
  15415. this._threshold = Math.max(0, +value);
  15416. }
  15417. /**
  15418. * Run the specific task of this node
  15419. * @param {SpeedyGPU} gpu
  15420. * @returns {void|SpeedyPromise<void>}
  15421. */
  15422. _run(gpu) {
  15423. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15424. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15425. const threshold = this._threshold;
  15426. // validate shapes
  15427. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
  15428. // calculate the shape of the output
  15429. const outputTexture = this._tex[0];
  15430. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15431. const descriptorSize = set0.descriptorSize;
  15432. const extraSize = set0.extraSize;
  15433. // apply the distance filter
  15434. gpu.programs.keypoints.distanceFilter.outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15435. // done!
  15436. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15437. }
  15438. }
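/*
 * [Editor's note] A usage sketch for the distance filter above (not part of
 * the library, kept as a comment), e.g. to validate bidirectional optical
 * flow: pairs farther apart than `threshold` are discarded, and the surviving
 * keypoints take their data from the 'in' set. Port names come from this
 * file; the wiring helpers are assumed from the public API.
 *
 *   const filter = Speedy.Keypoint.DistanceFilter();
 *   filter.threshold = 2; // maximum accepted distance between paired keypoints, in pixels
 *
 *   keypointsA.output().connectTo(filter.input('in'));        // kept data comes from here
 *   keypointsB.output().connectTo(filter.input('reference'));
 */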
  15439. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
  15440. /*
  15441. * speedy-vision.js
  15442. * GPU-accelerated Computer Vision for JavaScript
  15443. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15444. *
  15445. * Licensed under the Apache License, Version 2.0 (the "License");
  15446. * you may not use this file except in compliance with the License.
  15447. * You may obtain a copy of the License at
  15448. *
  15449. * http://www.apache.org/licenses/LICENSE-2.0
  15450. *
  15451. * Unless required by applicable law or agreed to in writing, software
  15452. * distributed under the License is distributed on an "AS IS" BASIS,
  15453. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15454. * See the License for the specific language governing permissions and
  15455. * limitations under the License.
  15456. *
  15457. * hamming-distance-filter.js
  15458. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15459. * distance (of descriptor) is above a user-defined threshold
  15460. */
  15461. /** @type {Object<number,string>} Program names */
  15462. const hamming_distance_filter_PROGRAM_NAME = {
  15463. 32: 'hammingDistanceFilter32',
  15464. 64: 'hammingDistanceFilter64'
  15465. };
  15466. /**
  15467. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15468. * distance (of descriptor) is above a user-defined threshold
  15469. *
  15470. * The pairs of keypoints are provided as two separate sets, "in" and
  15471. * "reference". Keypoints that are kept will have their data extracted
  15472. * from the "in" set.
  15473. */
  15474. class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode {
  15475. /**
  15476. * Constructor
  15477. * @param {string} [name] name of the node
  15478. */
  15479. constructor(name = undefined) {
  15480. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15481. /** @type {number} distance threshold, an integer */
  15482. this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
  15483. }
  15484. /**
  15485. * Distance threshold, an integer
  15486. * @returns {number}
  15487. */
  15488. get threshold() {
  15489. return this._threshold;
  15490. }
  15491. /**
  15492. * Distance threshold, an integer
  15493. * @param {number} value
  15494. */
  15495. set threshold(value) {
  15496. this._threshold = Math.max(0, value | 0);
  15497. }
  15498. /**
  15499. * Run the specific task of this node
  15500. * @param {SpeedyGPU} gpu
  15501. * @returns {void|SpeedyPromise<void>}
  15502. */
  15503. _run(gpu) {
  15504. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15505. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15506. const threshold = this._threshold;
  15507. // validate shapes
  15508. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
  15509. // validate descriptor size
  15510. if (!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
  15511. // calculate the shape of the output
  15512. const outputTexture = this._tex[0];
  15513. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15514. const descriptorSize = set0.descriptorSize;
  15515. const extraSize = set0.extraSize;
  15516. // apply the distance filter
  15517. const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
  15518. gpu.programs.keypoints[program].outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15519. // done!
  15520. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15521. }
  15522. }
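/*
 * [Editor's note] A usage sketch for the Hamming distance filter above (not
 * part of the library, kept as a comment). It compares each keypoint of 'in'
 * with its counterpart in 'reference' and drops pairs whose binary
 * descriptors (32 or 64 bytes) differ in more than `threshold` bits. Wiring
 * helpers are assumed from the public API.
 *
 *   const filter = Speedy.Keypoint.HammingDistanceFilter();
 *   filter.threshold = 64; // maximum Hamming distance, in bits
 *
 *   keypointsA.output().connectTo(filter.input('in'));
 *   keypointsB.output().connectTo(filter.input('reference'));
 */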
  15523. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
  15524. /*
  15525. * speedy-vision.js
  15526. * GPU-accelerated Computer Vision for JavaScript
  15527. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15528. *
  15529. * Licensed under the Apache License, Version 2.0 (the "License");
  15530. * you may not use this file except in compliance with the License.
  15531. * You may obtain a copy of the License at
  15532. *
  15533. * http://www.apache.org/licenses/LICENSE-2.0
  15534. *
  15535. * Unless required by applicable law or agreed to in writing, software
  15536. * distributed under the License is distributed on an "AS IS" BASIS,
  15537. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15538. * See the License for the specific language governing permissions and
  15539. * limitations under the License.
  15540. *
  15541. * portal.js
  15542. * Keypoint Portals
  15543. */
  15544. /**
  15545. * A sink of a Keypoint Portal
  15546. * This is not a pipeline sink - it doesn't export any data!
  15547. */
  15548. class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode {
  15549. /**
  15550. * Constructor
  15551. * @param {string} [name] name of the node
  15552. */
  15553. constructor(name = undefined) {
  15554. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15555. /** @type {number} descriptor size, in bytes */
  15556. this._descriptorSize = 0;
  15557. /** @type {number} extra size, in bytes */
  15558. this._extraSize = 0;
15559. /** @type {number} encoder length */
  15560. this._encoderLength = 0;
  15561. /** @type {boolean} is this node initialized? */
  15562. this._initialized = false;
  15563. }
  15564. /**
  15565. * Encoded keypoints
  15566. * @returns {SpeedyTexture}
  15567. */
  15568. get encodedKeypoints() {
  15569. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15570. return this._tex[0];
  15571. }
  15572. /**
  15573. * Descriptor size, in bytes
  15574. * @returns {number}
  15575. */
  15576. get descriptorSize() {
  15577. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15578. return this._descriptorSize;
  15579. }
  15580. /**
  15581. * Extra size, in bytes
  15582. * @returns {number}
  15583. */
  15584. get extraSize() {
  15585. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15586. return this._extraSize;
  15587. }
  15588. /**
  15589. * Encoder length
  15590. * @returns {number}
  15591. */
  15592. get encoderLength() {
  15593. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15594. return this._encoderLength;
  15595. }
  15596. /**
  15597. * Initializes this node
  15598. * @param {SpeedyGPU} gpu
  15599. */
  15600. init(gpu) {
  15601. super.init(gpu);
  15602. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
  15603. this._tex[0].resize(encoderLength, encoderLength).clearToColor(1, 1, 1, 1); // initial texture
  15604. this._descriptorSize = this._extraSize = 0;
  15605. this._encoderLength = encoderLength;
  15606. this._initialized = true;
  15607. }
  15608. /**
  15609. * Releases this node
  15610. * @param {SpeedyGPU} gpu
  15611. */
  15612. release(gpu) {
  15613. this._initialized = false;
  15614. super.release(gpu);
  15615. }
  15616. /**
  15617. * Run the specific task of this node
  15618. * @param {SpeedyGPU} gpu
  15619. * @returns {void|SpeedyPromise<void>}
  15620. */
  15621. _run(gpu) {
  15622. const {
  15623. encodedKeypoints,
  15624. descriptorSize,
  15625. extraSize,
  15626. encoderLength
  15627. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  15628. const tex = this._tex[0];
  15629. // copy input
  15630. tex.resize(encodedKeypoints.width, encodedKeypoints.height);
  15631. encodedKeypoints.copyTo(tex);
  15632. this._descriptorSize = descriptorSize;
  15633. this._extraSize = extraSize;
  15634. this._encoderLength = encoderLength;
  15635. }
  15636. }
  15637. /**
  15638. * A source of a Keypoint Portal
  15639. */
  15640. class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode {
  15641. /**
  15642. * Constructor
  15643. * @param {string} [name] name of the node
  15644. */
  15645. constructor(name = undefined) {
  15646. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15647. /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
  15648. this._source = null;
  15649. }
  15650. /**
  15651. * Data source
  15652. * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
  15653. */
  15654. get source() {
  15655. return this._source;
  15656. }
  15657. /**
  15658. * Data source
  15659. * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
  15660. */
  15661. set source(node) {
  15662. if (node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  15663. this._source = node;
  15664. }
  15665. /**
  15666. * Run the specific task of this node
  15667. * @param {SpeedyGPU} gpu
  15668. * @returns {void|SpeedyPromise<void>}
  15669. */
  15670. _run(gpu) {
  15671. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  15672. this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
  15673. }
  15674. }
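/*
 * [Editor's note] A usage sketch for the keypoint portals above (not part of
 * the library, kept as a comment). A portal sink stores the encoded keypoints
 * it receives; a portal source re-emits whatever its `source` sink currently
 * holds, which makes it possible to feed the result of one pipeline (or of
 * the previous frame) into another without a GPU-CPU round trip. The factory
 * path Speedy.Keypoint.Portal and the wiring helpers are assumed from the
 * public API.
 *
 *   // pipeline A: store the detected keypoints
 *   const portalSink = Speedy.Keypoint.Portal.Sink();
 *   detector.output().connectTo(portalSink.input());
 *
 *   // pipeline B: read them back
 *   const portalSource = Speedy.Keypoint.Portal.Source();
 *   portalSource.source = portalSink;
 *   portalSource.output().connectTo(lk.input('previousKeypoints'));
 */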
  15675. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
  15676. /*
  15677. * speedy-vision.js
  15678. * GPU-accelerated Computer Vision for JavaScript
  15679. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15680. *
  15681. * Licensed under the Apache License, Version 2.0 (the "License");
  15682. * you may not use this file except in compliance with the License.
  15683. * You may obtain a copy of the License at
  15684. *
  15685. * http://www.apache.org/licenses/LICENSE-2.0
  15686. *
  15687. * Unless required by applicable law or agreed to in writing, software
  15688. * distributed under the License is distributed on an "AS IS" BASIS,
  15689. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15690. * See the License for the specific language governing permissions and
  15691. * limitations under the License.
  15692. *
  15693. * keypoint-factory.js
  15694. * Keypoint-related nodes
  15695. */
  15696. /**
  15697. * Keypoint detectors
  15698. */
  15699. class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15700. /**
  15701. * FAST corner detector
  15702. * @param {string} [name]
  15703. * @returns {SpeedyPipelineNodeFASTKeypointDetector}
  15704. */
  15705. static FAST(name = undefined) {
  15706. return new SpeedyPipelineNodeFASTKeypointDetector(name);
  15707. }
  15708. /**
  15709. * Harris corner detector
  15710. * @param {string} [name]
  15711. * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
  15712. */
  15713. static Harris(name = undefined) {
  15714. return new SpeedyPipelineNodeHarrisKeypointDetector(name);
  15715. }
  15716. }
  15717. /**
  15718. * Keypoint descriptors
  15719. */
  15720. class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15721. /**
  15722. * ORB descriptors
  15723. * @param {string} [name]
  15724. * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
  15725. */
  15726. static ORB(name = undefined) {
  15727. return new SpeedyPipelineNodeORBKeypointDescriptor(name);
  15728. }
  15729. }
  15730. /**
  15731. * Keypoint trackers
  15732. */
  15733. class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15734. /**
  15735. * LK optical-flow
  15736. * @param {string} [name]
  15737. * @returns {SpeedyPipelineNodeLKKeypointTracker}
  15738. */
  15739. static LK(name = undefined) {
  15740. return new SpeedyPipelineNodeLKKeypointTracker(name);
  15741. }
  15742. }
  15743. /**
  15744. * Keypoint matchers
  15745. */
  15746. class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15747. /**
  15748. * Static LSH tables
  15749. * @param {string} [name]
  15750. * @returns {SpeedyPipelineNodeStaticLSHTables}
  15751. */
  15752. static StaticLSHTables(name = undefined) {
  15753. return new SpeedyPipelineNodeStaticLSHTables(name);
  15754. }
  15755. /**
  15756. * LSH-based K-approximate nearest neighbors
  15757. * @param {string} [name]
  15758. * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
  15759. */
  15760. static LSHKNN(name = undefined) {
  15761. return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
  15762. }
  15763. /**
  15764. * Brute-force K-nearest neighbors keypoint matcher
  15765. * @param {string} [name]
  15766. * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
  15767. */
  15768. static BFKNN(name = undefined) {
  15769. return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
  15770. }
  15771. }
  15772. /**
  15773. * Portal nodes
  15774. */
  15775. class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15776. /**
15777. * Create a keypoint portal source
  15778. * @param {string} [name] name of the node
  15779. * @returns {SpeedyPipelineNodeKeypointPortalSource}
  15780. */
  15781. static Source(name = undefined) {
  15782. return new SpeedyPipelineNodeKeypointPortalSource(name);
  15783. }
  15784. /**
15785. * Create a keypoint portal sink
  15786. * @param {string} [name] name of the node
  15787. * @returns {SpeedyPipelineNodeKeypointPortalSink}
  15788. */
  15789. static Sink(name = undefined) {
  15790. return new SpeedyPipelineNodeKeypointPortalSink(name);
  15791. }
  15792. }
  15793. /**
  15794. * Keypoint-related nodes
  15795. */
  15796. class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15797. /**
  15798. * Keypoint detectors
  15799. * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
  15800. */
  15801. static get Detector() {
  15802. return SpeedyPipelineKeypointDetectorFactory;
  15803. }
  15804. /**
  15805. * Keypoint descriptors
  15806. * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
  15807. */
  15808. static get Descriptor() {
  15809. return SpeedyPipelineKeypointDescriptorFactory;
  15810. }
  15811. /**
  15812. * Keypoint trackers
  15813. * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
  15814. */
  15815. static get Tracker() {
  15816. return SpeedyPipelineKeypointTrackerFactory;
  15817. }
  15818. /**
  15819. * Keypoint matchers
  15820. * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
  15821. */
  15822. static get Matcher() {
  15823. return SpeedyPipelineKeypointMatcherFactory;
  15824. }
  15825. /**
  15826. * Keypoint Portals
  15827. * @returns {typeof SpeedyPipelineKeypointPortalFactory}
  15828. */
  15829. static get Portal() {
  15830. return SpeedyPipelineKeypointPortalFactory;
  15831. }
  15832. /**
  15833. * Create a keypoint source
  15834. * @param {string} [name]
  15835. * @returns {SpeedyPipelineNodeKeypointSource}
  15836. */
  15837. static Source(name = undefined) {
  15838. return new SpeedyPipelineNodeKeypointSource(name);
  15839. }
  15840. /**
  15841. * Create a keypoint sink
  15842. * @param {string} [name]
  15843. * @returns {SpeedyPipelineNodeKeypointSink}
  15844. */
  15845. static Sink(name = undefined) {
  15846. return new SpeedyPipelineNodeKeypointSink(name);
  15847. }
  15848. /**
  15849. * Create a sink of tracked keypoints
  15850. * @param {string} [name]
  15851. * @returns {SpeedyPipelineNodeTrackedKeypointSink}
  15852. */
  15853. static SinkOfTrackedKeypoints(name = undefined) {
  15854. return new SpeedyPipelineNodeTrackedKeypointSink(name);
  15855. }
  15856. /**
  15857. * Create a sink of matched keypoints
  15858. * @param {string} [name]
  15859. * @returns {SpeedyPipelineNodeMatchedKeypointSink}
  15860. */
  15861. static SinkOfMatchedKeypoints(name = undefined) {
  15862. return new SpeedyPipelineNodeMatchedKeypointSink(name);
  15863. }
  15864. /**
  15865. * Keypoint clipper
  15866. * @param {string} [name]
  15867. * @returns {SpeedyPipelineNodeKeypointClipper}
  15868. */
  15869. static Clipper(name = undefined) {
  15870. return new SpeedyPipelineNodeKeypointClipper(name);
  15871. }
  15872. /**
  15873. * Border Clipper
  15874. * @param {string} [name]
  15875. * @returns {SpeedyPipelineNodeKeypointBorderClipper}
  15876. */
  15877. static BorderClipper(name = undefined) {
  15878. return new SpeedyPipelineNodeKeypointBorderClipper(name);
  15879. }
  15880. /**
  15881. * Create a keypoint buffer
  15882. * @param {string} [name]
  15883. * @returns {SpeedyPipelineNodeKeypointBuffer}
  15884. */
  15885. static Buffer(name = undefined) {
  15886. return new SpeedyPipelineNodeKeypointBuffer(name);
  15887. }
  15888. /**
  15889. * Create a keypoint mixer
  15890. * @param {string} [name]
  15891. * @returns {SpeedyPipelineNodeKeypointMixer}
  15892. */
  15893. static Mixer(name = undefined) {
  15894. return new SpeedyPipelineNodeKeypointMixer(name);
  15895. }
  15896. /**
  15897. * Create a keypoint shuffler
  15898. * @param {string} [name]
  15899. * @returns {SpeedyPipelineNodeKeypointShuffler}
  15900. */
  15901. static Shuffler(name = undefined) {
  15902. return new SpeedyPipelineNodeKeypointShuffler(name);
  15903. }
  15904. /**
  15905. * Create a keypoint multiplexer
  15906. * @param {string} [name]
  15907. * @returns {SpeedyPipelineNodeKeypointMultiplexer}
  15908. */
  15909. static Multiplexer(name = undefined) {
  15910. return new SpeedyPipelineNodeKeypointMultiplexer(name);
  15911. }
  15912. /**
  15913. * Create a keypoint transformer
  15914. * @param {string} [name]
  15915. * @returns {SpeedyPipelineNodeKeypointTransformer}
  15916. */
  15917. static Transformer(name = undefined) {
  15918. return new SpeedyPipelineNodeKeypointTransformer(name);
  15919. }
  15920. /**
  15921. * Create a subpixel refiner of keypoint locations
  15922. * @param {string} [name]
  15923. * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
  15924. */
  15925. static SubpixelRefiner(name = undefined) {
  15926. return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
  15927. }
  15928. /**
  15929. * Distance filter
  15930. * @param {string} [name]
15931. * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
  15932. */
  15933. static DistanceFilter(name = undefined) {
  15934. return new SpeedyPipelineNodeKeypointDistanceFilter(name);
  15935. }
  15936. /**
  15937. * Hamming distance filter
  15938. * @param {string} [name]
15939. * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
  15940. */
  15941. static HammingDistanceFilter(name = undefined) {
  15942. return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
  15943. }
  15944. }
  15945. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
  15946. /*
  15947. * speedy-vision.js
  15948. * GPU-accelerated Computer Vision for JavaScript
  15949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15950. *
  15951. * Licensed under the Apache License, Version 2.0 (the "License");
  15952. * you may not use this file except in compliance with the License.
  15953. * You may obtain a copy of the License at
  15954. *
  15955. * http://www.apache.org/licenses/LICENSE-2.0
  15956. *
  15957. * Unless required by applicable law or agreed to in writing, software
  15958. * distributed under the License is distributed on an "AS IS" BASIS,
  15959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15960. * See the License for the specific language governing permissions and
  15961. * limitations under the License.
  15962. *
  15963. * sink.js
15964. * Gets 2D vectors out of the pipeline
  15965. */
  15966. // next power of 2
  15967. const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  15968. /**
  15969. * Gets 2D vectors out of the pipeline
  15970. */
  15971. class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode {
  15972. /**
  15973. * Constructor
  15974. * @param {string} [name] name of the node
  15975. */
  15976. constructor(name = 'vec2') {
  15977. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Vector2)]);
  15978. /** @type {SpeedyVector2[]} 2D vectors (output) */
  15979. this._vectors = [];
  15980. /** @type {SpeedyTextureReader} texture reader */
  15981. this._textureReader = new SpeedyTextureReader();
  15982. /** @type {number} page flipping index */
  15983. this._page = 0;
  15984. /** @type {boolean} accelerate GPU-CPU transfers */
  15985. this._turbo = false;
  15986. }
  15987. /**
  15988. * Accelerate GPU-CPU transfers
  15989. * @returns {boolean}
  15990. */
  15991. get turbo() {
  15992. return this._turbo;
  15993. }
  15994. /**
  15995. * Accelerate GPU-CPU transfers
  15996. * @param {boolean} value
  15997. */
  15998. set turbo(value) {
  15999. this._turbo = Boolean(value);
  16000. }
  16001. /**
  16002. * Initializes this node
  16003. * @param {SpeedyGPU} gpu
  16004. */
  16005. init(gpu) {
  16006. super.init(gpu);
  16007. this._textureReader.init(gpu);
  16008. }
  16009. /**
  16010. * Releases this node
  16011. * @param {SpeedyGPU} gpu
  16012. */
  16013. release(gpu) {
  16014. this._textureReader.release(gpu);
  16015. super.release(gpu);
  16016. }
  16017. /**
  16018. * Export data from this node to the user
  16019. * @returns {SpeedyPromise<SpeedyVector2[]>}
  16020. */
  16021. export() {
  16022. return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
  16023. }
  16024. /**
  16025. * Run the specific task of this node
  16026. * @param {SpeedyGPU} gpu
  16027. * @returns {void|SpeedyPromise<void>}
  16028. */
  16029. _run(gpu) {
  16030. const {
  16031. vectors
  16032. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input().read();
  16033. const useBufferedDownloads = this._turbo;
  16034. const encoderLength = vectors.width;
  16035. /*
  16036. I have found experimentally that, in Firefox, readPixelsAsync()
  16037. performs MUCH better if the width of the target texture is a power
  16038. of two. I have no idea why this is the case, nor if it's related to
  16039. some interaction with the GL drivers, somehow. This seems to make no
  16040. difference on Chrome, however. In any case, let's convert the input
  16041. texture to POT.
  16042. */
  16043. const encoderWidth = vector2_sink_nextPot(encoderLength);
  16044. const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  16045. //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
  16046. // copy the set of vectors to an internal texture
  16047. const copiedTexture = this._tex[this._page];
  16048. gpu.programs.utils.copy2DVectors.outputs(encoderWidth, encoderHeight, copiedTexture)(vectors);
  16049. // flip page
  16050. this._page = 1 - this._page;
  16051. // download the internal texture
  16052. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  16053. this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
  16054. });
  16055. }
  16056. /**
  16057. * Decode a sequence of vectors, given a flattened image of encoded pixels
  16058. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  16059. * @param {number} encoderWidth
  16060. * @param {number} encoderHeight
  16061. * @returns {SpeedyVector2[]} vectors
  16062. */
  16063. static _decode(pixels, encoderWidth, encoderHeight) {
  16064. const bytesPerVector = 4; // 1 pixel per vector
  16065. const vectors = [];
  16066. let hi = 0,
  16067. lo = 0;
  16068. let x = 0,
  16069. y = 0;
  16070. // how many bytes should we read?
  16071. const e2 = encoderWidth * encoderHeight * bytesPerVector;
  16072. const size = Math.min(pixels.length, e2);
  16073. // for each encoded vector
  16074. for (let i = 0; i < size; i += bytesPerVector) {
  16075. // extract 16-bit words
  16076. lo = pixels[i + 1] << 8 | pixels[i];
  16077. hi = pixels[i + 3] << 8 | pixels[i + 2];
  16078. // the vector is "null": we have reached the end of the list
  16079. if (lo == 0xFFFF && hi == 0xFFFF) break;
  16080. // the vector must be discarded
  16081. if (lo == 0xFF00 && hi == 0xFF00) continue;
  16082. // decode floats
  16083. x = utils/* Utils */.A.decodeFloat16(lo);
  16084. y = utils/* Utils */.A.decodeFloat16(hi);
  16085. // register vector
  16086. vectors.push(new SpeedyVector2(x, y));
  16087. }
  16088. // done!
  16089. return vectors;
  16090. }
  16091. }
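/*
 * [Editor's note] A usage sketch for the Vector2 sink above (not part of the
 * library, kept as a comment). The sink downloads a texture holding one
 * encoded vector per pixel (two float16 words) and decodes it into
 * SpeedyVector2 objects; a typical producer is the 'flow' output of the LK
 * tracker defined earlier in this bundle. The shape of the object returned by
 * pipeline.run() is assumed from the public API.
 *
 *   const flowSink = Speedy.Vector2.Sink(); // default name: 'vec2'
 *   flowSink.turbo = true;                  // accelerate GPU-CPU transfers
 *   lk.output('flow').connectTo(flowSink.input());
 *
 *   const result = await pipeline.run();
 *   const flow = result.vec2;               // SpeedyVector2[]
 */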
  16092. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
  16093. /*
  16094. * speedy-vision.js
  16095. * GPU-accelerated Computer Vision for JavaScript
  16096. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16097. *
  16098. * Licensed under the Apache License, Version 2.0 (the "License");
  16099. * you may not use this file except in compliance with the License.
  16100. * You may obtain a copy of the License at
  16101. *
  16102. * http://www.apache.org/licenses/LICENSE-2.0
  16103. *
  16104. * Unless required by applicable law or agreed to in writing, software
  16105. * distributed under the License is distributed on an "AS IS" BASIS,
  16106. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16107. * See the License for the specific language governing permissions and
  16108. * limitations under the License.
  16109. *
  16110. * vector2-factory.js
  16111. * 2D vectors
  16112. */
  16113. /**
  16114. * 2D vectors
  16115. */
  16116. class SpeedyPipelineVector2Factory extends Function {
  16117. /**
  16118. * Constructor
  16119. */
  16120. constructor() {
  16121. // This factory can be invoked as a function
  16122. super('...args', 'return this._create(...args)');
  16123. return this.bind(this);
  16124. }
  16125. /**
  16126. * @private
  16127. *
  16128. * Create a 2D vector
  16129. * @param {number} x x-coordinate
  16130. * @param {number} y y-coordinate
  16131. * @returns {SpeedyVector2}
  16132. */
  16133. _create(x, y) {
  16134. return new SpeedyVector2(x, y);
  16135. }
  16136. /**
  16137. * Create a Vector2 sink
  16138. * @param {string} [name]
  16139. * @returns {SpeedyPipelineNodeVector2Sink}
  16140. */
  16141. Sink(name = undefined) {
  16142. return new SpeedyPipelineNodeVector2Sink(name);
  16143. }
  16144. }
  16145. ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
  16146. /*
  16147. * speedy-vision.js
  16148. * GPU-accelerated Computer Vision for JavaScript
  16149. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16150. *
  16151. * Licensed under the Apache License, Version 2.0 (the "License");
  16152. * you may not use this file except in compliance with the License.
  16153. * You may obtain a copy of the License at
  16154. *
  16155. * http://www.apache.org/licenses/LICENSE-2.0
  16156. *
  16157. * Unless required by applicable law or agreed to in writing, software
  16158. * distributed under the License is distributed on an "AS IS" BASIS,
  16159. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16160. * See the License for the specific language governing permissions and
  16161. * limitations under the License.
  16162. *
  16163. * fps-counter.js
  16164. * A FPS counter
  16165. */
  16166. /** @const {number} update interval in milliseconds */
  16167. const UPDATE_INTERVAL = 500;
  16168. /** @type {FPSCounter|null} Singleton */
  16169. let instance = null;
  16170. /**
  16171. * FPS counter
  16172. */
  16173. class FPSCounter {
  16174. /**
  16175. * Creates a new FPSCounter
  16176. * @private
  16177. */
  16178. constructor() {
  16179. /** @type {number} current FPS rate */
  16180. this._fps = 60;
  16181. /** @type {number} frame counter */
  16182. this._frames = 0;
  16183. /** @type {number} update interval in milliseconds */
  16184. this._updateInterval = UPDATE_INTERVAL;
  16185. /** @type {number} time of the last update */
  16186. this._lastUpdate = performance.now();
  16187. /** @type {function(): void} bound update function */
  16188. this._boundUpdate = this._update.bind(this);
  16189. // this should never happen...
  16190. if (instance !== null) throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
  16191. // start FPS counter
  16192. this._boundUpdate();
  16193. }
  16194. /**
  16195. * Gets an instance of the FPS counter.
  16196. * We use lazy loading, i.e., we will not
  16197. * create a FPS counter unless we need to!
  16198. * @returns {FPSCounter}
  16199. */
  16200. static get instance() {
  16201. if (instance === null) instance = new FPSCounter();
  16202. return instance;
  16203. }
  16204. /**
  16205. * Get the FPS rate
  16206. * @returns {number} frames per second
  16207. */
  16208. get fps() {
  16209. return this._fps;
  16210. }
  16211. /**
  16212. * Updates the FPS counter
  16213. */
  16214. _update() {
  16215. const now = performance.now();
  16216. const deltaTime = now - this._lastUpdate;
  16217. if (deltaTime >= this._updateInterval) {
  16218. this._fps = Math.round(this._frames / (deltaTime * 0.001));
  16219. this._frames = 0;
  16220. this._lastUpdate = now;
  16221. }
  16222. this._frames++;
  16223. requestAnimationFrame(this._boundUpdate);
  16224. }
  16225. }
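/*
 * [Editor's note] A small usage sketch for the FPS counter above (not part of
 * the library, kept as a comment). The counter is a lazily-created singleton
 * driven by requestAnimationFrame; the reading is refreshed every
 * UPDATE_INTERVAL (500) milliseconds, and the first access starts the
 * measurement.
 *
 *   const fps = FPSCounter.instance.fps; // frames per second
 */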
  16226. ;// CONCATENATED MODULE: ./src/main.js
  16227. /*
  16228. * speedy-vision.js
  16229. * GPU-accelerated Computer Vision for JavaScript
  16230. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16231. *
  16232. * Licensed under the Apache License, Version 2.0 (the "License");
  16233. * you may not use this file except in compliance with the License.
  16234. * You may obtain a copy of the License at
  16235. *
  16236. * http://www.apache.org/licenses/LICENSE-2.0
  16237. *
  16238. * Unless required by applicable law or agreed to in writing, software
  16239. * distributed under the License is distributed on an "AS IS" BASIS,
  16240. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16241. * See the License for the specific language governing permissions and
  16242. * limitations under the License.
  16243. *
  16244. * main.js
  16245. * The entry point of the library
  16246. */
  16247. /* eslint-disable no-undef */
  16248. /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  16249. /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
  16250. /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
  16251. /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  16252. // Constants
  16253. /** @type {SpeedyMatrixFactory} */
  16254. const matrixFactory = new SpeedyMatrixFactory();
  16255. /** @type {SpeedyPipelineVector2Factory} */
  16256. const vector2Factory = new SpeedyPipelineVector2Factory();
  16257. /**
  16258. * GPU-accelerated Computer Vision for JavaScript
  16259. */
  16260. class Speedy {
  16261. /**
  16262. * The version of the library
  16263. * @returns {string}
  16264. */
  16265. static get version() {
16266. return "0.9.1";
  16267. }
  16268. /**
  16269. * Checks if Speedy can be executed in this machine & browser
  16270. * @returns {boolean}
  16271. */
  16272. static isSupported() {
  16273. return typeof WebAssembly !== 'undefined' && typeof WebGL2RenderingContext !== 'undefined' && speedy_gl/* SpeedyGL */.c.instance.gl != null;
  16274. }
  16275. /**
  16276. * Global settings
  16277. * @returns {typeof Settings}
  16278. */
  16279. static get Settings() {
  16280. return settings/* Settings */.w;
  16281. }
  16282. /**
  16283. * Create a 2D vector
  16284. * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
  16285. */
  16286. static get Vector2() {
  16287. return vector2Factory;
  16288. }
  16289. /**
  16290. * Create a 2D point
  16291. * @param {number} x
  16292. * @param {number} y
  16293. * @returns {SpeedyPoint2}
  16294. */
  16295. static Point2(x, y) {
  16296. return new SpeedyPoint2(x, y);
  16297. }
  16298. /**
  16299. * Create a new size object
  16300. * @param {number} width
  16301. * @param {number} height
  16302. * @returns {SpeedySize}
  16303. */
  16304. static Size(width, height) {
  16305. return new SpeedySize(width, height);
  16306. }
  16307. /**
  16308. * Create a Matrix (entries are given in column-major format)
  16309. * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
  16310. */
  16311. static get Matrix() {
  16312. return matrixFactory;
  16313. }
  16314. /**
  16315. * Speedy Promises
  16316. * @returns {typeof SpeedyPromise}
  16317. */
  16318. static get Promise() {
  16319. return speedy_promise/* SpeedyPromise */.i;
  16320. }
  16321. /**
  16322. * Create a new Pipeline
  16323. * @returns {SpeedyPipeline}
  16324. */
  16325. static Pipeline() {
  16326. return new SpeedyPipeline();
  16327. }
  16328. /**
  16329. * Image-related nodes
  16330. * @returns {typeof SpeedyPipelineImageFactory}
  16331. */
  16332. static get Image() {
  16333. return SpeedyPipelineImageFactory;
  16334. }
  16335. /**
  16336. * Image filters
  16337. * @returns {typeof SpeedyPipelineFilterFactory}
  16338. */
  16339. static get Filter() {
  16340. return SpeedyPipelineFilterFactory;
  16341. }
  16342. /**
  16343. * Image transforms
  16344. * @returns {typeof SpeedyPipelineTransformFactory}
  16345. */
  16346. static get Transform() {
  16347. return SpeedyPipelineTransformFactory;
  16348. }
  16349. /**
  16350. * Keypoint-related nodes
  16351. * @returns {typeof SpeedyPipelineKeypointFactory}
  16352. */
  16353. static get Keypoint() {
  16354. return SpeedyPipelineKeypointFactory;
  16355. }
  16356. /**
  16357. * Loads a SpeedyMedia object based on the provided source element
  16358. * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
  16359. * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
  16360. * @returns {SpeedyPromise<SpeedyMedia>}
  16361. */
  16362. static load(sourceElement, options = {}) {
  16363. return SpeedyMedia.load(sourceElement, options);
  16364. }
  16365. /**
  16366. * Loads a camera stream
16367. * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
  16368. * @param {number} [height] height of the stream
  16369. * @returns {SpeedyPromise<SpeedyMedia>}
  16370. */
  16371. static camera(widthOrConstraints = 640, height = 360) {
  16372. const constraints = typeof widthOrConstraints === 'object' ? widthOrConstraints : {
  16373. audio: false,
  16374. video: {
  16375. width: widthOrConstraints | 0,
  16376. height: height | 0
  16377. }
  16378. };
  16379. return utils/* Utils */.A.requestCameraStream(constraints).then(video => SpeedyMedia.load(video));
  16380. }
  16381. /**
  16382. * Utilities to query information about the graphics driver
  16383. * @returns {typeof SpeedyPlatform}
  16384. */
  16385. static get Platform() {
  16386. return SpeedyPlatform;
  16387. }
  16388. /**
  16389. * The FPS rate
  16390. * @returns {number} Frames per second (FPS)
  16391. */
  16392. static get fps() {
  16393. return FPSCounter.instance.fps;
  16394. }
  16395. }
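/*
 * Illustrative usage sketch (editor's note, not part of the original bundle).
 * The calls below only use members defined in the Speedy class above; the
 * <video> element id is a hypothetical example.
 *
 *   // load a media element
 *   const video = document.getElementById('my-video');   // hypothetical element
 *   Speedy.load(video).then(media => { });
 *
 *   // or request a 640x360 camera stream
 *   Speedy.camera(640, 360).then(media => { });
 *
 *   // query support, version and current frame rate
 *   if (Speedy.isSupported()) console.log(Speedy.version, Speedy.fps);
 */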
  16396. // Freeze the namespace
  16397. Object.freeze(Speedy);
  16398. // Display a notice
  16399. utils/* Utils */.A.log(`Speedy Vision version ${Speedy.version}. ` + `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` + "https://github.com/alemart/speedy-vision");
  16400. // Big-endian machine? Currently untested.
  16401. if (!globals.LITTLE_ENDIAN) utils/* Utils */.A.warning('Running on a big-endian machine');
  16402. })();
  16403. __nested_webpack_exports__ = __nested_webpack_exports__["default"];
  16404. /******/ return __nested_webpack_exports__;
  16405. /******/ })()
  16406. ;
  16407. });
  16408. /***/ })
  16409. /******/ });
  16410. /************************************************************************/
  16411. /******/ // The module cache
  16412. /******/ var __webpack_module_cache__ = {};
  16413. /******/
  16414. /******/ // The require function
  16415. /******/ function __webpack_require__(moduleId) {
  16416. /******/ // Check if module is in cache
  16417. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  16418. /******/ if (cachedModule !== undefined) {
  16419. /******/ return cachedModule.exports;
  16420. /******/ }
  16421. /******/ // Create a new module (and put it into the cache)
  16422. /******/ var module = __webpack_module_cache__[moduleId] = {
  16423. /******/ // no module.id needed
  16424. /******/ // no module.loaded needed
  16425. /******/ exports: {}
  16426. /******/ };
  16427. /******/
  16428. /******/ // Execute the module function
  16429. /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  16430. /******/
  16431. /******/ // Return the exports of the module
  16432. /******/ return module.exports;
  16433. /******/ }
  16434. /******/
  16435. /************************************************************************/
  16436. /******/ /* webpack/runtime/compat get default export */
  16437. /******/ (() => {
  16438. /******/ // getDefaultExport function for compatibility with non-harmony modules
  16439. /******/ __webpack_require__.n = (module) => {
  16440. /******/ var getter = module && module.__esModule ?
  16441. /******/ () => (module['default']) :
  16442. /******/ () => (module);
  16443. /******/ __webpack_require__.d(getter, { a: getter });
  16444. /******/ return getter;
  16445. /******/ };
  16446. /******/ })();
  16447. /******/
  16448. /******/ /* webpack/runtime/define property getters */
  16449. /******/ (() => {
  16450. /******/ // define getter functions for harmony exports
  16451. /******/ __webpack_require__.d = (exports, definition) => {
  16452. /******/ for(var key in definition) {
  16453. /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
  16454. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  16455. /******/ }
  16456. /******/ }
  16457. /******/ };
  16458. /******/ })();
  16459. /******/
  16460. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  16461. /******/ (() => {
  16462. /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  16463. /******/ })();
  16464. /******/
  16465. /************************************************************************/
  16466. var __webpack_exports__ = {};
16467. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  16468. (() => {
  16469. "use strict";
  16470. // EXPORTS
  16471. __webpack_require__.d(__webpack_exports__, {
  16472. "default": () => (/* binding */ Martins)
  16473. });
  16474. // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
  16475. var speedy_vision = __webpack_require__(774);
  16476. var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
  16477. ;// CONCATENATED MODULE: ./src/utils/errors.ts
  16478. /*
  16479. * MARTINS.js
  16480. * GPU-accelerated Augmented Reality for the web
  16481. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16482. *
  16483. * This program is free software: you can redistribute it and/or modify
  16484. * it under the terms of the GNU Lesser General Public License as published
  16485. * by the Free Software Foundation, either version 3 of the License, or
  16486. * (at your option) any later version.
  16487. *
  16488. * This program is distributed in the hope that it will be useful,
  16489. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16490. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16491. * GNU Lesser General Public License for more details.
  16492. *
  16493. * You should have received a copy of the GNU Lesser General Public License
  16494. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16495. *
  16496. * errors.ts
  16497. * Error classes
  16498. */
  16499. /**
  16500. * Generic error class
  16501. */
  16502. class MartinsError extends Error {
  16503. /**
  16504. * Constructor
  16505. * @param message error message
  16506. * @param cause optional error cause
  16507. */
  16508. constructor(message = '', cause = null) {
  16509. super([
  16510. message,
  16511. cause ? cause.toString() : '[martins-js]'
  16512. ].join('\n-> '));
  16513. this.cause = cause;
  16514. }
  16515. /**
  16516. * Error name
  16517. */
  16518. get name() {
  16519. return this.constructor.name;
  16520. }
  16521. }
  16522. /**
  16523. * A method has received one or more illegal arguments
  16524. */
  16525. class IllegalArgumentError extends MartinsError {
  16526. }
  16527. /**
  16528. * The method arguments are valid, but the method can't be called due to the
  16529. * current state of the object
  16530. */
  16531. class IllegalOperationError extends MartinsError {
  16532. }
  16533. /**
  16534. * The requested operation is not supported
  16535. */
  16536. class NotSupportedError extends MartinsError {
  16537. }
  16538. /**
  16539. * Access denied
  16540. */
  16541. class AccessDeniedError extends MartinsError {
  16542. }
  16543. /**
  16544. * Timeout
  16545. */
  16546. class TimeoutError extends MartinsError {
  16547. }
  16548. /**
  16549. * Assertion error
  16550. */
  16551. class AssertionError extends MartinsError {
  16552. }
  16553. /**
  16554. * Tracking error
  16555. */
  16556. class TrackingError extends MartinsError {
  16557. }
  16558. /**
  16559. * Detection error
  16560. */
  16561. class DetectionError extends MartinsError {
  16562. }
  16563. /**
  16564. * Training error
  16565. */
  16566. class TrainingError extends MartinsError {
  16567. }
  16568. ;// CONCATENATED MODULE: ./src/core/resolution.ts
  16569. /*
  16570. * MARTINS.js
  16571. * GPU-accelerated Augmented Reality for the web
  16572. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16573. *
  16574. * This program is free software: you can redistribute it and/or modify
  16575. * it under the terms of the GNU Lesser General Public License as published
  16576. * by the Free Software Foundation, either version 3 of the License, or
  16577. * (at your option) any later version.
  16578. *
  16579. * This program is distributed in the hope that it will be useful,
  16580. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16581. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16582. * GNU Lesser General Public License for more details.
  16583. *
  16584. * You should have received a copy of the GNU Lesser General Public License
  16585. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16586. *
  16587. * resolution.ts
  16588. * Resolution utilities
  16589. */
  16590. /** Reference heights when in landscape mode, measured in pixels */
  16591. const REFERENCE_HEIGHT = {
  16592. 'xs': 120,
  16593. 'xs+': 160,
  16594. 'sm': 200,
  16595. 'sm+': 240,
  16596. 'md': 320,
  16597. 'md+': 360,
  16598. 'lg': 480,
  16599. 'lg+': 600,
  16600. };
  16601. /**
  16602. * Convert a resolution type to a (width, height) pair
  16603. * @param resolution resolution type
  16604. * @param aspectRatio desired width / height ratio
  16605. * @returns size in pixels
  16606. */
  16607. function computeResolution(resolution, aspectRatio) {
  16608. const referenceHeight = REFERENCE_HEIGHT[resolution];
  16609. let width = 0, height = 0;
  16610. if (aspectRatio >= 1) {
  16611. // landscape
  16612. height = referenceHeight;
  16613. width = Math.round(height * aspectRatio);
  16614. width -= width % 2;
  16615. }
  16616. else {
  16617. // portrait
  16618. width = referenceHeight;
  16619. height = Math.round(width / aspectRatio);
  16620. height -= height % 2;
  16621. }
  16622. return speedy_vision_default().Size(width, height);
  16623. }
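/*
 * Worked example (editor's note): for resolution 'md' and a 16:9 aspect ratio,
 * the reference height is 320, so the landscape branch yields
 * width = round(320 * 16/9) = 569, rounded down to the nearest even number = 568,
 * i.e. Size(568, 320). In portrait mode (aspect ratio < 1) the reference height
 * is used as the width instead and the height is derived from it.
 */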
  16624. ;// CONCATENATED MODULE: ./src/utils/utils.ts
  16625. /*
  16626. * MARTINS.js
  16627. * GPU-accelerated Augmented Reality for the web
  16628. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16629. *
  16630. * This program is free software: you can redistribute it and/or modify
  16631. * it under the terms of the GNU Lesser General Public License as published
  16632. * by the Free Software Foundation, either version 3 of the License, or
  16633. * (at your option) any later version.
  16634. *
  16635. * This program is distributed in the hope that it will be useful,
  16636. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16637. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16638. * GNU Lesser General Public License for more details.
  16639. *
  16640. * You should have received a copy of the GNU Lesser General Public License
  16641. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16642. *
  16643. * utils.ts
  16644. * Generic utilities
  16645. */
  16646. /**
  16647. * Generic utilities
  16648. */
  16649. class Utils {
  16650. /**
  16651. * Log a message
  16652. * @param message
  16653. * @param args optional additional messages
  16654. */
  16655. static log(message, ...args) {
  16656. console.log('[martins-js]', message, ...args);
  16657. }
  16658. /**
  16659. * Display a warning
  16660. * @param message
  16661. * @param args optional additional messages
  16662. */
  16663. static warning(message, ...args) {
  16664. console.warn('[martins-js]', message, ...args);
  16665. }
  16666. /**
  16667. * Display an error message
  16668. * @param message
  16669. * @param args optional additional messages
  16670. */
  16671. static error(message, ...args) {
  16672. console.error('[martins-js]', message, ...args);
  16673. }
  16674. /**
  16675. * Assertion
  16676. * @param expr expression
  16677. * @param errorMessage optional error message
  16678. * @throws {AssertionError}
  16679. */
  16680. static assert(expr, errorMessage = '') {
  16681. if (!expr)
  16682. throw new AssertionError(errorMessage);
  16683. }
  16684. /**
  16685. * Returns a range [0, 1, ..., n-1]
  16686. * @param n non-negative integer
  16687. * @returns range from 0 to n-1, inclusive
  16688. */
  16689. static range(n) {
  16690. if ((n |= 0) < 0)
  16691. throw new IllegalArgumentError();
  16692. return Array.from({ length: n }, (_, i) => i);
  16693. }
  16694. /**
  16695. * Convert a resolution type to a resolution measured in pixels
  16696. * @param resolution resolution type
  16697. * @param aspectRatio width / height ratio
  16698. * @returns resolution measured in pixels
  16699. */
  16700. static resolution(resolution, aspectRatio) {
  16701. return computeResolution(resolution, aspectRatio);
  16702. }
  16703. /**
  16704. * Returns a string containing platform brand information
  16705. * @returns platform brand information
  16706. */
  16707. static platformString() {
  16708. return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
  16709. navigator.userAgentData.platform : // use only low entropy data
  16710. navigator.platform // navigator.platform is deprecated
  16711. )(navigator);
  16712. }
  16713. /**
  16714. * Checks if we're on iOS
  16715. * @returns true if we're on iOS
  16716. */
  16717. static isIOS() {
  16718. // at the time of this writing, navigator.userAgentData is not yet
  16719. // compatible with Safari. navigator.platform is deprecated, but
  16720. // predictable.
  16721. //if(/(iOS|iPhone|iPad|iPod)/i.test(Utils.platformString()))
  16722. if (/(iOS|iPhone|iPad|iPod)/i.test(navigator.platform))
  16723. return true;
  16724. if (/Mac/i.test(navigator.platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
  16725. return navigator.maxTouchPoints > 2;
  16726. return false;
  16727. }
  16728. /**
  16729. * Checks if we're on a WebKit-based browser
  16730. * @returns true if we're on a WebKit-based browser
  16731. */
  16732. static isWebKit() {
  16733. // note: navigator.vendor is deprecated.
  16734. if (/Apple/.test(navigator.vendor))
  16735. return true;
  16736. // Can a non WebKit-based browser pass this test?
  16737. // Test masked GL_RENDERER == "Apple GPU" (valid since Feb 2020)
  16738. // https://bugs.webkit.org/show_bug.cgi?id=207608
  16739. /*if(Speedy.Platform.renderer == 'Apple GPU' && Speedy.Platform.vendor == 'Apple Inc.')
  16740. return true;*/
  16741. // Desktop and Mobile Safari, Epiphany on Linux
  16742. if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
  16743. return true;
  16744. // Chrome, Firefox, Edge on iOS
  16745. if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
  16746. return true;
  16747. // not WebKit
  16748. return false;
  16749. }
  16750. /**
  16751. * Device-specific information for debugging purposes
  16752. */
  16753. static deviceInfo() {
  16754. return 'Device info: ' + JSON.stringify({
  16755. isIOS: Utils.isIOS(),
  16756. isWebKit: Utils.isWebKit(),
  16757. renderer: (speedy_vision_default()).Platform.renderer,
  16758. vendor: (speedy_vision_default()).Platform.vendor,
  16759. screen: [screen.width, screen.height].join('x'),
  16760. platform: [navigator.platform, navigator.vendor].join('; '),
  16761. userAgent: navigator.userAgent,
  16762. userAgentData: navigator.userAgentData || null,
  16763. }, null, 2);
  16764. }
  16765. }
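/*
 * Illustrative sketch (editor's note): small self-contained examples of the
 * helpers above (x is a placeholder variable).
 *
 *   Utils.range(4);                              // [0, 1, 2, 3]
 *   Utils.range(-1);                             // throws IllegalArgumentError
 *   Utils.assert(x > 0, 'x must be positive');   // throws AssertionError when false
 *   Utils.resolution('md', 16/9);                // Size(568, 320), see computeResolution()
 *   Utils.log('hello');                          // "[martins-js] hello"
 */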
  16766. ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
  16767. /*
  16768. * MARTINS.js
  16769. * GPU-accelerated Augmented Reality for the web
  16770. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16771. *
  16772. * This program is free software: you can redistribute it and/or modify
  16773. * it under the terms of the GNU Lesser General Public License as published
  16774. * by the Free Software Foundation, either version 3 of the License, or
  16775. * (at your option) any later version.
  16776. *
  16777. * This program is distributed in the hope that it will be useful,
  16778. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16779. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16780. * GNU Lesser General Public License for more details.
  16781. *
  16782. * You should have received a copy of the GNU Lesser General Public License
  16783. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16784. *
  16785. * ar-events.ts
  16786. * AR-related Events
  16787. */
  16788. /**
  16789. * AR Event
  16790. */
  16791. class AREvent extends Event {
  16792. /**
  16793. * Constructor
  16794. * @param type event type
  16795. */
  16796. constructor(type) {
  16797. super(type);
  16798. }
  16799. /**
  16800. * Event type
  16801. */
  16802. get type() {
  16803. return super.type;
  16804. }
  16805. }
  16806. /**
  16807. * AR Event Target
  16808. */
  16809. class AREventTarget {
  16810. /**
  16811. * Constructor
  16812. */
  16813. constructor() {
  16814. this._delegate = new EventTarget();
  16815. }
  16816. /**
  16817. * Add event listener
  16818. * @param type event type
  16819. * @param callback
  16820. */
  16821. addEventListener(type, callback) {
  16822. this._delegate.addEventListener(type, callback);
  16823. }
  16824. /**
  16825. * Remove event listener
  16826. * @param type event type
  16827. * @param callback
  16828. */
  16829. removeEventListener(type, callback) {
  16830. this._delegate.removeEventListener(type, callback);
  16831. }
  16832. /**
  16833. * Synchronously trigger an event
  16834. * @param event
  16835. * @returns same value as a standard event target
  16836. * @internal
  16837. */
  16838. dispatchEvent(event) {
  16839. return this._delegate.dispatchEvent(event);
  16840. }
  16841. }
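/*
 * Illustrative sketch (editor's note): AREventTarget delegates to a native
 * EventTarget, so listeners work as usual. 'example-event' is a hypothetical
 * event type.
 *
 *   class MyTarget extends AREventTarget { }
 *   const target = new MyTarget();
 *   target.addEventListener('example-event', () => console.log('fired'));
 *   target.dispatchEvent(new AREvent('example-event'));   // logs 'fired'
 */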
  16842. ;// CONCATENATED MODULE: ./src/core/hud.ts
  16843. /*
  16844. * MARTINS.js
  16845. * GPU-accelerated Augmented Reality for the web
  16846. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16847. *
  16848. * This program is free software: you can redistribute it and/or modify
  16849. * it under the terms of the GNU Lesser General Public License as published
  16850. * by the Free Software Foundation, either version 3 of the License, or
  16851. * (at your option) any later version.
  16852. *
  16853. * This program is distributed in the hope that it will be useful,
  16854. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16855. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16856. * GNU Lesser General Public License for more details.
  16857. *
  16858. * You should have received a copy of the GNU Lesser General Public License
  16859. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16860. *
  16861. * hud.ts
  16862. * Heads Up Display
  16863. */
  16864. /**
  16865. * Heads Up Display: an overlay displayed in front of the augmented scene
  16866. */
  16867. class HUD {
  16868. /**
  16869. * Constructor
  16870. * @param parent parent of the hud container
  16871. * @param hudContainer an existing hud container (optional)
  16872. */
  16873. constructor(parent, hudContainer) {
  16874. this._container = hudContainer || this._createContainer(parent);
  16875. this._ownContainer = (hudContainer == null);
  16876. // validate
  16877. if (this._container.parentElement !== parent)
  16878. throw new IllegalArgumentError('The container of the HUD must be a direct child of the container of the viewport');
  16879. // the HUD should be hidden initially
  16880. if (!this._container.hidden)
  16881. Utils.warning(`The container of the HUD should have the hidden attribute`);
  16882. }
  16883. /**
  16884. * The container of the HUD
  16885. */
  16886. get container() {
  16887. return this._container;
  16888. }
  16889. /**
  16890. * Whether or not the HUD is visible
  16891. */
  16892. get visible() {
  16893. return !this._container.hidden;
  16894. }
  16895. /**
  16896. * Whether or not the HUD is visible
  16897. */
  16898. set visible(visible) {
  16899. this._container.hidden = !visible;
  16900. }
  16901. /**
  16902. * Initialize the HUD
  16903. * @param zIndex the z-index of the container
  16904. * @internal
  16905. */
  16906. _init(zIndex) {
  16907. const container = this._container;
  16908. container.style.position = 'absolute';
  16909. container.style.left = container.style.top = '0px';
  16910. container.style.right = container.style.bottom = '0px';
  16911. container.style.padding = container.style.margin = '0px';
  16912. container.style.zIndex = String(zIndex);
  16913. container.style.userSelect = 'none';
  16914. }
  16915. /**
  16916. * Release the HUD
  16917. * @internal
  16918. */
  16919. _release() {
  16920. if (this._ownContainer) {
  16921. this._ownContainer = false;
  16922. this._container.remove();
  16923. }
  16924. }
  16925. /**
  16926. * Create a HUD container as an immediate child of the input node
  16927. * @param parent parent container
  16928. * @returns HUD container
  16929. */
  16930. _createContainer(parent) {
  16931. const node = document.createElement('div');
  16932. node.hidden = true;
  16933. parent.appendChild(node);
  16934. return node;
  16935. }
  16936. }
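/*
 * Illustrative sketch (editor's note): when an existing HUD container is
 * provided, it must be a direct child of the viewport container and should
 * start hidden, e.g. (hypothetical markup):
 *
 *   <div id="ar-viewport">
 *     <div id="ar-hud" hidden>
 *       <!-- overlay elements -->
 *     </div>
 *   </div>
 *
 * If no container is given, the HUD creates (and later removes) its own
 * hidden <div> inside the parent.
 */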
  16937. ;// CONCATENATED MODULE: ./src/core/viewport.ts
  16938. /*
  16939. * MARTINS.js
  16940. * GPU-accelerated Augmented Reality for the web
  16941. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16942. *
  16943. * This program is free software: you can redistribute it and/or modify
  16944. * it under the terms of the GNU Lesser General Public License as published
  16945. * by the Free Software Foundation, either version 3 of the License, or
  16946. * (at your option) any later version.
  16947. *
  16948. * This program is distributed in the hope that it will be useful,
  16949. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16950. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16951. * GNU Lesser General Public License for more details.
  16952. *
  16953. * You should have received a copy of the GNU Lesser General Public License
  16954. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16955. *
  16956. * viewport.ts
  16957. * Viewport
  16958. */
  16959. /** An event emitted by a Viewport */
  16960. class ViewportEvent extends AREvent {
  16961. }
  16962. /** Default viewport constructor settings */
  16963. const DEFAULT_VIEWPORT_SETTINGS = {
  16964. container: null,
  16965. hudContainer: null,
  16966. resolution: 'lg',
  16967. canvas: null,
  16968. };
  16969. /** Base z-index of the children of the viewport container */
  16970. const BASE_ZINDEX = 0;
  16971. /** Default viewport width, in pixels */
  16972. const DEFAULT_VIEWPORT_WIDTH = 300;
  16973. /** Default viewport height, in pixels */
  16974. const DEFAULT_VIEWPORT_HEIGHT = 150;
  16975. /**
  16976. * Viewport
  16977. */
  16978. class BaseViewport extends AREventTarget {
  16979. /**
  16980. * Constructor
  16981. * @param viewportSettings
  16982. */
  16983. constructor(viewportSettings) {
  16984. super();
  16985. // validate settings
  16986. const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
  16987. if (settings.container == null)
  16988. throw new IllegalArgumentError('Unspecified viewport container');
  16989. // initialize attributes
  16990. this._resolution = settings.resolution;
  16991. this._container = settings.container;
  16992. this._hud = new HUD(settings.container, settings.hudContainer);
  16993. this._parentOfImportedForegroundCanvas = settings.canvas ? settings.canvas.parentNode : null;
  16994. // create canvas elements
  16995. const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
  16996. this._backgroundCanvas = this._createBackgroundCanvas(this._container, size);
  16997. this._foregroundCanvas = settings.canvas == null ?
  16998. this._createForegroundCanvas(this._container, size) :
16999. this._importForegroundCanvas(settings.canvas, this._container, size);
  17000. }
  17001. /**
  17002. * Viewport container
  17003. */
  17004. get container() {
  17005. return this._container;
  17006. }
  17007. /**
  17008. * HUD
  17009. */
  17010. get hud() {
  17011. return this._hud;
  17012. }
  17013. /**
  17014. * Resolution of the virtual scene
  17015. */
  17016. get resolution() {
  17017. return this._resolution;
  17018. }
  17019. /**
  17020. * Size in pixels of the drawing buffer of the canvas
  17021. * on which the virtual scene will be drawn
  17022. */
  17023. get virtualSize() {
  17024. const aspectRatio = this._backgroundCanvas.width / this._backgroundCanvas.height;
  17025. return Utils.resolution(this._resolution, aspectRatio);
  17026. }
  17027. /**
  17028. * The canvas on which the virtual scene will be drawn
  17029. */
  17030. get canvas() {
  17031. return this._foregroundCanvas;
  17032. }
  17033. /**
  17034. * Background canvas
  17035. * @internal
  17036. */
  17037. get _background() {
  17038. return this._backgroundCanvas;
  17039. }
  17040. /**
  17041. * Size of the drawing buffer of the background canvas, in pixels
  17042. * @internal
  17043. */
  17044. get _size() {
  17045. throw new IllegalOperationError();
  17046. }
  17047. /**
  17048. * Initialize the viewport (when the session starts)
  17049. * @internal
  17050. */
  17051. _init() {
  17052. this._container.style.touchAction = 'none';
  17053. this._hud._init(BASE_ZINDEX + 2);
  17054. this._hud.visible = true;
  17055. }
  17056. /**
17057. * Release the viewport (when the session ends)
  17058. * @internal
  17059. */
  17060. _release() {
  17061. //this._hud.visible = false; // depends on the type of the viewport
  17062. this._hud._release();
  17063. this._restoreImportedForegroundCanvas();
  17064. this._container.style.touchAction = 'auto';
  17065. }
  17066. /**
  17067. * Function to be called when the viewport is resized
  17068. * @internal
  17069. */
  17070. _onResize() {
  17071. // Resize the drawing buffer of the foreground canvas, so that it
  17072. // matches the desired resolution and the aspect ratio of the
  17073. // background canvas
  17074. const virtualSize = this.virtualSize;
  17075. this._foregroundCanvas.width = virtualSize.width;
  17076. this._foregroundCanvas.height = virtualSize.height;
  17077. this._styleCanvas(this._foregroundCanvas, 'foreground');
  17078. // dispatch event
  17079. const event = new ViewportEvent('resize');
  17080. this.dispatchEvent(event);
  17081. }
  17082. /**
  17083. * Create the background canvas
  17084. * @param parent parent container
  17085. * @param size size of the drawing buffer
  17086. * @returns a new canvas as a child of parent
  17087. */
  17088. _createBackgroundCanvas(parent, size) {
  17089. const canvas = this._createCanvas(parent, size);
  17090. return this._styleCanvas(canvas, 'background');
  17091. }
  17092. /**
  17093. * Create the foreground canvas
  17094. * @param parent parent container
  17095. * @param size size of the drawing buffer
  17096. * @returns a new canvas as a child of parent
  17097. */
  17098. _createForegroundCanvas(parent, size) {
  17099. const canvas = this._createCanvas(parent, size);
  17100. return this._styleCanvas(canvas, 'foreground');
  17101. }
  17102. /**
  17103. * Create a canvas and attach it to another HTML element
  17104. * @param parent parent container
  17105. * @param size size of the drawing buffer
  17106. * @returns a new canvas as a child of parent
  17107. */
  17108. _createCanvas(parent, size) {
  17109. const canvas = document.createElement('canvas');
  17110. canvas.width = size.width;
  17111. canvas.height = size.height;
  17112. parent.appendChild(canvas);
  17113. return canvas;
  17114. }
  17115. /**
  17116. * Add suitable CSS rules to a canvas
  17117. * @param canvas
  17118. * @param canvasType
  17119. * @returns canvas
  17120. */
  17121. _styleCanvas(canvas, canvasType) {
  17122. const offset = (canvasType == 'foreground') ? 1 : 0;
  17123. const zIndex = BASE_ZINDEX + offset;
  17124. canvas.setAttribute('style', [
  17125. 'position: absolute',
  17126. 'left: 0px',
  17127. 'top: 0px',
  17128. 'z-index: ' + String(zIndex),
  17129. 'width: 100% !important',
  17130. 'height: 100% !important',
  17131. ].join('; '));
  17132. return canvas;
  17133. }
  17134. /**
  17135. * Import an existing foreground canvas to the viewport
  17136. * @param canvas existing canvas
  17137. * @param parent parent container
  17138. * @param size size of the drawing buffer
  17139. * @returns the input canvas
  17140. */
  17141. _importForegroundCanvas(canvas, parent, size) {
  17142. if (!(canvas instanceof HTMLCanvasElement))
  17143. throw new IllegalArgumentError(`Not a <canvas>: ${canvas}`);
  17144. // borrow the canvas; add it as a child of the viewport container
  17145. canvas.remove();
  17146. parent.appendChild(canvas);
  17147. canvas.width = size.width;
  17148. canvas.height = size.height;
  17149. canvas.dataset.cssText = canvas.style.cssText; // save CSS
  17150. canvas.style.cssText = ''; // clear CSS
  17151. this._styleCanvas(canvas, 'foreground');
  17152. return canvas;
  17153. }
  17154. /**
  17155. * Restore a previously imported foreground canvas to its original parent
  17156. */
  17157. _restoreImportedForegroundCanvas() {
  17158. // not an imported canvas; nothing to do
  17159. if (this._parentOfImportedForegroundCanvas == null)
  17160. return;
  17161. const canvas = this._foregroundCanvas;
  17162. canvas.style.cssText = canvas.dataset.cssText || ''; // restore CSS
  17163. canvas.remove();
  17164. this._parentOfImportedForegroundCanvas.appendChild(canvas);
  17165. }
  17166. }
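/*
 * Illustrative sketch (editor's note): the settings accepted by the
 * constructor, merged over DEFAULT_VIEWPORT_SETTINGS. The element references
 * below are hypothetical.
 *
 *   const viewportSettings = {
 *     container: document.getElementById('ar-viewport'),  // required
 *     hudContainer: document.getElementById('ar-hud'),    // optional
 *     resolution: 'lg',                                    // see REFERENCE_HEIGHT
 *     canvas: null                                         // optional imported foreground canvas
 *   };
 *
 * Omitting the container throws an IllegalArgumentError.
 */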
  17167. /**
  17168. * Viewport decorator
  17169. */
  17170. class ViewportDecorator extends AREventTarget {
  17171. /**
  17172. * Constructor
  17173. * @param base to be decorated
  17174. * @param getSize size getter
  17175. */
  17176. constructor(base, getSize) {
  17177. super();
  17178. this._base = base;
  17179. this._getSize = getSize;
  17180. }
  17181. /**
  17182. * Viewport container
  17183. */
  17184. get container() {
  17185. return this._base.container;
  17186. }
  17187. /**
  17188. * HUD
  17189. */
  17190. get hud() {
  17191. return this._base.hud;
  17192. }
  17193. /**
  17194. * Resolution of the virtual scene
  17195. */
  17196. get resolution() {
  17197. return this._base.resolution;
  17198. }
  17199. /**
  17200. * Size in pixels of the drawing buffer of the canvas
  17201. * on which the virtual scene will be drawn
  17202. */
  17203. get virtualSize() {
  17204. return this._base.virtualSize;
  17205. }
  17206. /**
  17207. * The canvas on which the virtual scene will be drawn
  17208. */
  17209. get canvas() {
  17210. return this._base.canvas;
  17211. }
  17212. /**
  17213. * Background canvas
  17214. * @internal
  17215. */
  17216. get _background() {
  17217. return this._base._background;
  17218. }
  17219. /**
  17220. * Size of the drawing buffer of the background canvas, in pixels
  17221. * @internal
  17222. */
  17223. get _size() {
  17224. return this._getSize();
  17225. }
  17226. /**
  17227. * Initialize the viewport
  17228. * @internal
  17229. */
  17230. _init() {
  17231. this._base._init();
  17232. }
  17233. /**
  17234. * Release the viewport
  17235. * @internal
  17236. */
  17237. _release() {
  17238. this._base._release();
  17239. }
  17240. /**
  17241. * Function to be called when the viewport is resized
  17242. * @internal
  17243. */
  17244. _onResize() {
  17245. this._base._onResize();
  17246. }
  17247. /**
  17248. * Add event listener
  17249. * @param type event type
  17250. * @param callback
  17251. */
  17252. addEventListener(type, callback) {
  17253. this._base.addEventListener(type, callback);
  17254. }
  17255. /**
  17256. * Remove event listener
  17257. * @param type event type
  17258. * @param callback
  17259. */
  17260. removeEventListener(type, callback) {
  17261. this._base.removeEventListener(type, callback);
  17262. }
  17263. /**
  17264. * Synchronously trigger an event
  17265. * @param event
  17266. * @returns same value as a standard event target
  17267. * @internal
  17268. */
  17269. dispatchEvent(event) {
  17270. return this._base.dispatchEvent(event);
  17271. }
  17272. }
  17273. /**
  17274. * A viewport that watches for page resizes
  17275. */
  17276. class ResizableViewport extends ViewportDecorator {
  17277. /**
  17278. * Constructor
  17279. * @param base to be decorated
  17280. * @param getSize size getter
  17281. */
  17282. constructor(base, getSize) {
  17283. super(base, getSize);
  17284. this._active = false;
  17285. }
  17286. /**
  17287. * Initialize the viewport
  17288. * @internal
  17289. */
  17290. _init() {
  17291. super._init();
  17292. this._active = true;
  17293. // Configure the resize listener. We want the viewport
  17294. // to adjust itself if the phone/screen is resized or
  17295. // changes orientation
  17296. let timeout = null;
  17297. const onresize = () => {
  17298. if (!this._active) {
  17299. window.removeEventListener('resize', onresize);
  17300. return;
  17301. }
  17302. if (timeout !== null)
  17303. clearTimeout(timeout);
  17304. timeout = setTimeout(() => {
  17305. timeout = null;
  17306. this._resize.call(this);
  17307. this._onResize.call(this);
  17308. }, 100);
  17309. };
  17310. window.addEventListener('resize', onresize);
  17311. this._resize();
  17312. this._onResize();
  17313. }
  17314. /**
  17315. * Release the viewport
  17316. * @internal
  17317. */
  17318. _release() {
  17319. this._active = false;
  17320. super._release();
  17321. }
  17322. }
  17323. /**
  17324. * Immersive viewport: it occupies the entire page
  17325. */
  17326. class ImmersiveViewport extends ResizableViewport {
  17327. /**
  17328. * Release the viewport
  17329. * @internal
  17330. */
  17331. _release() {
  17332. this.canvas.remove();
  17333. this._background.remove();
  17334. this.hud.visible = false;
  17335. this.container.style.cssText = ''; // reset CSS
  17336. super._release();
  17337. }
  17338. /**
  17339. * Resize the immersive viewport, so that it occupies the entire page.
  17340. * We respect the aspect ratio of the source media
  17341. */
  17342. _resize() {
  17343. const { width, height } = this._size;
  17344. const viewportSize = speedy_vision_default().Size(0, 0);
  17345. const viewportAspectRatio = width / height;
  17346. const windowSize = speedy_vision_default().Size(window.innerWidth, window.innerHeight);
  17347. const windowAspectRatio = windowSize.width / windowSize.height;
  17348. // figure out the viewport size
  17349. if (viewportAspectRatio <= windowAspectRatio) {
  17350. viewportSize.height = windowSize.height;
  17351. viewportSize.width = (viewportSize.height * viewportAspectRatio) | 0;
  17352. }
  17353. else {
  17354. viewportSize.width = windowSize.width;
  17355. viewportSize.height = (viewportSize.width / viewportAspectRatio) | 0;
  17356. }
  17357. // position the viewport and set its size
  17358. const container = this.container;
  17359. container.style.position = 'fixed';
  17360. container.style.left = `calc(50% - ${viewportSize.width >>> 1}px)`;
  17361. container.style.top = `calc(50% - ${viewportSize.height >>> 1}px)`;
  17362. container.style.zIndex = '1000000000'; // 1B //String(2147483647);
  17363. container.style.width = viewportSize.width + 'px';
  17364. container.style.height = viewportSize.height + 'px';
  17365. container.style.backgroundColor = '#000';
  17366. // set the size of the drawing buffer of the background canvas
  17367. const backgroundCanvas = this._background;
  17368. const backgroundCanvasAspectRatio = viewportAspectRatio;
  17369. const referenceHeight = height;
  17370. backgroundCanvas.height = referenceHeight;
  17371. backgroundCanvas.width = (backgroundCanvas.height * backgroundCanvasAspectRatio) | 0;
  17372. }
  17373. }
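/*
 * Worked example (editor's note) of the letterboxing math in _resize():
 * with a 640x480 source (aspect ratio ~1.33) and a 1280x720 window
 * (aspect ratio ~1.78), the viewport aspect ratio is the smaller one, so
 * height = 720 and width = (720 * 4/3) | 0 = 960. The container is then
 * centered with left/top offsets of calc(50% - width/2) and calc(50% - height/2).
 */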
  17374. /**
  17375. * Inline viewport: it follows the typical flow of a web page
  17376. */
  17377. class InlineViewport extends ResizableViewport {
  17378. /**
  17379. * Resize the inline viewport
  17380. */
  17381. _resize() {
  17382. const { width, height } = this._size;
  17383. this.container.style.position = 'relative';
  17384. this.container.style.width = width + 'px';
  17385. this.container.style.height = height + 'px';
  17386. //this.container.style.display = 'inline-block';
  17387. this._background.width = width;
  17388. this._background.height = height;
  17389. }
  17390. }
  17391. ;// CONCATENATED MODULE: ./src/core/stats.ts
  17392. /*
  17393. * MARTINS.js
  17394. * GPU-accelerated Augmented Reality for the web
  17395. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17396. *
  17397. * This program is free software: you can redistribute it and/or modify
  17398. * it under the terms of the GNU Lesser General Public License as published
  17399. * by the Free Software Foundation, either version 3 of the License, or
  17400. * (at your option) any later version.
  17401. *
  17402. * This program is distributed in the hope that it will be useful,
  17403. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17404. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17405. * GNU Lesser General Public License for more details.
  17406. *
  17407. * You should have received a copy of the GNU Lesser General Public License
  17408. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17409. *
  17410. * stats.ts
  17411. * Stats for performance measurements
  17412. */
  17413. /** update interval, given in seconds */
  17414. const UPDATE_INTERVAL = 0.5;
  17415. /**
  17416. * Stats for performance measurements
  17417. */
  17418. class Stats {
  17419. /**
  17420. * Constructor
  17421. */
  17422. constructor() {
  17423. this._timeOfLastUpdate = this._now();
  17424. this._partialCycleCount = 0;
  17425. this._cyclesPerSecond = 0;
  17426. }
  17427. /**
  17428. * Update stats - call every frame
  17429. */
  17430. update() {
  17431. const now = this._now();
  17432. ++this._partialCycleCount;
  17433. if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
  17434. this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
  17435. this._partialCycleCount = 0;
  17436. this._timeOfLastUpdate = now;
  17437. }
  17438. }
  17439. /**
  17440. * Reset stats
  17441. */
  17442. reset() {
  17443. this._timeOfLastUpdate = this._now();
  17444. this._partialCycleCount = 0;
  17445. this._cyclesPerSecond = 0;
  17446. }
  17447. /**
  17448. * Number of cycles per second
  17449. */
  17450. get cyclesPerSecond() {
  17451. return this._cyclesPerSecond;
  17452. }
  17453. /**
  17454. * A measurement of time, in milliseconds
  17455. * @returns time in ms
  17456. */
  17457. _now() {
  17458. return performance.now();
  17459. }
  17460. }
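/*
 * Illustrative sketch (editor's note): call update() once per cycle and read
 * cyclesPerSecond afterwards. The requestAnimationFrame loop below is
 * hypothetical.
 *
 *   const stats = new Stats();
 *   function loop() {
 *     stats.update();                  // count one cycle
 *     requestAnimationFrame(loop);
 *   }
 *   requestAnimationFrame(loop);
 *   // later: stats.cyclesPerSecond holds the rate, refreshed every 0.5 s
 */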
  17461. ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
  17462. /*
  17463. * MARTINS.js
  17464. * GPU-accelerated Augmented Reality for the web
  17465. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17466. *
  17467. * This program is free software: you can redistribute it and/or modify
  17468. * it under the terms of the GNU Lesser General Public License as published
  17469. * by the Free Software Foundation, either version 3 of the License, or
  17470. * (at your option) any later version.
  17471. *
  17472. * This program is distributed in the hope that it will be useful,
  17473. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17474. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17475. * GNU Lesser General Public License for more details.
  17476. *
  17477. * You should have received a copy of the GNU Lesser General Public License
  17478. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17479. *
  17480. * stats-panel.ts
  17481. * Stats panel used for development purposes
  17482. */
  17483. /** Update interval, in ms */
  17484. const stats_panel_UPDATE_INTERVAL = 500;
  17485. /** Icons for different power profiles */
  17486. const POWER_ICON = Object.freeze({
  17487. 'default': '',
  17488. 'low-power': '<span style="color:#0f0">&#x1F50B</span>',
  17489. 'high-performance': '<span style="color:#ff0">&#x26A1</span>'
  17490. });
  17491. /**
  17492. * Stats panel used for development purposes
  17493. */
  17494. class StatsPanel {
  17495. /**
  17496. * Constructor
  17497. * @param parent parent element of the panel
  17498. */
  17499. constructor(parent) {
  17500. this._container = this._createContainer(parent);
  17501. this._lastUpdate = 0;
  17502. }
  17503. /**
  17504. * Release the panel
  17505. */
  17506. release() {
  17507. this._container.remove();
  17508. }
  17509. /**
  17510. * A method to be called in the update loop
  17511. * @param time current time in ms
  17512. * @param trackers the trackers attached to the session
  17513. * @param sources the sources of media linked to the session
  17514. * @param gpu GPU cycles per second
  17515. * @param fps frames per second
  17516. */
  17517. update(time, trackers, sources, gpu, fps) {
  17518. if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
  17519. this._lastUpdate = time;
  17520. this._update(trackers, sources, fps, gpu);
  17521. }
  17522. }
  17523. /**
  17524. * Visibility of the panel
  17525. */
  17526. get visible() {
  17527. return !this._container.hidden;
  17528. }
  17529. /**
  17530. * Visibility of the panel
  17531. */
  17532. set visible(visible) {
  17533. this._container.hidden = !visible;
  17534. }
  17535. /**
  17536. * Update the contents of the panel
  17537. * @param trackers the trackers attached to the session
  17538. * @param sources the sources of media linked to the session
  17539. * @param fps frames per second
  17540. * @param gpu GPU cycles per second
  17541. */
  17542. _update(trackers, sources, fps, gpu) {
  17543. const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
  17544. const sourceStats = sources.map(source => source._stats).join(', ');
  17545. const param = {
  17546. fps: this._colorize(fps),
  17547. gpu: this._colorize(gpu),
  17548. powerIcon: POWER_ICON[Settings.powerPreference]
  17549. };
  17550. this._container.textContent = (`MARTINS.js v${Martins.version}
  17551. FPS: [fps] | GPU: [gpu] [powerIcon]
  17552. IN : ${sourceStats}
  17553. OUT: ${trackerStats}`);
  17554. const fn = (_, x) => param[x];
  17555. this._container.innerHTML = this._container.innerHTML.replace(/\[(\w+)\]/g, fn);
  17556. }
  17557. /**
  17558. * Colorize a frequency number
  17559. * @param f frequency given in cycles per second
  17560. * @returns colorized number (HTML)
  17561. */
  17562. _colorize(f) {
  17563. const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
  17564. const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
  17565. const color2 = f >= 30 ? GREEN : RED;
  17566. const color = Settings.powerPreference != 'low-power' ? color3 : color2;
  17567. return `<span style="color:${color}">${Number(f)}</span>`;
  17568. }
  17569. /**
  17570. * Create the container for the panel
  17571. * @param parent parent element
  17572. * @returns a container
  17573. */
  17574. _createContainer(parent) {
  17575. const container = document.createElement('div');
  17576. container.style.position = 'absolute';
  17577. container.style.left = container.style.top = '0px';
  17578. container.style.zIndex = '1000000';
  17579. container.style.padding = '4px';
  17580. container.style.whiteSpace = 'pre-line';
  17581. container.style.backgroundColor = 'rgba(0,0,0,0.5)';
  17582. container.style.color = '#fff';
  17583. container.style.fontFamily = 'monospace';
  17584. container.style.fontSize = '14px';
  17585. parent.appendChild(container);
  17586. return container;
  17587. }
  17588. }
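/*
 * Editor's note: _colorize() maps a rate to a color. With the default power
 * preference, 50+ cycles/s render green, 30-49 yellow and below 30 red; in
 * 'low-power' mode the split is simply >= 30 (green) vs below (red).
 * A hypothetical update call, made once per animation frame (session, gpuCps
 * and fps are placeholders; the panel throttles itself to one refresh per 500 ms):
 *
 *   panel.update(performance.now(), session.trackers, session.sources, gpuCps, fps);
 */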
  17589. ;// CONCATENATED MODULE: ./src/core/frame.ts
  17590. /*
  17591. * MARTINS.js
  17592. * GPU-accelerated Augmented Reality for the web
  17593. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17594. *
  17595. * This program is free software: you can redistribute it and/or modify
  17596. * it under the terms of the GNU Lesser General Public License as published
  17597. * by the Free Software Foundation, either version 3 of the License, or
  17598. * (at your option) any later version.
  17599. *
  17600. * This program is distributed in the hope that it will be useful,
  17601. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17602. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17603. * GNU Lesser General Public License for more details.
  17604. *
  17605. * You should have received a copy of the GNU Lesser General Public License
  17606. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17607. *
  17608. * frame.ts
  17609. * A Frame holds information used to render a single animation frame of a Session
  17610. */
  17611. /**
  17612. * Iterable frame results (helper class)
  17613. */
  17614. class IterableTrackerResults {
  17615. constructor(_results) {
  17616. this._results = _results;
  17617. this._index = 0;
  17618. }
  17619. next() {
  17620. const i = this._index++;
  17621. return i < this._results.length ?
  17622. { done: false, value: this._results[i] } :
  17623. { done: true, value: undefined };
  17624. }
  17625. [Symbol.iterator]() {
  17626. return this;
  17627. }
  17628. }
  17629. /**
  17630. * A Frame holds information used to render a single animation frame of a Session
  17631. */
  17632. class Frame {
  17633. /**
  17634. * Constructor
  17635. * @param session
  17636. * @param results
  17637. */
  17638. constructor(session, results) {
  17639. this._session = session;
  17640. this._results = new IterableTrackerResults(results);
  17641. }
  17642. /**
  17643. * The session of which this frame holds data
  17644. */
  17645. get session() {
  17646. return this._session;
  17647. }
  17648. /**
  17649. * The results of all trackers in this frame
  17650. */
  17651. get results() {
  17652. return this._results;
  17653. }
  17654. }
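/*
 * Illustrative sketch (editor's note): Frame.results is iterable, so tracker
 * results can be consumed with for...of inside an animation callback
 * (frame is a hypothetical Frame instance):
 *
 *   for (const result of frame.results) {
 *     // inspect the result of each tracker in this frame
 *   }
 */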
  17655. ;// CONCATENATED MODULE: ./src/core/time.ts
  17656. /*
  17657. * MARTINS.js
  17658. * GPU-accelerated Augmented Reality for the web
  17659. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17660. *
  17661. * This program is free software: you can redistribute it and/or modify
  17662. * it under the terms of the GNU Lesser General Public License as published
  17663. * by the Free Software Foundation, either version 3 of the License, or
  17664. * (at your option) any later version.
  17665. *
  17666. * This program is distributed in the hope that it will be useful,
  17667. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17668. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17669. * GNU Lesser General Public License for more details.
  17670. *
  17671. * You should have received a copy of the GNU Lesser General Public License
  17672. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17673. *
  17674. * time.ts
  17675. * Time utilities
  17676. */
  17677. /**
  17678. * Time Manager
  17679. */
  17680. class Time {
  17681. constructor() {
  17682. /** time scale */
  17683. this._scale = 1;
  17684. /** time since the start of the session, in milliseconds */
  17685. this._time = 0;
  17686. /** unscaled time since the start of the session, in milliseconds */
  17687. this._unscaledTime = 0;
  17688. /** elapsed time between the current and the previous frame, in milliseconds */
  17689. this._delta = 0;
  17690. /** time of the first update call, in milliseconds */
  17691. this._firstUpdate = 0;
  17692. /** time of the last update call, in milliseconds */
  17693. this._lastUpdate = Number.POSITIVE_INFINITY;
  17694. }
  17695. /**
  17696. * Update the Time Manager
  17697. * @param timestamp in milliseconds
  17698. * @internal
  17699. */
  17700. _update(timestamp) {
  17701. if (timestamp < this._lastUpdate) {
  17702. this._firstUpdate = this._lastUpdate = timestamp;
  17703. return;
  17704. }
  17705. this._delta = (timestamp - this._lastUpdate) * this._scale;
  17706. this._time += this._delta;
  17707. this._unscaledTime = timestamp - this._firstUpdate;
  17708. this._lastUpdate = timestamp;
  17709. }
  17710. /**
  17711. * Elapsed time since the start of the session, measured at the
  17712. * beginning of the current animation frame and given in seconds
  17713. */
  17714. get elapsed() {
  17715. return this._time * 0.001;
  17716. }
  17717. /**
  17718. * Elapsed time between the current and the previous animation
  17719. * frame, given in seconds
  17720. */
  17721. get delta() {
  17722. return this._delta * 0.001;
  17723. }
  17724. /**
  17725. * Time scale (defaults to 1)
  17726. */
  17727. get scale() {
  17728. return this._scale;
  17729. }
  17730. /**
  17731. * Time scale (defaults to 1)
  17732. */
  17733. set scale(scale) {
  17734. this._scale = Math.max(0, +scale);
  17735. }
  17736. /**
  17737. * Time scale independent elapsed time since the start of the session,
  17738. * measured at the beginning of the current animation frame and given
  17739. * in seconds
  17740. */
  17741. get unscaled() {
  17742. return this._unscaledTime * 0.001;
  17743. }
  17744. }
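/*
 * Illustrative sketch (editor's note): the time manager is driven internally
 * via _update(timestamp); user code typically reads the derived values
 * (time is a hypothetical instance):
 *
 *   time.scale = 0.5;          // run the session at half speed
 *   const dt = time.delta;     // seconds since the previous frame (scaled)
 *   const t = time.elapsed;    // scaled seconds since the session started
 *   const tu = time.unscaled;  // unscaled seconds since the session started
 */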
  17745. ;// CONCATENATED MODULE: ./src/core/gizmos.ts
  17746. /*
  17747. * MARTINS.js
  17748. * GPU-accelerated Augmented Reality for the web
  17749. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17750. *
  17751. * This program is free software: you can redistribute it and/or modify
  17752. * it under the terms of the GNU Lesser General Public License as published
  17753. * by the Free Software Foundation, either version 3 of the License, or
  17754. * (at your option) any later version.
  17755. *
  17756. * This program is distributed in the hope that it will be useful,
  17757. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17758. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17759. * GNU Lesser General Public License for more details.
  17760. *
  17761. * You should have received a copy of the GNU Lesser General Public License
  17762. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17763. *
  17764. * gizmos.ts
  17765. * Visual cues for testing & debugging
  17766. */
  17767. /** The maximum match distance ratio we'll consider to be "good" */
  17768. const GOOD_MATCH_THRESHOLD = 0.7;
  17769. /**
  17770. * Visual cues for testing & debugging
  17771. */
  17772. class Gizmos {
  17773. /**
  17774. * Constructor
  17775. */
  17776. constructor() {
  17777. this._visible = false;
  17778. }
  17779. /**
  17780. * Whether or not the gizmos will be rendered
  17781. */
  17782. get visible() {
  17783. return this._visible;
  17784. }
  17785. /**
  17786. * Whether or not the gizmos will be rendered
  17787. */
  17788. set visible(visible) {
  17789. this._visible = visible;
  17790. }
  17791. /**
  17792. * Render gizmos
  17793. * @param viewport
  17794. * @param trackers
  17795. * @internal
  17796. */
  17797. _render(viewport, trackers) {
  17798. // no need to render?
  17799. if (!this._visible)
  17800. return;
  17801. // viewport
  17802. const viewportSize = viewport._size;
  17803. const canvas = viewport._background;
  17804. const ctx = canvas.getContext('2d', { alpha: false });
  17805. if (!ctx)
  17806. throw new IllegalOperationError();
  17807. // debug
  17808. //ctx.fillStyle = '#000';
  17809. //ctx.fillRect(0, 0, canvas.width, canvas.height);
  17810. //ctx.clearRect(0, 0, canvas.width, canvas.height);
  17811. // render keypoints
  17812. for (let i = 0; i < trackers.length; i++) {
  17813. if (trackers[i].type != 'image-tracker')
  17814. continue;
  17815. const output = trackers[i]._output;
  17816. const keypoints = output.keypoints;
  17817. const screenSize = output.screenSize;
  17818. if (keypoints !== undefined && screenSize !== undefined)
  17819. this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
  17820. }
  17821. // render polylines
  17822. for (let i = 0; i < trackers.length; i++) {
  17823. if (trackers[i].type != 'image-tracker')
  17824. continue;
  17825. const output = trackers[i]._output;
  17826. const polyline = output.polyline;
  17827. const screenSize = output.screenSize;
  17828. if (polyline !== undefined && screenSize !== undefined)
  17829. this._renderPolyline(ctx, polyline, screenSize, viewportSize);
  17830. }
  17831. // render the axes of the 3D coordinate system
  17832. for (let i = 0; i < trackers.length; i++) {
  17833. if (trackers[i].type != 'image-tracker')
  17834. continue;
  17835. const output = trackers[i]._output;
  17836. const cameraMatrix = output.cameraMatrix;
  17837. const screenSize = output.screenSize;
  17838. if (cameraMatrix !== undefined && screenSize !== undefined)
  17839. this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
  17840. }
  17841. }
  17842. /**
  17843. * Split keypoints in matched/unmatched categories and
  17844. * render them for testing & development purposes
  17845. * @param ctx canvas 2D context
  17846. * @param keypoints keypoints to render
  17847. * @param screenSize AR screen size
  17848. * @param viewportSize viewport size
  17849. * @param size base keypoint rendering size
  17850. */
  17851. _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
  17852. if (keypoints.length == 0)
  17853. return;
  17854. if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
  17855. this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
  17856. return;
  17857. }
  17858. const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
  17859. (keypoint.matches.length > 1 &&
  17860. keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
  17861. keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
  17862. const matchedKeypoints = keypoints;
  17863. const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
  17864. const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
  17865. this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
  17866. this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
  17867. }
  17868. /**
  17869. * Render keypoints for testing & development purposes
  17870. * @param ctx canvas 2D context
  17871. * @param keypoints keypoints to render
  17872. * @param screenSize AR screen size
  17873. * @param viewportSize viewport size
  17874. * @param color color of the rendered keypoints
  17875. * @param size base keypoint rendering size
  17876. */
  17877. _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
  17878. const sx = viewportSize.width / screenSize.width;
  17879. const sy = viewportSize.height / screenSize.height;
  17880. ctx.beginPath();
  17881. for (let i = keypoints.length - 1; i >= 0; i--) {
  17882. const keypoint = keypoints[i];
  17883. const x = (keypoint.x * sx + 0.5) | 0;
  17884. const y = (keypoint.y * sy + 0.5) | 0;
  17885. const r = (size * keypoint.scale + 0.5) | 0;
  17886. ctx.rect(x - r, y - r, 2 * r, 2 * r);
  17887. }
  17888. ctx.strokeStyle = color;
  17889. ctx.lineWidth = 1;
  17890. ctx.stroke();
  17891. }
  17892. /**
  17893. * Render polyline for testing & development purposes
  17894. * @param ctx canvas 2D context
  17895. * @param polyline vertices
  17896. * @param screenSize AR screen size
  17897. * @param viewportSize viewport size
  17898. * @param color color of the rendered polyline
  17899. * @param lineWidth
  17900. */
  17901. _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
  17902. if (polyline.length == 0)
  17903. return;
  17904. const n = polyline.length;
  17905. const sx = viewportSize.width / screenSize.width;
  17906. const sy = viewportSize.height / screenSize.height;
  17907. // render polyline
  17908. ctx.beginPath();
  17909. ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
  17910. for (let j = 0; j < n; j++)
  17911. ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
  17912. ctx.strokeStyle = color;
  17913. ctx.lineWidth = lineWidth;
  17914. ctx.stroke();
  17915. }
  17916. /**
  17917. * Render the axes of a 3D coordinate system
  17918. * @param ctx canvas 2D context
  17919. * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
  17920. * @param screenSize AR screen size
  17921. * @param viewportSize viewport size
  17922. * @param lineWidth
  17923. */
  17924. _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
  17925. const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
  17926. const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
17927. const length = 1; // length of the line drawn for each axis, given in normalized space units
  17928. const sx = viewportSize.width / screenSize.width;
  17929. const sy = viewportSize.height / screenSize.height;
  17930. /*
  17931. Multiply the 3x4 camera matrix P by:
  17932. [ 0 L 0 0 ]
  17933. [ 0 0 L 0 ] , where L = length in normalized space of the lines
  17934. [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
  17935. [ 1 1 1 1 ]
  17936. Each column of the resulting matrix will give us the pixel coordinates
  17937. we're looking for.
  17938. Note: we're working with homogeneous coordinates
  17939. */
  17940. const p = cameraMatrix.read();
  17941. const l = length;
  17942. const o = [p[9], p[10], p[11]]; // origin of the coordinate system
  17943. const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
  17944. const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
  17945. const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
  17946. const axis = [x, y, z];
  17947. // draw each axis
  17948. const ox = o[0] / o[2], oy = o[1] / o[2];
  17949. for (let i = 0; i < 3; i++) {
  17950. const q = axis[i];
  17951. const x = q[0] / q[2], y = q[1] / q[2];
  17952. ctx.beginPath();
  17953. ctx.moveTo(ox * sx, oy * sy);
  17954. ctx.lineTo(x * sx, y * sy);
  17955. ctx.strokeStyle = color[i];
  17956. ctx.lineWidth = lineWidth;
  17957. ctx.stroke();
  17958. }
  17959. //console.log("Origin",ox,oy);
  17960. }
  17961. }
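/*
 * Usage sketch: gizmos are visual cues for testing & debugging, drawn on the
 * background canvas of the viewport. Assuming `session` is an active Session
 * (see core/session.ts below), they are toggled via its gizmos getter:
 *
 *     session.gizmos.visible = true;   // draw keypoints, polylines and axes
 *     session.gizmos.visible = false;  // hide the visual cues
 *
 * Matched keypoints are split by the ratio test above: a match is "good" if it
 * is the only valid one, or if its distance is at most GOOD_MATCH_THRESHOLD
 * (0.7) times the distance of the second-best match. Good matches are drawn
 * in green ('#0f0'), the remaining ones in red ('#f00').
 */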
  17962. ;// CONCATENATED MODULE: ./src/utils/asap.ts
  17963. /*
  17964. * MARTINS.js
  17965. * GPU-accelerated Augmented Reality for the web
  17966. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17967. *
  17968. * This program is free software: you can redistribute it and/or modify
  17969. * it under the terms of the GNU Lesser General Public License as published
  17970. * by the Free Software Foundation, either version 3 of the License, or
  17971. * (at your option) any later version.
  17972. *
  17973. * This program is distributed in the hope that it will be useful,
  17974. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17975. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17976. * GNU Lesser General Public License for more details.
  17977. *
  17978. * You should have received a copy of the GNU Lesser General Public License
  17979. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17980. *
  17981. * asap.ts
  17982. * Schedule a function to run "as soon as possible"
  17983. */
  17984. /** callbacks */
  17985. const callbacks = [];
  17986. /** arguments to be passed to the callbacks */
  17987. const args = [];
  17988. /** asap key */
  17989. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  17990. // Register an event listener
  17991. window.addEventListener('message', event => {
  17992. if (event.source !== window || event.data !== ASAP_KEY)
  17993. return;
  17994. event.stopPropagation();
  17995. if (callbacks.length == 0)
  17996. return;
  17997. const fn = callbacks.pop();
  17998. const argArray = args.pop();
  17999. fn.apply(undefined, argArray);
  18000. }, true);
  18001. /**
  18002. * Schedule a function to run "as soon as possible"
  18003. * @param fn callback
  18004. * @param params optional parameters
  18005. */
  18006. function asap(fn, ...params) {
  18007. callbacks.unshift(fn);
  18008. args.unshift(params);
  18009. window.postMessage(ASAP_KEY, '*');
  18010. }
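/*
 * Usage sketch: asap() schedules a callback via window.postMessage(), which
 * typically fires sooner than setTimeout(fn, 0) and than the next animation
 * frame. Hypothetical example:
 *
 *     asap((a, b) => console.log(a + b), 2, 3); // logs 5 "as soon as possible"
 *
 * The update loop relies on this when Settings.powerPreference is set to
 * 'high-performance' (see Session._setupUpdateLoop() below).
 */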
  18011. ;// CONCATENATED MODULE: ./src/core/session.ts
  18012. /*
  18013. * MARTINS.js
  18014. * GPU-accelerated Augmented Reality for the web
  18015. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18016. *
  18017. * This program is free software: you can redistribute it and/or modify
  18018. * it under the terms of the GNU Lesser General Public License as published
  18019. * by the Free Software Foundation, either version 3 of the License, or
  18020. * (at your option) any later version.
  18021. *
  18022. * This program is distributed in the hope that it will be useful,
  18023. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18024. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18025. * GNU Lesser General Public License for more details.
  18026. *
  18027. * You should have received a copy of the GNU Lesser General Public License
  18028. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18029. *
  18030. * session.ts
  18031. * WebAR Session
  18032. */
  18033. /** An event emitted by a Session */
  18034. class SessionEvent extends AREvent {
  18035. }
  18036. /** Default options when starting a session */
  18037. const DEFAULT_OPTIONS = {
  18038. mode: 'immersive',
  18039. trackers: [],
  18040. sources: [],
  18041. viewport: null,
  18042. stats: false,
  18043. gizmos: false,
  18044. };
  18045. /**
  18046. * A Session represents an intent to display AR content
  18047. * and encapsulates the main loop (update-render cycle)
  18048. */
  18049. class Session extends AREventTarget {
  18050. /**
  18051. * Constructor
  18052. * @param sources previously initialized sources of data
  18053. * @param mode session mode
  18054. * @param viewport viewport
  18055. * @param stats render stats panel?
  18056. * @param gizmos render gizmos?
  18057. */
  18058. constructor(sources, mode, viewport, stats, gizmos) {
  18059. super();
  18060. this._mode = mode;
  18061. this._trackers = [];
  18062. this._sources = sources;
  18063. this._updateStats = new Stats();
  18064. this._renderStats = new Stats();
  18065. this._active = true;
  18066. this._frameReady = true; // no trackers at the moment
  18067. this._rafQueue = [];
  18068. this._time = new Time();
  18069. this._gizmos = new Gizmos();
  18070. this._gizmos.visible = gizmos;
  18071. // get media
  18072. const media = this.media;
  18073. // setup the viewport
  18074. if (mode == 'immersive')
  18075. this._viewport = new ImmersiveViewport(viewport, () => media.size);
  18076. else if (mode == 'inline')
  18077. this._viewport = new InlineViewport(viewport, () => media.size);
  18078. else
  18079. throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
  18080. this._viewport._init();
  18081. // setup the main loop
  18082. this._setupUpdateLoop();
  18083. this._setupRenderLoop();
  18084. // setup the stats panel
  18085. this._statsPanel = new StatsPanel(this._viewport.hud.container);
  18086. this._statsPanel.visible = stats;
  18087. // done!
  18088. Session._count++;
  18089. Utils.log(`The ${mode} session is now active!`);
  18090. }
  18091. /**
  18092. * Checks if the engine can be run in the browser the client is using
  18093. * @returns true if the engine is compatible with the browser
  18094. */
  18095. static isSupported() {
  18096. //alert(Utils.deviceInfo()); // debug
  18097. // If Safari / iOS, require version 15.2 or later
  18098. if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
  18099. /*
  18100. iOS compatibility
  18101. -----------------
  18102. The engine is known to work on iPhone 8 or later, with iOS 15.2 or
  18103. later. Tested on many devices, including iPads, on the cloud.
  18104. The engine crashes on an iPhone 13 Pro Max with iOS 15.1 and on an
  18105. iPhone 12 Pro with iOS 15.0.2. A (valid) shader from speedy-vision
  18106. version 0.9.1 (bf-knn) fails to compile: "WebGL error. Program has
  18107. not been successfully linked".
  18108. The engine freezes on an older iPhone 6S (2015) with iOS 15.8.2.
  18109. The exact cause is unknown, but it happens when training an image
  18110. tracker, at ImageTrackerTrainingState._gpuUpdate() (a WebGL error?
  18111. a hardware limitation?)
  18112. Successfully tested down to iPhone 8 so far.
  18113. Successfully tested down to iOS 15.2.
  18114. >> WebGL2 support was introduced in Safari 15 <<
  18115. Note: the webp image format used in the demos is supported on
  18116. Safari for iOS 14+. Desktop Safari 14-15.6 supports webp, but
  18117. requires macOS 11 Big Sur or later. https://caniuse.com/webp
  18118. */
  18119. const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
  18120. const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
  18121. const matches = safari || ios; // match safari first (min version)
  18122. if (matches !== null) {
  18123. const version = matches[3] || '0.0';
  18124. const [x, y] = version.split(/[\._]/).map(v => parseInt(v) | 0);
  18125. if ((x < 15) || (x == 15 && y < 2)) {
  18126. Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
  18127. return false;
  18128. }
  18129. // XXX reject older iPhone models? Which ones?
  18130. /*if(navigator.userAgent.includes('iPhone')) {
  18131. // detect screen size?
  18132. }*/
  18133. }
  18134. else
  18135. Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
  18136. }
  18137. // Android: reject very old / weak devices?
  18138. // XXX establish criteria?
  18139. /*if(Utils.isAndroid()) {
  18140. }*/
  18141. // Check if WebGL2 and WebAssembly are supported
  18142. return speedy_vision_default().isSupported();
  18143. }
  18144. /**
  18145. * Instantiate a session
  18146. * @param options options
  18147. * @returns a promise that resolves to a new session
  18148. */
  18149. static instantiate(options = DEFAULT_OPTIONS) {
  18150. const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
  18151. Utils.log(`Starting a new ${mode} session...`);
  18152. return speedy_vision_default().Promise.resolve().then(() => {
  18153. // is the engine supported?
  18154. if (!Session.isSupported())
  18155. throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with the MARTINS.js engine');
  18156. // block multiple immersive sessions
  18157. if (mode !== 'inline' && Session.count > 0)
  18158. throw new IllegalOperationError(`Can't start more than one immersive session`);
  18159. // initialize matrix routines
  18160. return speedy_vision_default().Matrix.ready();
  18161. }).then(() => {
  18162. // validate sources of data
  18163. const videoSources = sources.filter(source => source._type == 'video');
  18164. if (videoSources.length != 1)
  18165. throw new IllegalArgumentError(`One video source of data must be provided`);
  18166. for (let i = sources.length - 1; i >= 0; i--) {
  18167. if (sources.indexOf(sources[i]) < i)
  18168. throw new IllegalArgumentError(`Found repeated sources of data`);
  18169. }
  18170. // initialize sources of data
  18171. return speedy_vision_default().Promise.all(sources.map(source => source._init()));
  18172. }).then(() => {
  18173. // get the viewport
  18174. if (!viewport)
  18175. throw new IllegalArgumentError(`Can't create a session without a viewport`);
  18176. // instantiate session
  18177. return new Session(sources, mode, viewport, stats, gizmos);
  18178. }).then(session => {
  18179. // validate trackers
  18180. if (trackers.length == 0)
  18181. Utils.warning(`No trackers have been attached to the session!`);
  18182. for (let i = trackers.length - 1; i >= 0; i--) {
  18183. if (trackers.indexOf(trackers[i]) < i)
  18184. throw new IllegalArgumentError(`Found repeated trackers`);
  18185. }
  18186. // attach trackers and return the session
  18187. return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
  18188. }).catch(err => {
  18189. // log errors, if any
  18190. Utils.error(`Can't start session: ${err.message}`);
  18191. throw err;
  18192. });
  18193. }
  18194. /**
  18195. * Number of active sessions
  18196. */
  18197. static get count() {
  18198. return this._count;
  18199. }
  18200. /**
  18201. * End the session
  18202. * @returns promise that resolves after the session is shut down
  18203. */
  18204. end() {
  18205. // is the session inactive?
  18206. if (!this._active)
  18207. return speedy_vision_default().Promise.resolve();
  18208. // deactivate the session
  18209. Utils.log('Shutting down the session...');
  18210. this._active = false; // set before wait()
  18211. // wait a few ms, so that the GPU is no longer sending any data
  18212. const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
  18213. setTimeout(resolve, ms);
  18214. });
  18215. // release resources
  18216. return wait(100).then(() => speedy_vision_default().Promise.all(
  18217. // release trackers
  18218. this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
  18219. // release input sources
  18220. this._sources.map(source => source._release()))).then(() => {
  18221. this._sources.length = 0;
  18222. this._trackers.length = 0;
  18223. // release internal components
  18224. this._updateStats.reset();
  18225. this._renderStats.reset();
  18226. this._statsPanel.release();
  18227. this._viewport._release();
  18228. // end the session
  18229. Session._count--;
  18230. // dispatch event
  18231. const event = new SessionEvent('end');
  18232. this.dispatchEvent(event);
  18233. // done!
  18234. Utils.log('Session ended.');
  18235. });
  18236. }
  18237. /**
  18238. * Analogous to window.requestAnimationFrame()
  18239. * @param callback
  18240. * @returns a handle
  18241. */
  18242. requestAnimationFrame(callback) {
  18243. const handle = Symbol('raf-handle');
  18244. if (this._active)
  18245. this._rafQueue.push([handle, callback]);
  18246. else
  18247. throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
  18248. return handle;
  18249. }
  18250. /**
  18251. * Analogous to window.cancelAnimationFrame()
  18252. * @param handle a handle returned by this.requestAnimationFrame()
  18253. */
  18254. cancelAnimationFrame(handle) {
  18255. for (let i = this._rafQueue.length - 1; i >= 0; i--) {
  18256. if (this._rafQueue[i][0] === handle) {
  18257. this._rafQueue.splice(i, 1);
  18258. break;
  18259. }
  18260. }
  18261. }
  18262. /**
  18263. * The underlying media (generally a camera stream)
  18264. * @internal
  18265. */
  18266. get media() {
  18267. for (let i = this._sources.length - 1; i >= 0; i--) {
  18268. if (this._sources[i]._type == 'video')
  18269. return this._sources[i]._data;
  18270. }
  18271. // this shouldn't happen
  18272. throw new IllegalOperationError(`Invalid input source`);
  18273. }
  18274. /**
  18275. * Session mode
  18276. */
  18277. get mode() {
  18278. return this._mode;
  18279. }
  18280. /**
  18281. * Rendering viewport
  18282. */
  18283. get viewport() {
  18284. return this._viewport;
  18285. }
  18286. /**
  18287. * Time utilities
  18288. */
  18289. get time() {
  18290. return this._time;
  18291. }
  18292. /**
  18293. * Visual cues for testing & debugging
  18294. */
  18295. get gizmos() {
  18296. return this._gizmos;
  18297. }
  18298. /**
  18299. * Attach a tracker to the session
  18300. * @param tracker
  18301. */
  18302. _attachTracker(tracker) {
  18303. if (this._trackers.indexOf(tracker) >= 0)
  18304. throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
  18305. else if (!this._active)
  18306. throw new IllegalOperationError(`Inactive session`);
  18307. this._trackers.push(tracker);
  18308. return tracker._init(this);
  18309. }
  18310. /**
  18311. * Render the user media to the background canvas
  18312. */
  18313. _renderUserMedia() {
  18314. const canvas = this._viewport._background;
  18315. const ctx = canvas.getContext('2d', { alpha: false });
  18316. if (ctx && this.media.type != 'data') {
  18317. ctx.imageSmoothingEnabled = false;
  18318. // draw user media
  18319. const image = this.media.source;
  18320. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  18321. // render output image(s)
  18322. for (let i = 0; i < this._trackers.length; i++) {
  18323. const media = this._trackers[i]._output.image;
  18324. if (media !== undefined) {
  18325. const image = media.source;
  18326. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  18327. //ctx.drawImage(image, canvas.width - media.width, canvas.height - media.height, media.width, media.height);
  18328. }
  18329. }
  18330. // render gizmos
  18331. this._gizmos._render(this._viewport, this._trackers);
  18332. }
  18333. }
  18334. /**
  18335. * Setup the update loop
  18336. */
  18337. _setupUpdateLoop() {
  18338. const scheduleNextFrame = () => {
  18339. if (this._active) {
  18340. if (Settings.powerPreference == 'high-performance')
  18341. asap(repeat);
  18342. else
  18343. window.requestAnimationFrame(repeat);
  18344. }
  18345. };
  18346. const update = () => {
  18347. this._update().then(scheduleNextFrame).turbocharge();
  18348. };
  18349. function repeat() {
  18350. if (Settings.powerPreference == 'low-power') // 30 fps
  18351. window.requestAnimationFrame(update);
  18352. else
  18353. update();
  18354. }
  18355. window.requestAnimationFrame(update);
  18356. }
  18357. /**
  18358. * The core of the update loop
  18359. */
  18360. _update() {
  18361. // active session?
  18362. if (this._active) {
  18363. return speedy_vision_default().Promise.all(
  18364. // update trackers
  18365. this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
  18366. // update internals
  18367. this._updateStats.update();
  18368. this._frameReady = true;
  18369. }).catch(err => {
  18370. // log error
  18371. Utils.error('Tracking error: ' + err.toString(), err);
  18372. // handle WebGL errors
  18373. const cause = err.cause;
  18374. if (err.name == 'GLError') {
  18375. alert(err.message); // fatal error?
  18376. alert(Utils.deviceInfo()); // display useful info
  18377. throw err;
  18378. }
  18379. else if (typeof cause == 'object' && cause.name == 'GLError') {
  18380. alert(err.message);
  18381. alert(cause.message);
  18382. alert(Utils.deviceInfo());
  18383. throw err;
  18384. }
  18385. });
  18386. }
  18387. else {
  18388. // inactive session
  18389. this._updateStats.reset();
  18390. return speedy_vision_default().Promise.resolve();
  18391. }
  18392. }
  18393. /**
  18394. * Setup the render loop
  18395. */
  18396. _setupRenderLoop() {
  18397. let skip = false, toggle = false;
  18398. const render = (timestamp) => {
  18399. const enableFrameSkipping = (Settings.powerPreference == 'low-power');
  18400. const highPerformance = (Settings.powerPreference == 'high-performance');
  18401. // advance time
  18402. this._time._update(timestamp);
  18403. // skip frames
  18404. if (!enableFrameSkipping || !(skip = !skip))
  18405. this._render(timestamp, false);
  18406. //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
  18407. // repeat
  18408. if (this._active)
  18409. window.requestAnimationFrame(render);
  18410. };
  18411. window.requestAnimationFrame(render);
  18412. }
  18413. /**
  18414. * Render a frame (RAF callback)
  18415. * @param time current time, in ms
  18416. * @param skipUserMedia skip copying the pixels of the user media to the background canvas in order to reduce the processing load (video stream is probably at 30fps?)
  18417. */
  18418. _render(time, skipUserMedia) {
  18419. // is the session active?
  18420. if (this._active) {
  18421. // are we ready to render a frame?
  18422. if (this._frameReady) {
  18423. // create a frame
  18424. const results = this._trackers.map(tracker => tracker._output.exports || ({
  18425. tracker: tracker,
  18426. trackables: [],
  18427. }));
  18428. const frame = new Frame(this, results);
  18429. // clone & clear the RAF queue
  18430. const rafQueue = this._rafQueue.slice(0);
  18431. this._rafQueue.length = 0;
  18432. // render user media
  18433. if (!skipUserMedia)
  18434. this._renderUserMedia();
  18435. // render frame
  18436. for (let i = 0; i < rafQueue.length; i++)
  18437. rafQueue[i][1].call(undefined, time, frame);
  18438. // update internals
  18439. this._renderStats.update();
  18440. this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
  18441. this._frameReady = false;
  18442. }
  18443. else {
  18444. // skip frame
  18445. ;
  18446. // we'll update the renderStats even if we skip the frame,
  18447. // otherwise this becomes updateStats! (approximately)
  18448. // This is a window.requestAnimationFrame() call, so the
  18449. // browser is rendering content even if we're not.
  18450. this._renderStats.update();
  18451. }
  18452. }
  18453. else {
  18454. // inactive session
  18455. this._renderStats.reset();
  18456. }
  18457. }
  18458. }
  18459. /** Number of active sessions */
  18460. Session._count = 0;
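/*
 * Usage sketch: starting a session directly via Session.instantiate(). The
 * identifiers mySource, myTracker and myViewport are hypothetical and stand
 * for previously created source, tracker and viewport objects.
 *
 *     Session.instantiate({
 *         mode: 'immersive',       // or 'inline'; only one immersive session may be active
 *         sources: [ mySource ],   // exactly one video source is required
 *         trackers: [ myTracker ],
 *         viewport: myViewport,    // required
 *         stats: true,             // show the stats panel
 *         gizmos: true             // render the debugging gizmos
 *     }).then(session => {
 *         session.requestAnimationFrame(function animate(time, frame) {
 *             // consume the frame and render your scene...
 *             session.requestAnimationFrame(animate);
 *         });
 *     });
 */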
  18461. ;// CONCATENATED MODULE: ./src/core/settings.ts
  18462. /*
  18463. * MARTINS.js
  18464. * GPU-accelerated Augmented Reality for the web
  18465. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18466. *
  18467. * This program is free software: you can redistribute it and/or modify
  18468. * it under the terms of the GNU Lesser General Public License as published
  18469. * by the Free Software Foundation, either version 3 of the License, or
  18470. * (at your option) any later version.
  18471. *
  18472. * This program is distributed in the hope that it will be useful,
  18473. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18474. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18475. * GNU Lesser General Public License for more details.
  18476. *
  18477. * You should have received a copy of the GNU Lesser General Public License
  18478. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18479. *
  18480. * settings.ts
  18481. * Global Settings
  18482. */
  18483. /**
  18484. * Global Settings
  18485. */
  18486. class Settings {
  18487. /**
  18488. * Power preference (may impact performance x battery life)
  18489. */
  18490. static get powerPreference() {
  18491. return this._powerPreference;
  18492. }
  18493. /**
  18494. * Power preference (may impact performance x battery life)
  18495. * Note: this setting should be the very first thing you set
  18496. * (before the WebGL context is created by Speedy)
  18497. */
  18498. static set powerPreference(value) {
  18499. // validate
  18500. if (Session.count > 0)
  18501. throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
  18502. else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
  18503. throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
  18504. /*
  18505. // we won't use 'high-performance' for Speedy's GPU computations
  18506. // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
  18507. // also, it seems like low-power mode may break WebGL2 in some drivers?!
  18508. if(value == 'high-performance')
  18509. Speedy.Settings.powerPreference = 'default';
  18510. else
  18511. Speedy.Settings.powerPreference = value;
  18512. */
  18513. // change the GPU polling mode
  18514. if (value == 'high-performance')
  18515. (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
  18516. else
  18517. (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
  18518. // update the power preference
  18519. this._powerPreference = value;
  18520. // log
  18521. Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
  18522. }
  18523. }
  18524. Settings._powerPreference = 'default';
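/*
 * Usage sketch: the power preference must be set before any session is
 * started (the setter throws if Session.count > 0):
 *
 *     Settings.powerPreference = 'low-power';          // roughly 30 fps, with frame skipping
 *     //Settings.powerPreference = 'high-performance'; // update loop scheduled with asap()
 *
 *     Session.instantiate(options).then(session => { ... });
 *
 * Internally, 'high-performance' switches Speedy's gpuPollingMode to 'asap';
 * the other values use 'raf'.
 */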
  18525. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
  18526. /*
  18527. * MARTINS.js
  18528. * GPU-accelerated Augmented Reality for the web
  18529. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18530. *
  18531. * This program is free software: you can redistribute it and/or modify
  18532. * it under the terms of the GNU Lesser General Public License as published
  18533. * by the Free Software Foundation, either version 3 of the License, or
  18534. * (at your option) any later version.
  18535. *
  18536. * This program is distributed in the hope that it will be useful,
  18537. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18538. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18539. * GNU Lesser General Public License for more details.
  18540. *
  18541. * You should have received a copy of the GNU Lesser General Public License
  18542. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18543. *
  18544. * reference-image-database.ts
  18545. * A collection of Reference Images
  18546. */
  18547. /** Default capacity of a Reference Image Database */
  18548. const DEFAULT_CAPACITY = 100;
  18549. /** Generate a unique name for a reference image */
  18550. const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
  18551. /**
  18552. * A collection of Reference Images
  18553. */
  18554. class ReferenceImageDatabase {
  18555. /**
  18556. * Constructor
  18557. */
  18558. constructor() {
  18559. this._capacity = DEFAULT_CAPACITY;
  18560. this._database = [];
  18561. this._locked = false;
  18562. }
  18563. /**
  18564. * The number of reference images stored in this database
  18565. */
  18566. get count() {
  18567. return this._database.length;
  18568. }
  18569. /**
  18570. * Maximum number of elements
  18571. */
  18572. get capacity() {
  18573. return this._capacity;
  18574. }
  18575. /**
  18576. * Maximum number of elements
  18577. */
  18578. /*
  18579. set capacity(value: number)
  18580. {
  18581. const capacity = Math.max(0, value | 0);
  18582. if(this.count > capacity)
  18583. throw new IllegalArgumentError(`Can't set the capacity of the database to ${this._capacity}: it currently stores ${this.count} entries`);
  18584. this._capacity = capacity;
  18585. }
  18586. */
  18587. /**
  18588. * Iterates over the collection
  18589. */
  18590. *[Symbol.iterator]() {
  18591. const ref = this._database.map(entry => entry.referenceImage);
  18592. yield* ref;
  18593. }
  18594. /**
  18595. * Add reference images to this database
  18596. * Add only the images you actually need to track!
18597. * (each image takes up storage space)
  18598. * @param referenceImages one or more reference images with unique names (a unique name will
  18599. * be generated automatically if you don't specify one)
  18600. * @returns a promise that resolves as soon as the images are loaded and added to this database
  18601. */
  18602. add(referenceImages) {
  18603. // handle no input
  18604. if (referenceImages.length == 0)
  18605. return speedy_vision_default().Promise.resolve();
  18606. // handle multiple images as input
  18607. if (referenceImages.length > 1) {
  18608. const promises = referenceImages.map(image => this.add([image]));
  18609. return speedy_vision_default().Promise.all(promises).then(() => void (0));
  18610. }
  18611. // handle a single image as input
  18612. const referenceImage = referenceImages[0];
  18613. // locked database?
  18614. if (this._locked)
  18615. throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
  18616. // reached full capacity?
  18617. if (this.count >= this.capacity)
  18618. throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images has been exceeded.`);
  18619. // check for duplicate names
  18620. if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
  18621. throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
  18622. // load the media and add the reference image to the database
  18623. return speedy_vision_default().load(referenceImage.image).then(media => {
  18624. this._database.push({
  18625. referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
  18626. media: media
  18627. });
  18628. });
  18629. }
  18630. /**
  18631. * Lock the database, so that new reference images can no longer be added to it
  18632. * @internal
  18633. */
  18634. _lock() {
  18635. this._locked = true;
  18636. }
  18637. /**
  18638. * Get the media object associated to a reference image
  18639. * @param name reference image name
  18640. * @returns media
  18641. * @internal
  18642. */
  18643. _findMedia(name) {
  18644. for (let i = 0; i < this._database.length; i++) {
  18645. if (this._database[i].referenceImage.name === name)
  18646. return this._database[i].media;
  18647. }
  18648. throw new IllegalArgumentError(`Can't find reference image "${name}"`);
  18649. }
  18650. }
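/*
 * Usage sketch: adding reference images to a database before training. Each
 * entry needs an `image` (e.g., an HTMLImageElement) and, optionally, a
 * unique `name`. The identifiers below are hypothetical; the database is
 * typically obtained from an Image Tracker.
 *
 *     const database = myImageTracker.database;
 *     database.add([
 *         { name: 'my-poster', image: document.getElementById('poster') },
 *         { image: anotherImageElement } // a unique name will be generated
 *     ]).then(() => {
 *         console.log(`The database now stores ${database.count} image(s)`);
 *     });
 *
 * Once training starts the database is locked (see _lock() above), so all
 * reference images must be added beforehand.
 */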
  18651. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
  18652. /*
  18653. * MARTINS.js
  18654. * GPU-accelerated Augmented Reality for the web
  18655. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18656. *
  18657. * This program is free software: you can redistribute it and/or modify
  18658. * it under the terms of the GNU Lesser General Public License as published
  18659. * by the Free Software Foundation, either version 3 of the License, or
  18660. * (at your option) any later version.
  18661. *
  18662. * This program is distributed in the hope that it will be useful,
  18663. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18664. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18665. * GNU Lesser General Public License for more details.
  18666. *
  18667. * You should have received a copy of the GNU Lesser General Public License
  18668. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18669. *
  18670. * settings.ts
  18671. * Settings of the Image Tracker
  18672. */
  18673. /** Default tracking resolution */
  18674. const DEFAULT_TRACKING_RESOLUTION = 'sm+';
  18675. /** Maximum number of keypoints to be stored for each reference image when in the training state */
  18676. const TRAIN_MAX_KEYPOINTS = 1024; //512;
18677. /** Scale of the image used in training, relative to the screen size adjusted to the aspect ratio of the reference image */
  18678. const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
  18679. /** Normalized width & height of an image target, in pixels */
  18680. const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
18681. /** Used to identify the best matches */
  18682. const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
  18683. /** Maximum number of keypoints to be analyzed when in the scanning state */
  18684. const SCAN_MAX_KEYPOINTS = 512;
  18685. /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
  18686. const SCAN_PYRAMID_LEVELS = 4; //7;
  18687. /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
  18688. const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
  18689. /** Threshold of the FAST corner detector used in the scanning/training states */
  18690. const SCAN_FAST_THRESHOLD = 60;
  18691. /** Minimum number of accepted matches for us to move out from the scanning state */
  18692. const SCAN_MIN_MATCHES = 20; //30;
18693. /** When in the scanning state, we require the image to be matched for a few consecutive frames before accepting it */
  18694. const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
  18695. /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
  18696. const SCAN_RANSAC_REPROJECTIONERROR = 5;
  18697. /** Number of tables used in the LSH-based keypoint matching */
  18698. const SCAN_LSH_TABLES = 8; // up to 32
  18699. /** Hash size, in bits, used in the LSH-based keypoint matching */
  18700. const SCAN_LSH_HASHSIZE = 15; // up to 16
  18701. /** Use the Nightvision filter when in the scanning/training state? */
  18702. const SCAN_WITH_NIGHTVISION = true;
  18703. /** Nightvision filter: gain */
  18704. const NIGHTVISION_GAIN = 0.3; // 0.2;
  18705. /** Nightvision filter: offset */
  18706. const NIGHTVISION_OFFSET = 0.5;
  18707. /** Nightvision filter: decay */
  18708. const NIGHTVISION_DECAY = 0.0;
  18709. /** Nightvision filter: quality level */
  18710. const NIGHTVISION_QUALITY = 'low';
  18711. /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
  18712. const ORB_GAUSSIAN_KSIZE = 9;
  18713. /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
  18714. const ORB_GAUSSIAN_SIGMA = 2.0;
  18715. /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
  18716. const SUBPIXEL_GAUSSIAN_KSIZE = 5;
  18717. /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
  18718. const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
  18719. /** Subpixel refinement method */
  18720. const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
  18721. /** Minimum acceptable number of matched keypoints when in the tracking state */
  18722. const TRACK_MIN_MATCHES = 4; //10; //20;
  18723. /** Maximum number of keypoints to be analyzed in the tracking state */
  18724. const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
18725. /** Capacity of the keypoint detector used in the tracking state */
  18726. const TRACK_DETECTOR_CAPACITY = 2048; //4096;
  18727. /** Quality of the Harris/Shi-Tomasi corner detector */
  18728. const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
  18729. /** Use the Nightvision filter when in the tracking state? */
  18730. const TRACK_WITH_NIGHTVISION = false; // produces shaking?
  18731. /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
  18732. const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
  18733. /** Relative size (%) used to clip keypoints from the borders of the rectified image */
  18734. const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
  18735. /** Number of iterations used to refine the target image before tracking */
  18736. const TRACK_REFINEMENT_ITERATIONS = 3;
  18737. /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
  18738. const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
18739. /** We use an N x N grid to spatially distribute the keypoints in order to compute a better homography */
  18740. const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
18741. /** Used to identify the best matches */
  18742. const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
  18743. /** Number of consecutive frames in which we tolerate a "target lost" situation */
  18744. const TRACK_LOST_TOLERANCE = 10;
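/*
 * Worked example of the ratio test associated with SCAN_MATCH_RATIO and
 * TRACK_MATCH_RATIO (the same idea as GOOD_MATCH_THRESHOLD in core/gizmos.ts):
 * given the two nearest matches of a keypoint, the best one is accepted only
 * if it is sufficiently better than the runner-up. Illustrative numbers:
 *
 *     // hypothetical descriptor distances
 *     30 <= 0.7 * 50   // 30 <= 35   -> accepted
 *     40 <= 0.7 * 45   // 40 <= 31.5 -> rejected
 *
 * Lower ratios mean stricter matching, as noted next to TRACK_MATCH_RATIO.
 */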
  18745. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
  18746. /*
  18747. * MARTINS.js
  18748. * GPU-accelerated Augmented Reality for the web
  18749. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18750. *
  18751. * This program is free software: you can redistribute it and/or modify
  18752. * it under the terms of the GNU Lesser General Public License as published
  18753. * by the Free Software Foundation, either version 3 of the License, or
  18754. * (at your option) any later version.
  18755. *
  18756. * This program is distributed in the hope that it will be useful,
  18757. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18758. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18759. * GNU Lesser General Public License for more details.
  18760. *
  18761. * You should have received a copy of the GNU Lesser General Public License
  18762. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18763. *
  18764. * state.ts
  18765. * Abstract state of the Image Tracker
  18766. */
  18767. /**
  18768. * Abstract state of the Image Tracker
  18769. */
  18770. class ImageTrackerState {
  18771. /**
  18772. * Constructor
  18773. * @param name
  18774. * @param imageTracker
  18775. */
  18776. constructor(name, imageTracker) {
  18777. this._name = name;
  18778. this._imageTracker = imageTracker;
  18779. this._pipeline = this._createPipeline();
  18780. }
  18781. /**
  18782. * State name
  18783. */
  18784. get name() {
  18785. return this._name;
  18786. }
  18787. /**
  18788. * AR screen size
  18789. */
  18790. get screenSize() {
  18791. const screen = this._pipeline.node('screen');
  18792. if (!screen)
  18793. throw new IllegalOperationError();
  18794. // this is available once this state has run at least once
  18795. return screen.size;
  18796. }
  18797. /**
  18798. * Initialize the state
  18799. */
  18800. init() {
  18801. }
  18802. /**
  18803. * Release resources
  18804. */
  18805. release() {
  18806. return this._pipeline.release();
  18807. }
  18808. /**
  18809. * Update the state
  18810. * @param media user media
  18811. * @param screenSize AR screen size for image processing
  18812. * @param state all states
  18813. * @returns promise
  18814. */
  18815. update(media, screenSize) {
  18816. const source = this._pipeline.node('source');
  18817. const screen = this._pipeline.node('screen');
  18818. // validate the pipeline
  18819. if (!source || !screen)
  18820. throw new IllegalOperationError();
  18821. // prepare the pipeline
  18822. source.media = media;
  18823. screen.size = screenSize;
  18824. // run the pipeline
  18825. return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
  18826. }
  18827. /**
  18828. * Called as soon as this becomes the active state, just before update() runs for the first time
  18829. * @param settings
  18830. */
  18831. onEnterState(settings) {
  18832. }
  18833. /**
  18834. * Called when leaving the state, after update()
  18835. */
  18836. onLeaveState() {
  18837. }
  18838. /**
  18839. * Called just before the GPU processing
  18840. * @returns promise
  18841. */
  18842. _beforeUpdate() {
  18843. return speedy_vision_default().Promise.resolve();
  18844. }
  18845. /**
  18846. * GPU processing
  18847. * @returns promise with the pipeline results
  18848. */
  18849. _gpuUpdate() {
  18850. return this._pipeline.run();
  18851. }
  18852. //
  18853. // Some utility methods common to various states
  18854. //
  18855. /**
  18856. * Find the coordinates of a polyline surrounding the target image
  18857. * @param homography maps the target image to the AR screen
  18858. * @param targetSize size of the target space
  18859. * @returns promise that resolves to 4 points in AR screen space
  18860. */
  18861. _findPolylineCoordinates(homography, targetSize) {
  18862. const w = targetSize.width, h = targetSize.height;
  18863. const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
  18864. 0, 0,
  18865. w, 0,
  18866. w, h,
  18867. 0, h,
  18868. ]);
  18869. const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
  18870. return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
  18871. }
  18872. /**
  18873. * Find a polyline surrounding the target image
  18874. * @param homography maps the target image to the AR screen
  18875. * @param targetSize size of the target space
  18876. * @returns promise that resolves to 4 points in AR screen space
  18877. */
  18878. _findPolyline(homography, targetSize) {
  18879. return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
  18880. const polydata = polylineCoordinates.read();
  18881. const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
  18882. return polyline;
  18883. });
  18884. }
  18885. /**
  18886. * Whether or not to rotate the warped image in order to best fit the AR screen
  18887. * @param media media associated with the reference image
  18888. * @param screenSize AR screen
  18889. * @returns boolean
  18890. */
  18891. _mustRotateWarpedImage(media, screenSize) {
  18892. const screenAspectRatio = screenSize.width / screenSize.height;
  18893. const mediaAspectRatio = media.width / media.height;
  18894. const eps = 0.1;
  18895. return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
  18896. }
  18897. /**
  18898. * Find a rectification matrix to be applied to an image fitting the entire AR screen
  18899. * @param media media associated with the reference image
  18900. * @param screenSize AR screen
  18901. * @returns promise that resolves to a rectification matrix
  18902. */
  18903. _findRectificationMatrixOfFullscreenImage(media, screenSize) {
  18904. const b = TRACK_RECTIFIED_BORDER;
  18905. const sw = screenSize.width, sh = screenSize.height;
  18906. const mediaAspectRatio = media.width / media.height;
  18907. const mustRotate = this._mustRotateWarpedImage(media, screenSize);
  18908. // compute the vertices of the target in screen space
  18909. // we suppose portrait or landscape mode for both screen & media
  18910. const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
  18911. const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
  18912. const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
  18913. const right = sw - left;
  18914. const bottom = sh - top;
  18915. const targetVertices = speedy_vision_default().Matrix(2, 4, [
  18916. left, top,
  18917. right, top,
  18918. right, bottom,
  18919. left, bottom,
  18920. ]);
  18921. const screenVertices = speedy_vision_default().Matrix(2, 4, [
  18922. 0, 0,
  18923. sw, 0,
  18924. sw, sh,
  18925. 0, sh
  18926. ]);
  18927. const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
  18928. const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
  18929. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  18930. return (mustRotate ? speedy_vision_default().Matrix.perspective(
18931. // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
  18932. preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
  18933. // alignment: align the target to the center of the screen
  18934. speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
  18935. // pre-rectify and then align
  18936. rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
  18937. }
  18938. /**
  18939. * Find a rectification matrix to be applied to the target image
  18940. * @param homography maps a reference image to the AR screen
  18941. * @param targetSize size of the target space
  18942. * @param media media associated with the reference image
  18943. * @param screenSize AR screen
  18944. * @returns promise that resolves to a rectification matrix
  18945. */
  18946. _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
  18947. const sw = screenSize.width, sh = screenSize.height;
  18948. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  18949. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  18950. return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
  18951. // from target space to (full)screen
  18952. speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
  18953. // from (full)screen to rectified coordinates
  18954. this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
  18955. // function composition
  18956. rectificationMatrix.setTo(mat.times(rectificationMatrix)));
  18957. }
  18958. }
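/*
 * Sketch of what _findPolylineCoordinates() computes: the four corners of the
 * target image, (0,0), (w,0), (w,h) and (0,h), are mapped to AR screen space
 * by the homography. Conceptually, for each corner (u,v):
 *
 *     [x']   [ h00 h01 h02 ] [u]
 *     [y'] = [ h10 h11 h12 ] [v]   and the screen point is (x'/w', y'/w')
 *     [w']   [ h20 h21 h22 ] [1]
 *
 * _findPolyline() reads the resulting 2x4 matrix and wraps each column in a
 * Point2, producing the polyline that the gizmos render.
 */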
  18959. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
  18960. /*
  18961. * MARTINS.js
  18962. * GPU-accelerated Augmented Reality for the web
  18963. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18964. *
  18965. * This program is free software: you can redistribute it and/or modify
  18966. * it under the terms of the GNU Lesser General Public License as published
  18967. * by the Free Software Foundation, either version 3 of the License, or
  18968. * (at your option) any later version.
  18969. *
  18970. * This program is distributed in the hope that it will be useful,
  18971. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18972. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18973. * GNU Lesser General Public License for more details.
  18974. *
  18975. * You should have received a copy of the GNU Lesser General Public License
  18976. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18977. *
  18978. * initial.ts
  18979. * Initial state of the Image Tracker
  18980. */
  18981. /**
  18982. * The purpose of the initial state of the Image Tracker
  18983. * is to initialize the training state using the state machine
  18984. */
  18985. class ImageTrackerInitialState extends ImageTrackerState {
  18986. /**
  18987. * Constructor
  18988. * @param imageTracker
  18989. */
  18990. constructor(imageTracker) {
  18991. super('initial', imageTracker);
  18992. }
  18993. /**
  18994. * Called just before the GPU processing
  18995. * @returns promise
  18996. */
  18997. _beforeUpdate() {
  18998. const source = this._pipeline.node('source');
  18999. const media = source.media;
  19000. const mediaSize = media.size;
  19001. if (mediaSize.area() < this.screenSize.area())
  19002. Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
  19003. return speedy_vision_default().Promise.resolve();
  19004. }
  19005. /**
  19006. * Post processing that takes place just after the GPU processing
  19007. * @param result pipeline results
  19008. * @returns state output
  19009. */
  19010. _afterUpdate(result) {
  19011. return speedy_vision_default().Promise.resolve({
  19012. nextState: 'training',
  19013. trackerOutput: {},
  19014. });
  19015. }
  19016. /**
  19017. * Create & setup the pipeline
  19018. * @returns pipeline
  19019. */
  19020. _createPipeline() {
  19021. // this pipeline does nothing useful,
  19022. // but it does preload some shaders...
  19023. const pipeline = speedy_vision_default().Pipeline();
  19024. const source = speedy_vision_default().Image.Source('source');
  19025. const screen = speedy_vision_default().Transform.Resize('screen');
  19026. const greyscale = speedy_vision_default().Filter.Greyscale();
  19027. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
  19028. const nightvision = speedy_vision_default().Filter.Nightvision();
  19029. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  19030. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  19031. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19032. const blur = speedy_vision_default().Filter.GaussianBlur();
  19033. const clipper = speedy_vision_default().Keypoint.Clipper();
  19034. const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
  19035. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19036. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19037. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  19038. const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
  19039. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
  19040. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
  19041. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  19042. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
  19043. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  19044. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
  19045. source.media = null;
  19046. screen.size = speedy_vision_default().Size(0, 0);
  19047. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19048. nightvision.quality = NIGHTVISION_QUALITY;
  19049. subpixel.method = SUBPIXEL_METHOD;
  19050. //borderClipper.imageSize = screen.size;
  19051. borderClipper.imageSize = speedy_vision_default().Size(100, 100);
  19052. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  19053. matcher.k = 1; //2;
  19054. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19055. keypointPortalSource.source = keypointPortalSink;
  19056. muxOfReferenceKeypoints.port = 0;
  19057. muxOfBufferOfReferenceKeypoints.port = 0;
  19058. bufferOfReferenceKeypoints.frozen = false;
  19059. keypointSink.turbo = false;
  19060. // prepare input
  19061. source.output().connectTo(screen.input());
  19062. screen.output().connectTo(greyscale.input());
  19063. // preprocess images
  19064. greyscale.output().connectTo(imageRectifier.input());
  19065. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  19066. imageRectifier.output().connectTo(nightvision.input());
  19067. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19068. nightvisionMux.output().connectTo(blur.input());
  19069. // keypoint detection & clipping
  19070. nightvisionMux.output().connectTo(detector.input());
  19071. detector.output().connectTo(borderClipper.input());
  19072. borderClipper.output().connectTo(clipper.input());
  19073. // keypoint refinement
  19074. imageRectifier.output().connectTo(denoiser.input());
  19075. denoiser.output().connectTo(subpixel.input('image'));
  19076. clipper.output().connectTo(subpixel.input('keypoints'));
  19077. // keypoint description
  19078. blur.output().connectTo(descriptor.input('image'));
  19079. subpixel.output().connectTo(descriptor.input('keypoints'));
  19080. // keypoint matching
  19081. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  19082. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  19083. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  19084. descriptor.output().connectTo(matcher.input('keypoints'));
  19085. // store reference keypoints
  19086. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  19087. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  19088. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  19089. // portals
  19090. descriptor.output().connectTo(keypointPortalSink.input());
  19091. // prepare output
  19092. descriptor.output().connectTo(keypointRectifier.input());
  19093. keypointRectifier.output().connectTo(keypointSink.input());
  19094. matcher.output().connectTo(keypointSink.input('matches'));
  19095. // done!
  19096. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  19097. /*
  19098. const run = pipeline.run.bind(pipeline);
  19099. pipeline.run = function() {
  19100. console.time("TIME");
  19101. return run().then(x => {
  19102. console.timeEnd("TIME");
  19103. return x;
  19104. });
  19105. };
  19106. */
  19107. return pipeline;
  19108. }
  19109. }
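/*
 * The _createPipeline() methods of these tracker states all follow the same speedy-vision
 * pattern: instantiate nodes, set their parameters, wire them with output().connectTo(input()),
 * register them with pipeline.init(), and read the results of pipeline.run() keyed by sink name.
 * A minimal illustrative sketch of that pattern, assuming `media` is a loaded SpeedyMedia:
 *
 *   const pipeline = speedy_vision_default().Pipeline();
 *   const source = speedy_vision_default().Image.Source('source');
 *   const greyscale = speedy_vision_default().Filter.Greyscale();
 *   const sink = speedy_vision_default().Image.Sink('image');
 *   source.media = media;
 *   source.output().connectTo(greyscale.input());
 *   greyscale.output().connectTo(sink.input());
 *   pipeline.init(source, greyscale, sink);
 *   pipeline.run().then(result => {
 *       // result.image is the output of the sink named 'image'
 *   });
 */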
  19110. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
  19111. /*
  19112. * MARTINS.js
  19113. * GPU-accelerated Augmented Reality for the web
  19114. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19115. *
  19116. * This program is free software: you can redistribute it and/or modify
  19117. * it under the terms of the GNU Lesser General Public License as published
  19118. * by the Free Software Foundation, either version 3 of the License, or
  19119. * (at your option) any later version.
  19120. *
  19121. * This program is distributed in the hope that it will be useful,
  19122. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19123. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19124. * GNU Lesser General Public License for more details.
  19125. *
  19126. * You should have received a copy of the GNU Lesser General Public License
  19127. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19128. *
  19129. * training.ts
  19130. * Training state of the Image Tracker
  19131. */
  19132. /**
  19133. * Training state of the Image Tracker
  19134. */
  19135. class ImageTrackerTrainingState extends ImageTrackerState {
  19136. /**
  19137. * Constructor
  19138. * @param imageTracker
  19139. */
  19140. constructor(imageTracker) {
  19141. super('training', imageTracker);
  19142. this._currentImageIndex = 0;
  19143. this._image = [];
  19144. // initialize the training map
  19145. this._trainingMap = {
  19146. referenceImageIndex: [],
  19147. referenceImage: [],
  19148. keypoints: []
  19149. };
  19150. }
  19151. /**
  19152. * Called as soon as this becomes the active state, just before update() runs for the first time
  19153. * @param settings
  19154. */
  19155. onEnterState(settings) {
  19156. const database = this._imageTracker.database;
  19157. // validate
  19158. if (database.count == 0)
  19159. throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
  19160. // prepare to train...
  19161. this._currentImageIndex = 0;
  19162. this._image.length = 0;
  19163. this._trainingMap.referenceImageIndex.length = 0;
  19164. this._trainingMap.referenceImage.length = 0;
  19165. this._trainingMap.keypoints.length = 0;
  19166. // lock the database
  19167. Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
  19168. database._lock();
  19169. // collect all images
  19170. for (const referenceImage of database)
  19171. this._image.push(referenceImage);
  19172. }
  19173. /**
  19174. * Called just before the GPU processing
  19175. * @returns promise
  19176. */
  19177. _beforeUpdate() {
  19178. const arScreenSize = this.screenSize;
  19179. const source = this._pipeline.node('source');
  19180. const screen = this._pipeline.node('screen');
  19181. const keypointScaler = this._pipeline.node('keypointScaler');
  19182. // this shouldn't happen
  19183. if (this._currentImageIndex >= this._image.length)
  19184. return speedy_vision_default().Promise.reject(new IllegalOperationError());
  19185. // set the appropriate training media
  19186. const database = this._imageTracker.database;
  19187. const referenceImage = this._image[this._currentImageIndex];
  19188. const media = database._findMedia(referenceImage.name);
  19189. source.media = media;
  19190. // compute the appropriate size of the training image space
  19191. const resolution = this._imageTracker.resolution;
  19192. const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
  19193. const aspectRatioOfTrainingImage = media.width / media.height;
  19194. /*
  19195. let sin = 0, cos = 1;
  19196. if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
  19197. // training image and source video: both in landscape mode or both in portrait mode
  19198. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  19199. screen.size.width = Math.round(screen.size.width * scale);
  19200. screen.size.height = Math.round(screen.size.height * scale);
  19201. }
  19202. else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
  19203. // training image: portrait mode; source video: landscape mode
  19204. screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
  19205. screen.size.width = Math.round(screen.size.width * scale);
  19206. screen.size.height = Math.round(screen.size.height * scale);
  19207. sin = 1; cos = 0; // rotate 90deg
  19208. }
  19209. else {
  19210. // training image: landscape mode; source video: portrait mode
  19211. }
  19212. */
  19213. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  19214. screen.size.width = Math.round(screen.size.width * scale);
  19215. screen.size.height = Math.round(screen.size.height * scale);
19216. // convert keypoints from the training image space to a normalized target space
19217. // let's pretend that the trained keypoints belong to this fixed, normalized space,
19218. // regardless of the size of the target image. This will make things
19219. // easier when computing the homography.
  19220. /*
  19221. const sw = arScreenSize.width / screen.size.width;
  19222. const sh = arScreenSize.height / screen.size.height;
  19223. */
  19224. const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
  19225. const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
  19226. keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
  19227. sw, 0, 0,
  19228. 0, sh, 0,
  19229. 0, 0, 1,
  19230. ]);
  19231. // log
  19232. Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
  19233. // done!
  19234. return speedy_vision_default().Promise.resolve();
  19235. }
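/*
 * Worked example of the keypoint scaler above, taking TRAIN_TARGET_NORMALIZED_SIZE = 1000
 * purely for illustration: for an 800x600 training image space, sw = 1000/800 = 1.25 and
 * sh = 1000/600 ~ 1.667, so the diagonal matrix diag(sw, sh, 1) maps the center (400, 300)
 * of the training image space to (500, 500), the center of the normalized target space.
 */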
  19236. /**
  19237. * Post processing that takes place just after the GPU processing
  19238. * @param result pipeline results
  19239. * @returns state output
  19240. */
  19241. _afterUpdate(result) {
  19242. const referenceImage = this._image[this._currentImageIndex];
  19243. const keypoints = result.keypoints;
  19244. const image = result.image;
  19245. // log
  19246. Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
19247. // update the training map, so that each keypoint of the current image can be mapped back to its reference image
  19248. this._trainingMap.referenceImage.push(referenceImage);
  19249. for (let i = 0; i < keypoints.length; i++) {
  19250. this._trainingMap.keypoints.push(keypoints[i]);
  19251. this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
  19252. }
  19253. // the current image has been processed!
  19254. ++this._currentImageIndex;
  19255. // set output
  19256. if (this._currentImageIndex >= this._image.length) {
  19257. // finished training!
  19258. return speedy_vision_default().Promise.resolve({
  19259. //nextState: 'training',
  19260. nextState: 'scanning',
  19261. nextStateSettings: {
  19262. keypoints: this._trainingMap.keypoints,
  19263. },
  19264. trackerOutput: {},
  19265. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  19266. });
  19267. }
  19268. else {
  19269. // we're not done yet
  19270. return speedy_vision_default().Promise.resolve({
  19271. nextState: 'training',
  19272. trackerOutput: {},
  19273. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  19274. });
  19275. }
  19276. }
  19277. /**
  19278. * Create & setup the pipeline
  19279. * @returns pipeline
  19280. */
  19281. _createPipeline() {
  19282. const pipeline = speedy_vision_default().Pipeline();
  19283. const source = speedy_vision_default().Image.Source('source');
  19284. const screen = speedy_vision_default().Transform.Resize('screen');
  19285. const greyscale = speedy_vision_default().Filter.Greyscale();
  19286. const blur = speedy_vision_default().Filter.GaussianBlur();
  19287. const nightvision = speedy_vision_default().Filter.Nightvision();
  19288. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  19289. const pyramid = speedy_vision_default().Image.Pyramid();
  19290. const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
  19291. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19292. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19293. const blurredPyramid = speedy_vision_default().Image.Pyramid();
  19294. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19295. const clipper = speedy_vision_default().Keypoint.Clipper();
  19296. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  19297. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  19298. const imageSink = speedy_vision_default().Image.Sink('image');
  19299. source.media = null;
  19300. screen.size = speedy_vision_default().Size(0, 0);
  19301. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19302. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19303. nightvision.gain = NIGHTVISION_GAIN;
  19304. nightvision.offset = NIGHTVISION_OFFSET;
  19305. nightvision.decay = NIGHTVISION_DECAY;
  19306. nightvision.quality = NIGHTVISION_QUALITY;
  19307. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19308. detector.levels = SCAN_PYRAMID_LEVELS;
  19309. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  19310. detector.threshold = SCAN_FAST_THRESHOLD;
  19311. detector.capacity = 8192;
  19312. subpixel.method = SUBPIXEL_METHOD;
  19313. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  19314. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  19315. clipper.size = TRAIN_MAX_KEYPOINTS;
  19316. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  19317. keypointSink.turbo = false;
  19318. // prepare input
  19319. source.output().connectTo(screen.input());
  19320. screen.output().connectTo(greyscale.input());
  19321. // preprocess image
  19322. greyscale.output().connectTo(nightvisionMux.input('in0'));
  19323. greyscale.output().connectTo(nightvision.input());
  19324. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19325. nightvisionMux.output().connectTo(pyramid.input());
  19326. // keypoint detection
  19327. pyramid.output().connectTo(detector.input());
  19328. detector.output().connectTo(clipper.input());
  19329. // keypoint refinement
  19330. greyscale.output().connectTo(denoiser.input()); // reduce noise
  19331. denoiser.output().connectTo(blurredPyramid.input());
  19332. clipper.output().connectTo(subpixel.input('keypoints'));
  19333. blurredPyramid.output().connectTo(subpixel.input('image'));
  19334. // keypoint description
  19335. greyscale.output().connectTo(blur.input());
  19336. blur.output().connectTo(descriptor.input('image'));
  19337. clipper.output().connectTo(descriptor.input('keypoints'));
  19338. // prepare output
  19339. descriptor.output().connectTo(keypointScaler.input());
  19340. keypointScaler.output().connectTo(keypointSink.input());
  19341. nightvisionMux.output().connectTo(imageSink.input());
  19342. // done!
  19343. pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
  19344. return pipeline;
  19345. }
  19346. /**
  19347. * Get reference image
  19348. * @param keypointIndex -1 if not found
  19349. * @returns reference image
  19350. */
  19351. referenceImageOfKeypoint(keypointIndex) {
  19352. const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
  19353. if (imageIndex < 0)
  19354. return null;
  19355. return this._trainingMap.referenceImage[imageIndex];
  19356. }
  19357. /**
  19358. * Get reference image index
19359. * @param keypointIndex index of a keypoint in the trained set
  19360. * @returns reference image index, or -1 if not found
  19361. */
  19362. referenceImageIndexOfKeypoint(keypointIndex) {
  19363. const n = this._trainingMap.referenceImageIndex.length;
  19364. if (keypointIndex < 0 || keypointIndex >= n)
  19365. return -1;
  19366. const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
  19367. if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
  19368. return -1;
  19369. return imageIndex;
  19370. }
  19371. /**
  19372. * Get keypoint of the trained set
19373. * @param keypointIndex index of a keypoint in the trained set
19374. * @returns a keypoint, or null if not found
  19375. */
  19376. referenceKeypoint(keypointIndex) {
  19377. if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
  19378. return null;
  19379. return this._trainingMap.keypoints[keypointIndex];
  19380. }
  19381. }
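/*
 * A short usage sketch of the training map built above, once training is complete
 * (`trainingState` and `matchIndex` are hypothetical names used for illustration):
 *
 *   const keypoint = trainingState.referenceKeypoint(matchIndex);
 *   const image = trainingState.referenceImageOfKeypoint(matchIndex);
 *   if (keypoint !== null && image !== null)
 *       console.log(`keypoint ${matchIndex} of "${image.name}" is at (${keypoint.x}, ${keypoint.y})`);
 */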
  19382. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
  19383. /*
  19384. * MARTINS.js
  19385. * GPU-accelerated Augmented Reality for the web
  19386. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19387. *
  19388. * This program is free software: you can redistribute it and/or modify
  19389. * it under the terms of the GNU Lesser General Public License as published
  19390. * by the Free Software Foundation, either version 3 of the License, or
  19391. * (at your option) any later version.
  19392. *
  19393. * This program is distributed in the hope that it will be useful,
  19394. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19395. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19396. * GNU Lesser General Public License for more details.
  19397. *
  19398. * You should have received a copy of the GNU Lesser General Public License
  19399. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19400. *
  19401. * scanning.ts
  19402. * Scanning state of the Image Tracker
  19403. */
  19404. /** Default target space size (used when training) */
  19405. const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  19406. /** Port of the portal multiplexer: get new data from the camera */
  19407. const PORT_CAMERA = 0;
  19408. /** Port of the portal multiplexer: get previously memorized data */
  19409. const PORT_MEMORY = 1;
  19410. /**
  19411. * Scanning state of the Image Tracker
  19412. */
  19413. class ImageTrackerScanningState extends ImageTrackerState {
  19414. /**
  19415. * Constructor
  19416. * @param imageTracker
  19417. */
  19418. constructor(imageTracker) {
  19419. super('scanning', imageTracker);
  19420. this._counter = 0;
  19421. this._bestScore = 0;
  19422. this._bestHomography = speedy_vision_default().Matrix.Eye(3);
  19423. }
  19424. /**
  19425. * Called as soon as this becomes the active state, just before update() runs for the first time
  19426. * @param settings
  19427. */
  19428. onEnterState(settings) {
  19429. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19430. const lshTables = this._pipeline.node('lshTables');
  19431. const keypoints = settings.keypoints;
  19432. // set attributes
  19433. this._counter = 0;
  19434. this._bestScore = 0;
  19435. // reset the image memorization circuit
  19436. imagePortalMux.port = PORT_CAMERA;
  19437. // prepare the keypoint matcher
  19438. if (keypoints !== undefined)
  19439. lshTables.keypoints = keypoints;
  19440. }
  19441. /**
  19442. * Post processing that takes place just after the GPU processing
  19443. * @param result pipeline results
  19444. * @returns state output
  19445. */
  19446. _afterUpdate(result) {
  19447. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19448. const keypoints = result.keypoints;
  19449. const matchedKeypoints = this._goodMatches(keypoints);
  19450. // tracker output
  19451. const trackerOutput = {
  19452. keypoints: keypoints,
  19453. screenSize: this.screenSize
  19454. };
  19455. // keep the last memorized image
  19456. imagePortalMux.port = PORT_MEMORY;
  19457. // have we found enough matches...?
  19458. if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
  19459. return this._findHomography(matchedKeypoints).then(([homography, score]) => {
  19460. // have we found the best homography so far?
  19461. if (score >= this._bestScore) {
  19462. // store it only if we'll be running the pipeline again
  19463. if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
  19464. this._bestScore = score;
  19465. this._bestHomography = homography;
  19466. // memorize the last image, corresponding to the best homography(*)
  19467. imagePortalMux.port = PORT_CAMERA;
  19468. /*
  19469. (*) technically speaking, this is not exactly the case. Since we're
  19470. using turbo to download the keypoints, there's a slight difference
  19471. between the data used to compute the homography and the last image.
  19472. Still, assuming continuity of the video stream, this logic is
  19473. good enough.
  19474. */
  19475. }
  19476. }
  19477. // find a polyline surrounding the target
  19478. return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
  19479. }).then(polyline => {
  19480. // continue a little longer in the scanning state
  19481. if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
  19482. return {
  19483. nextState: this.name,
  19484. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  19485. };
  19486. }
  19487. // this image should correspond to the best homography
  19488. const snapshot = this._pipeline.node('imagePortalSink');
  19489. // the reference image that we'll track
  19490. const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
  19491. // let's track the target!
  19492. return {
  19493. nextState: 'pre-tracking',
  19494. nextStateSettings: {
  19495. homography: this._bestHomography,
  19496. snapshot: snapshot,
  19497. referenceImage: referenceImage,
  19498. },
  19499. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  19500. };
  19501. }).catch(() => {
  19502. // continue in the scanning state
  19503. return {
  19504. nextState: this.name,
  19505. trackerOutput: trackerOutput,
  19506. };
  19507. });
  19508. }
  19509. else {
  19510. // not enough matches...!
  19511. this._counter = 0;
  19512. this._bestScore = 0;
  19513. }
  19514. // we'll continue to scan the scene
  19515. return speedy_vision_default().Promise.resolve({
  19516. nextState: this.name,
  19517. trackerOutput: trackerOutput,
  19518. });
  19519. }
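/*
 * Summary of the logic above: the scanner requires SCAN_CONSECUTIVE_FRAMES consecutive
 * frames with at least SCAN_MIN_MATCHES good matches; during that window it keeps the
 * homography with the highest inlier ratio and memorizes the corresponding camera image
 * via the image portal, then hands both over to the 'pre-tracking' state.
 */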
  19520. /**
  19521. * Find "high quality" matches of a single reference image
  19522. * @param keypoints
  19523. * @returns high quality matches
  19524. */
  19525. _goodMatches(keypoints) {
  19526. const matchedKeypointsPerImageIndex = Object.create(null);
  19527. // filter "good matches"
  19528. for (let j = keypoints.length - 1; j >= 0; j--) {
  19529. const keypoint = keypoints[j];
  19530. if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
  19531. const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
  19532. // the best match should be "much better" than the second best match,
  19533. // which means that they are "distinct enough"
  19534. if (d1 <= SCAN_MATCH_RATIO * d2) {
  19535. const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
  19536. //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
  19537. //if(idx1 == idx2 && idx1 >= 0) {
  19538. if (idx1 >= 0) {
  19539. if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
  19540. matchedKeypointsPerImageIndex[idx1] = [];
  19541. matchedKeypointsPerImageIndex[idx1].push(keypoint);
  19542. }
  19543. }
  19544. }
  19545. }
  19546. // find the image with the most matches
  19547. let matchedKeypoints = [];
  19548. for (const imageIndex in matchedKeypointsPerImageIndex) {
  19549. if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
  19550. matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
  19551. }
  19552. // done!
  19553. return matchedKeypoints;
  19554. }
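/*
 * Numeric illustration of the ratio test above (SCAN_MATCH_RATIO = 0.7 is used here purely
 * for illustration): a keypoint whose best and second-best descriptor distances are
 * d1 = 30 and d2 = 60 passes, since 30 <= 0.7 * 60 = 42; one with d1 = 50 and d2 = 60
 * is discarded as ambiguous, since 50 > 42.
 */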
  19555. /**
  19556. * Find a homography matrix using matched keypoints
  19557. * @param matchedKeypoints "good" matches only
  19558. * @returns homography from reference image space to AR screen space & homography "quality" score
  19559. */
  19560. _findHomography(matchedKeypoints) {
  19561. const srcCoords = [];
  19562. const dstCoords = [];
  19563. // find matching coordinates of the keypoints
  19564. for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
  19565. const matchedKeypoint = matchedKeypoints[i];
  19566. const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
  19567. if (referenceKeypoint != null) {
  19568. srcCoords.push(referenceKeypoint.x);
  19569. srcCoords.push(referenceKeypoint.y);
  19570. dstCoords.push(matchedKeypoint.x);
  19571. dstCoords.push(matchedKeypoint.y);
  19572. }
  19573. else {
  19574. // this shouldn't happen
  19575. return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
  19576. }
  19577. }
  19578. // too few points?
  19579. const n = srcCoords.length / 2;
  19580. if (n < 4) {
  19581. return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
  19582. }
  19583. // compute a homography
  19584. const src = speedy_vision_default().Matrix(2, n, srcCoords);
  19585. const dst = speedy_vision_default().Matrix(2, n, dstCoords);
  19586. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  19587. const homography = speedy_vision_default().Matrix.Zeros(3);
  19588. return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
  19589. method: 'pransac',
  19590. reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
  19591. numberOfHypotheses: 512,
  19592. bundleSize: 128,
  19593. mask: mask,
  19594. }).then(homography => {
  19595. // check if this is a valid homography
  19596. const a00 = homography.at(0, 0);
  19597. if (Number.isNaN(a00))
  19598. throw new DetectionError(`Can't compute homography`);
  19599. // count the number of inliers
  19600. const inliers = mask.read();
  19601. let inlierCount = 0;
  19602. for (let i = inliers.length - 1; i >= 0; i--)
  19603. inlierCount += inliers[i];
  19604. const score = inlierCount / inliers.length;
  19605. // done!
  19606. return [homography, score];
  19607. });
  19608. }
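/*
 * The score returned above is the inlier ratio of the PRANSAC mask: for example, if
 * mask.read() yields [1, 0, 1, 1, 0], then inlierCount = 3 and score = 3/5 = 0.6.
 * A score close to 1 means that nearly all good matches agree with the homography.
 */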
  19609. /**
  19610. * Create & setup the pipeline
  19611. * @returns pipeline
  19612. */
  19613. _createPipeline() {
  19614. const pipeline = speedy_vision_default().Pipeline();
  19615. const source = speedy_vision_default().Image.Source('source');
  19616. const screen = speedy_vision_default().Transform.Resize('screen');
  19617. const greyscale = speedy_vision_default().Filter.Greyscale();
  19618. const blur = speedy_vision_default().Filter.GaussianBlur();
  19619. const nightvision = speedy_vision_default().Filter.Nightvision();
  19620. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  19621. const pyramid = speedy_vision_default().Image.Pyramid();
  19622. const detector = speedy_vision_default().Keypoint.Detector.FAST();
  19623. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19624. const clipper = speedy_vision_default().Keypoint.Clipper();
  19625. const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
  19626. const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
  19627. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  19628. const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
  19629. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  19630. const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
  19631. const imagePortalBuffer = speedy_vision_default().Image.Buffer();
  19632. const imagePortalCopy = speedy_vision_default().Transform.Resize();
  19633. //const imageSink = Speedy.Image.Sink('image');
  19634. source.media = null;
  19635. screen.size = speedy_vision_default().Size(0, 0);
  19636. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19637. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19638. nightvision.gain = NIGHTVISION_GAIN;
  19639. nightvision.offset = NIGHTVISION_OFFSET;
  19640. nightvision.decay = NIGHTVISION_DECAY;
  19641. nightvision.quality = NIGHTVISION_QUALITY;
  19642. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19643. detector.levels = SCAN_PYRAMID_LEVELS;
  19644. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  19645. detector.threshold = SCAN_FAST_THRESHOLD;
  19646. detector.capacity = 2048;
  19647. clipper.size = SCAN_MAX_KEYPOINTS;
  19648. lshTables.keypoints = [];
  19649. lshTables.numberOfTables = SCAN_LSH_TABLES;
  19650. lshTables.hashSize = SCAN_LSH_HASHSIZE;
  19651. knn.k = 2;
  19652. knn.quality = 'default';
  19653. //knn.quality = 'fastest';
  19654. imagePortalSource.source = imagePortalSink;
  19655. imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
  19656. imagePortalCopy.size = speedy_vision_default().Size(0, 0);
  19657. imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
  19658. keypointSink.turbo = true;
  19659. // prepare input
  19660. source.output().connectTo(screen.input());
  19661. screen.output().connectTo(greyscale.input());
  19662. // preprocess image
  19663. greyscale.output().connectTo(blur.input());
  19664. greyscale.output().connectTo(nightvisionMux.input('in0'));
  19665. greyscale.output().connectTo(nightvision.input());
  19666. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19667. nightvisionMux.output().connectTo(pyramid.input());
  19668. // keypoint detection
  19669. pyramid.output().connectTo(detector.input());
  19670. detector.output().connectTo(clipper.input());
  19671. // keypoint description
  19672. blur.output().connectTo(descriptor.input('image'));
  19673. clipper.output().connectTo(descriptor.input('keypoints'));
  19674. // keypoint matching
  19675. descriptor.output().connectTo(knn.input('keypoints'));
  19676. lshTables.output().connectTo(knn.input('lsh'));
  19677. // prepare output
  19678. clipper.output().connectTo(keypointSink.input());
  19679. knn.output().connectTo(keypointSink.input('matches'));
  19680. //pyramid.output().connectTo(imageSink.input());
  19681. // memorize image
  19682. source.output().connectTo(imagePortalBuffer.input());
  19683. imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
  19684. imagePortalSource.output().connectTo(imagePortalCopy.input());
  19685. imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
  19686. imagePortalMux.output().connectTo(imagePortalSink.input());
  19687. // done!
  19688. pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
  19689. return pipeline;
  19690. }
  19691. }
  19692. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
  19693. /*
  19694. * MARTINS.js
  19695. * GPU-accelerated Augmented Reality for the web
  19696. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19697. *
  19698. * This program is free software: you can redistribute it and/or modify
  19699. * it under the terms of the GNU Lesser General Public License as published
  19700. * by the Free Software Foundation, either version 3 of the License, or
  19701. * (at your option) any later version.
  19702. *
  19703. * This program is distributed in the hope that it will be useful,
  19704. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19705. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19706. * GNU Lesser General Public License for more details.
  19707. *
  19708. * You should have received a copy of the GNU Lesser General Public License
  19709. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19710. *
  19711. * pre-tracking.ts
  19712. * Pre-tracking state of the Image Tracker
  19713. */
  19714. /** Default target space size (used when training) */
  19715. const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  19716. /** Use the camera stream as the input of the pipeline */
  19717. const PORT_CAMERA_IMAGE = 1;
  19718. /** Use the reference image as the input of the pipeline */
  19719. const PORT_REFERENCE_IMAGE = 0;
  19720. /**
  19721. * The pre-tracking state of the Image Tracker is a new training
  19722. * phase for the particular, actual target we'll be tracking
  19723. */
  19724. class ImageTrackerPreTrackingState extends ImageTrackerState {
  19725. /**
  19726. * Constructor
  19727. * @param imageTracker
  19728. */
  19729. constructor(imageTracker) {
  19730. super('pre-tracking', imageTracker);
  19731. this._homography = speedy_vision_default().Matrix.Eye(3);
  19732. this._referenceImage = null;
  19733. this._step = 'read-reference-image';
  19734. this._referenceKeypoints = [];
  19735. this._iterations = 0;
  19736. }
  19737. /**
  19738. * Called as soon as this becomes the active state, just before update() runs for the first time
  19739. * @param settings
  19740. */
  19741. onEnterState(settings) {
  19742. const imagePortalSource = this._pipeline.node('imagePortalSource');
  19743. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  19744. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  19745. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  19746. const homography = settings.homography;
  19747. const referenceImage = settings.referenceImage;
  19748. const snapshot = settings.snapshot;
  19749. // this shouldn't happen
  19750. if (!referenceImage)
  19751. throw new TrackingError(`Can't track a null reference image`);
  19752. // set attributes
  19753. this._homography = homography;
  19754. this._referenceImage = referenceImage;
  19755. this._step = 'read-reference-image';
  19756. this._referenceKeypoints = [];
  19757. this._iterations = 0;
  19758. // setup the pipeline
  19759. imagePortalSource.source = snapshot;
  19760. muxOfReferenceKeypoints.port = 0;
  19761. muxOfBufferOfReferenceKeypoints.port = 0;
  19762. bufferOfReferenceKeypoints.frozen = false;
  19763. }
  19764. /**
  19765. * Called just before the GPU processing
  19766. * @returns promise
  19767. */
  19768. _beforeUpdate() {
  19769. const referenceImage = this._referenceImage;
  19770. const source = this._pipeline.node('source');
  19771. const sourceMux = this._pipeline.node('sourceMux');
  19772. const imageRectifier = this._pipeline.node('imageRectifier');
  19773. const keypointRectifier = this._pipeline.node('keypointRectifier');
  19774. const borderClipper = this._pipeline.node('borderClipper');
  19775. const screenSize = this.screenSize;
  19776. // set the source media to the reference image we're going to track
  19777. const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
  19778. source.media = targetMedia;
  19779. // setup the source multiplexer
  19780. if (this._step == 'read-reference-image')
  19781. sourceMux.port = PORT_REFERENCE_IMAGE;
  19782. else
  19783. sourceMux.port = PORT_CAMERA_IMAGE;
  19784. // clip keypoints from the borders of the target image
  19785. borderClipper.imageSize = screenSize;
  19786. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  19787. // rectify the image
  19788. const rectify = (this._step == 'read-reference-image') ?
  19789. this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
  19790. this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
  19791. return rectify.then(rectificationMatrix => {
  19792. imageRectifier.transform = rectificationMatrix;
  19793. });
  19794. }
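/*
 * Worked example of the border clipping above (using a purely illustrative
 * TRACK_CLIPPING_BORDER of 0.05): for a 640x480 screen, keypoints closer than
 * 640 * 0.05 = 32 pixels to the left/right edges or 480 * 0.05 = 24 pixels to the
 * top/bottom edges are discarded before refinement and matching.
 */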
  19795. /**
  19796. * Post processing that takes place just after the GPU processing
  19797. * @param result pipeline results
  19798. * @returns state output
  19799. */
  19800. _afterUpdate(result) {
  19801. const referenceImage = this._referenceImage;
  19802. const imagePortalSink = this._pipeline.node('imagePortal');
  19803. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  19804. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  19805. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  19806. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  19807. const keypoints = result.keypoints;
  19808. const image = result.image;
  19809. // tracker output
  19810. const trackerOutput = {
  19811. keypoints: image !== undefined ? keypoints : undefined,
  19812. image: image,
  19813. screenSize: this.screenSize,
  19814. };
  19815. // decide what to do next
  19816. switch (this._step) {
  19817. case 'read-reference-image': {
  19818. // enable matching
  19819. muxOfReferenceKeypoints.port = 1;
  19820. // store reference keypoints
  19821. this._referenceKeypoints = keypoints;
  19822. // next step
  19823. this._step = 'warp-camera-image';
  19824. return speedy_vision_default().Promise.resolve({
  19825. nextState: 'pre-tracking',
  19826. trackerOutput: trackerOutput,
  19827. });
  19828. }
  19829. case 'warp-camera-image': {
  19830. // freeze reference keypoints
  19831. bufferOfReferenceKeypoints.frozen = true;
  19832. muxOfBufferOfReferenceKeypoints.port = 1;
  19833. // refine warp?
  19834. if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
  19835. this._step = 'warp-camera-image';
  19836. else
  19837. this._step = 'train-camera-image';
  19838. // warp image & go to next step
  19839. return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
  19840. nextState: 'pre-tracking',
  19841. trackerOutput: trackerOutput,
  19842. })).catch(err => {
  19843. Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
  19844. return {
  19845. nextState: 'scanning',
  19846. trackerOutput: trackerOutput,
  19847. };
  19848. });
  19849. }
  19850. case 'train-camera-image': {
  19851. // log
  19852. Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
  19853. // change the coordinates
  19854. return this._changeSpace(this._homography, this.screenSize).then(homography => {
  19855. // we're ready to track the target!
  19856. return speedy_vision_default().Promise.resolve({
  19857. //nextState: 'pre-tracking',
  19858. nextState: 'tracking',
  19859. trackerOutput: trackerOutput,
  19860. nextStateSettings: {
  19861. homography: homography,
  19862. referenceImage: referenceImage,
  19863. templateKeypoints: keypoints,
  19864. keypointPortalSink: keypointPortalSink,
  19865. imagePortalSink: imagePortalSink,
  19866. screenSize: this.screenSize,
  19867. },
  19868. });
  19869. });
  19870. }
  19871. }
  19872. }
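/*
 * Step machine of this state, as implemented above: 'read-reference-image' captures the
 * reference keypoints and enables matching; 'warp-camera-image' repeats until
 * TRACK_REFINEMENT_ITERATIONS adjustment warps have been accumulated into the homography
 * (falling back to 'scanning' on failure); 'train-camera-image' then changes the space of
 * the homography and hands everything over to the 'tracking' state.
 */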
  19873. /**
  19874. * Find an adjustment warp between the camera image and the reference image
  19875. * @param dstKeypoints destination
  19876. * @param srcKeypoints source
  19877. * @returns a promise that resolves to a 3x3 homography
  19878. */
  19879. _findWarp(dstKeypoints, srcKeypoints) {
  19880. //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
  19881. const srcCoords = [];
  19882. const dstCoords = [];
  19883. // find matching coordinates of the keypoints
  19884. for (let i = 0; i < dstKeypoints.length; i++) {
  19885. const dstKeypoint = dstKeypoints[i];
  19886. if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
  19887. const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
  19888. // the best match should be "much better" than the second best match,
  19889. // which means that they are "distinct enough"
  19890. if (d1 <= TRACK_MATCH_RATIO * d2) {
  19891. const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
  19892. srcCoords.push(srcKeypoint.x);
  19893. srcCoords.push(srcKeypoint.y);
  19894. dstCoords.push(dstKeypoint.x);
  19895. dstCoords.push(dstKeypoint.y);
  19896. }
  19897. }
  19898. }
  19899. // too few points?
  19900. const n = srcCoords.length / 2;
  19901. if (n < 4) {
  19902. return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
  19903. }
  19904. // compute warp
  19905. const model = speedy_vision_default().Matrix.Eye(3);
  19906. return this._findKeypointWarp().then(transform =>
  19907. // rectify keypoints
  19908. speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
  19909. // find warp
  19910. speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
  19911. method: 'pransac',
  19912. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  19913. numberOfHypotheses: 512 * 4,
  19914. bundleSize: 128,
  19915. })).then(_ => {
  19916. // validate the model
  19917. const a00 = model.at(0, 0);
  19918. if (Number.isNaN(a00))
  19919. throw new TrackingError(`Can't compute warp: bad keypoints`);
  19920. // done!
  19921. return model;
  19922. });
  19923. }
  19924. /**
  19925. * Find a warp to be applied to the keypoints
  19926. * @returns affine transform
  19927. */
  19928. _findKeypointWarp() {
  19929. const referenceImage = this._referenceImage;
  19930. const media = this._imageTracker.database._findMedia(referenceImage.name);
  19931. const screenSize = this.screenSize;
  19932. // no rotation is needed
  19933. if (!this._mustRotateWarpedImage(media, screenSize))
  19934. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
  19935. // rotate by 90 degrees clockwise around the pivot
  19936. const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
  19937. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
  19938. 0, 1, 0,
  19939. -1, 0, 0,
  19940. py + px, py - px, 1,
  19941. ]));
  19942. }
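/*
 * Sanity check of the rotation above: since Speedy matrices are stored in column-major
 * order, the transform maps (x, y) to (px + py - y, x + py - px). The pivot (px, py)
 * maps to itself, and a point one pixel to the right of the pivot maps to one pixel
 * below it (y grows downwards), i.e., a 90-degree clockwise rotation in screen space.
 */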
  19943. /**
  19944. * Change the space of the homography in order to improve tracking quality
  19945. * @param homography mapping coordinates from normalized target space to AR screen space
  19946. * @param screenSize AR screen size
  19947. * @returns homography mapping coordinates from AR screen space to AR screen space
  19948. */
  19949. _changeSpace(homography, screenSize) {
  19950. const sw = screenSize.width, sh = screenSize.height;
  19951. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  19952. const mat = speedy_vision_default().Matrix.Zeros(3);
  19953. return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
  19954. }
  19955. /**
  19956. * Create & setup the pipeline
  19957. * @returns pipeline
  19958. */
  19959. _createPipeline() {
  19960. const pipeline = speedy_vision_default().Pipeline();
  19961. const source = speedy_vision_default().Image.Source('source');
  19962. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  19963. const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
  19964. const screen = speedy_vision_default().Transform.Resize('screen');
  19965. const greyscale = speedy_vision_default().Filter.Greyscale();
  19966. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  19967. const nightvision = speedy_vision_default().Filter.Nightvision();
  19968. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  19969. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  19970. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19971. const blur = speedy_vision_default().Filter.GaussianBlur();
  19972. const clipper = speedy_vision_default().Keypoint.Clipper();
  19973. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  19974. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19975. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19976. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  19977. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  19978. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  19979. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  19980. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
  19981. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
  19982. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
  19983. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  19984. const imageSink = speedy_vision_default().Image.Sink('image');
  19985. source.media = null;
  19986. screen.size = speedy_vision_default().Size(0, 0);
  19987. imagePortalSource.source = null;
  19988. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19989. sourceMux.port = PORT_REFERENCE_IMAGE;
  19990. nightvision.gain = NIGHTVISION_GAIN;
  19991. nightvision.offset = NIGHTVISION_OFFSET;
  19992. nightvision.decay = NIGHTVISION_DECAY;
  19993. nightvision.quality = NIGHTVISION_QUALITY;
  19994. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19995. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19996. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19997. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  19998. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  19999. detector.quality = TRACK_HARRIS_QUALITY;
  20000. detector.capacity = TRACK_DETECTOR_CAPACITY;
  20001. subpixel.method = SUBPIXEL_METHOD;
  20002. clipper.size = TRACK_MAX_KEYPOINTS;
  20003. borderClipper.imageSize = screen.size;
  20004. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  20005. matcher.k = 2;
  20006. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20007. keypointPortalSource.source = keypointPortalSink;
  20008. muxOfReferenceKeypoints.port = 0;
  20009. muxOfBufferOfReferenceKeypoints.port = 0;
  20010. bufferOfReferenceKeypoints.frozen = false;
  20011. keypointSink.turbo = false;
  20012. // prepare input
  20013. source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
  20014. imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
  20015. sourceMux.output().connectTo(screen.input());
  20016. screen.output().connectTo(greyscale.input());
  20017. // preprocess images
  20018. greyscale.output().connectTo(imageRectifier.input());
  20019. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  20020. imageRectifier.output().connectTo(nightvision.input());
  20021. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20022. nightvisionMux.output().connectTo(blur.input());
  20023. // keypoint detection & clipping
  20024. nightvisionMux.output().connectTo(detector.input());
  20025. detector.output().connectTo(borderClipper.input());
  20026. borderClipper.output().connectTo(clipper.input());
  20027. // keypoint refinement
  20028. imageRectifier.output().connectTo(denoiser.input());
  20029. denoiser.output().connectTo(subpixel.input('image'));
  20030. clipper.output().connectTo(subpixel.input('keypoints'));
  20031. // keypoint description
  20032. blur.output().connectTo(descriptor.input('image'));
  20033. subpixel.output().connectTo(descriptor.input('keypoints'));
  20034. // keypoint matching
  20035. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  20036. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  20037. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  20038. descriptor.output().connectTo(matcher.input('keypoints'));
  20039. // store reference keypoints
  20040. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  20041. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  20042. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  20043. // portals
  20044. descriptor.output().connectTo(keypointPortalSink.input());
  20045. // prepare output
  20046. descriptor.output().connectTo(keypointRectifier.input());
  20047. keypointRectifier.output().connectTo(keypointSink.input());
  20048. matcher.output().connectTo(keypointSink.input('matches'));
  20049. //imageRectifier.output().connectTo(imageSink.input());
  20050. // done!
  20051. pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  20052. return pipeline;
  20053. }
  20054. }
  20055. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
  20056. /*
  20057. * MARTINS.js
  20058. * GPU-accelerated Augmented Reality for the web
  20059. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20060. *
  20061. * This program is free software: you can redistribute it and/or modify
  20062. * it under the terms of the GNU Lesser General Public License as published
  20063. * by the Free Software Foundation, either version 3 of the License, or
  20064. * (at your option) any later version.
  20065. *
  20066. * This program is distributed in the hope that it will be useful,
  20067. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20068. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20069. * GNU Lesser General Public License for more details.
  20070. *
  20071. * You should have received a copy of the GNU Lesser General Public License
  20072. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20073. *
  20074. * image-tracker-event.ts
  20075. * Events emitted by an Image Tracker
  20076. */
  20077. /**
  20078. * An event emitted by an Image Tracker
  20079. */
  20080. class ImageTrackerEvent extends AREvent {
  20081. /**
  20082. * Constructor
  20083. * @param type event type
  20084. * @param referenceImage optional reference image
  20085. */
  20086. constructor(type, referenceImage) {
  20087. super(type);
  20088. this._referenceImage = referenceImage;
  20089. }
  20090. /**
  20091. * Reference image
  20092. */
  20093. get referenceImage() {
  20094. return this._referenceImage;
  20095. }
  20096. }
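/*
 * A minimal sketch of how this event class is consumed (the event type string
 * 'targetfound' is assumed here purely for illustration):
 *
 *   const event = new ImageTrackerEvent('targetfound', referenceImage);
 *   console.log(`found "${event.referenceImage.name}"`);
 */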
  20097. ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
  20098. /*
  20099. * MARTINS.js
  20100. * GPU-accelerated Augmented Reality for the web
  20101. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20102. *
  20103. * This program is free software: you can redistribute it and/or modify
  20104. * it under the terms of the GNU Lesser General Public License as published
  20105. * by the Free Software Foundation, either version 3 of the License, or
  20106. * (at your option) any later version.
  20107. *
  20108. * This program is distributed in the hope that it will be useful,
  20109. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20110. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20111. * GNU Lesser General Public License for more details.
  20112. *
  20113. * You should have received a copy of the GNU Lesser General Public License
  20114. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20115. *
  20116. * camera-model.ts
  20117. * Camera model
  20118. */
  20119. /** Number of samples we'll be keeping to help calibrate the camera */
  20120. const INTRISICS_SAMPLES = 401; //201; //31; // odd number
  20121. /** Whether or not to auto-calibrate the camera */
  20122. const FOVY_AUTODETECT = false; //true;
  20123. /** A guess of the vertical field-of-view of a generic camera, in degrees */
  20124. const FOVY_GUESS = 45; //50; // will be part of the viewing frustum
  20125. /** Number of iterations used to refine the estimated pose */
  20126. const POSE_ITERATIONS = 30;
  20127. /** Number of samples used in the rotation filter */
  20128. const ROTATION_FILTER_SAMPLES = 10;
  20129. /** Number of samples used in the translation filter */
  20130. const TRANSLATION_FILTER_SAMPLES = 10;
  20131. /** Convert degrees to radians */
  20132. const DEG2RAD = 0.017453292519943295; // pi / 180
  20133. /** Convert radians to degrees */
  20134. const RAD2DEG = 57.29577951308232; // 180 / pi
  20135. /** Numerical tolerance */
  20136. const EPSILON = 1e-6;
  20137. /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
  20138. const FX = 0;
  20139. /** Index of the vertical focal length in the camera intrinsics matrix */
  20140. const FY = 4;
  20141. /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
  20142. const U0 = 6;
  20143. /** Index of the vertical position of the principal point in the camera intrinsics matrix */
  20144. const V0 = 7;
  20145. /** Translation refinement: predefined buffers for efficiency */
  20146. const TRANSLATION_REFINEMENT_BUFFERS = (() => {
  20147. const l = 1.0;
  20148. const x = [0, l, 0, -l, 0];
  20149. const y = [-l, 0, l, 0, 0];
  20150. const n = x.length;
  20151. return Object.freeze({
  20152. x, y,
  20153. a1: new Array(n),
  20154. a2: new Array(n),
  20155. a3: new Array(n),
  20156. m: new Array(3 * n * 3),
  20157. v: new Array(3 * n),
  20158. t: new Array(3),
  20159. r: new Array(3 * n),
  20160. c: new Array(3),
  20161. Mc: new Array(3 * n),
  20162. });
  20163. })();
  20164. /** Translation refinement: number of iterations */
  20165. const TRANSLATION_REFINEMENT_ITERATIONS = 3; // 1; // 5;
  20166. /** Translation refinement: number of samples */
  20167. const TRANSLATION_REFINEMENT_SAMPLES = 5; // TRANSLATION_REFINEMENT_BUFFERS.x.length;
20168. /** Translation refinement: three times the number of samples */
  20169. const TRANSLATION_REFINEMENT_SAMPLES_3X = 15; //3 * TRANSLATION_REFINEMENT_SAMPLES;
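/*
 * Quick arithmetic check of the buffers above: with n = x.length = 5 samples,
 * the m buffer holds 3n * 3 = 45 entries and v, r and Mc hold 3n = 15 entries each,
 * which matches TRANSLATION_REFINEMENT_SAMPLES = 5 and TRANSLATION_REFINEMENT_SAMPLES_3X = 15.
 */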
  20170. /**
  20171. * Camera model
  20172. */
  20173. class CameraModel {
  20174. /**
  20175. * Constructor
  20176. */
  20177. constructor() {
  20178. this._screenSize = speedy_vision_default().Size(0, 0);
  20179. this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
  20180. this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // identity matrix
  20181. this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // no rotation & no translation [ R | t ] = [ I | 0 ]
  20182. this._f = (new Array(INTRISICS_SAMPLES)).fill(this._intrinsics[FY]);
  20183. this._fp = 0;
  20184. this._partialRotationBuffer = [];
  20185. this._translationBuffer = [];
  20186. }
  20187. /**
  20188. * Initialize the model
  20189. * @param screenSize
  20190. */
  20191. init(screenSize) {
  20192. // validate
  20193. if (screenSize.area() == 0)
  20194. throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
  20195. // set the screen size
  20196. this._screenSize.width = screenSize.width;
  20197. this._screenSize.height = screenSize.height;
  20198. // reset the model
  20199. this._resetIntrinsics();
  20200. this._resetExtrinsics();
  20201. // log
  20202. Utils.log(`Initializing the camera model...`);
  20203. }
  20204. /**
  20205. * Release the model
  20206. */
  20207. release() {
  20208. this.reset();
  20209. return null;
  20210. }
  20211. /**
  20212. * Update the camera model
  20213. * @param homography 3x3 perspective transform
  20214. * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
  20215. * @returns promise that resolves to a camera matrix
  20216. */
  20217. update(homography, screenSize) {
  20218. // validate the shape of the homography
  20219. if (homography.rows != 3 || homography.columns != 3)
  20220. throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
  20221. // validate screenSize
  20222. if (screenSize.area() == 0)
  20223. throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
  20224. // changed screen size?
  20225. if (!this._screenSize.equals(screenSize)) {
  20226. Utils.log(`Camera model: detected a change in screen size...`);
  20227. // update the screen size
  20228. this._screenSize.width = screenSize.width;
  20229. this._screenSize.height = screenSize.height;
  20230. // reset camera
  20231. this.reset();
  20232. }
  20233. // read the entries of the homography
  20234. const h = homography.read();
  20235. const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
  20236. // validate the homography (homography matrices aren't singular)
  20237. const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
  20238. if (Math.abs(det) < EPSILON) {
  20239. Utils.warning(`Can't update the camera model using an invalid homography matrix`);
  20240. return speedy_vision_default().Promise.resolve(this._matrix);
  20241. }
  20242. // estimate the focal length (auto-calibration)
  20243. const f = this._estimateFocal(homography);
  20244. if (f > 0)
  20245. this._storeFocal(f);
  20246. //console.log(this.fovy * RAD2DEG);
  20247. // estimate the pose
  20248. const pose = this._estimatePose(homography);
  20249. this._storePose(pose);
  20250. // compute the camera matrix
  20251. const C = this.denormalizer();
  20252. const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
  20253. const E = speedy_vision_default().Matrix(3, 4, this._extrinsics);
  20254. this._matrix.setToSync(K.times(E).times(C));
  20255. //console.log("intrinsics -----------", K.toString());
  20256. //console.log("matrix ----------------",this._matrix.toString());
  20257. return speedy_vision_default().Promise.resolve(this._matrix);
  20258. }
  20259. /**
  20260. * Reset camera model
  20261. */
  20262. reset() {
  20263. this._resetIntrinsics();
  20264. this._resetExtrinsics();
  20265. }
  20266. /**
  20267. * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
  20268. * 2D AR screen space (measured in pixels)
  20269. * @returns 3x4 camera matrix
  20270. */
  20271. get matrix() {
  20272. return this._matrix;
  20273. }
  20274. /**
  20275. * Camera intrinsics matrix
  20276. * @returns 3x3 intrinsics matrix in column-major format
  20277. */
  20278. get intrinsics() {
  20279. return this._intrinsics;
  20280. }
  20281. /**
  20282. * Camera extrinsics matrix
  20283. * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
  20284. */
  20285. get extrinsics() {
  20286. return this._extrinsics;
  20287. }
  20288. /**
  20289. * Convert coordinates from normalized space [-1,1]^3 to a
  20290. * "3D pixel space" based on the dimensions of the AR screen.
  20291. *
20292. * We perform a 180-degree rotation around the x-axis so that
  20293. * it looks nicer (the y-axis grows downwards in image space).
  20294. *
  20295. * The final camera matrix is P = K * [ R | t ] * C, where
  20296. * C is this conversion matrix. The intent behind this is to
  20297. * make tracking independent of target and screen sizes.
  20298. *
  20299. * Reminder: we use a right-handed coordinate system in 3D!
  20300. * In 2D image space the coordinate system is left-handed.
  20301. *
  20302. * @returns 4x4 conversion matrix C
  20303. */
  20304. denormalizer() {
  20305. const w = this._screenSize.width / 2; // half width, in pixels
  20306. const h = this._screenSize.height / 2; // half height, in pixels
  20307. const d = Math.min(w, h); // virtual unit length, in pixels
  20308. /*
  20309. return Speedy.Matrix(4, 4, [
  20310. 1, 0, 0, 0,
  20311. 0,-1, 0, 0,
  20312. 0, 0,-1, 0,
  20313. w/d, h/d, 0, 1/d
  20314. ]);
  20315. */
  20316. return speedy_vision_default().Matrix(4, 4, [
  20317. d, 0, 0, 0,
  20318. 0, -d, 0, 0,
  20319. 0, 0, -d, 0,
  20320. w, h, 0, 1,
  20321. ]);
  20322. }
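/*
A quick numeric sketch of what C does (illustrative only; the 1280x720
screen size is hypothetical). With the column-major 4x4 above,
C [ x y z 1 ]' = [ d*x + w, -d*y + h, -d*z, 1 ]':
const w = 1280 / 2, h = 720 / 2, d = Math.min(w, h);
const toPixels = (x, y, z) => [ d * x + w, -d * y + h, -d * z ];
console.log(toPixels(0, 0, 0));  // [640, 360, 0]: the center of the screen
console.log(toPixels(1, -1, 0)); // [1000, 720, 0]: 1 unit right, 1 unit down
*/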
  20323. /**
  20324. * Size of the AR screen space, in pixels
  20325. * @returns size in pixels
  20326. */
  20327. get screenSize() {
  20328. return this._screenSize;
  20329. }
  20330. /**
  20331. * Focal length in pixel units (projection distance in the pinhole camera model)
20332. * i.e., (focal length in mm) * (number of pixels per mm)
  20333. * @returns focal length
  20334. */
  20335. get focalLength() {
  20336. return this._intrinsics[FY]; // fx == fy
  20337. }
  20338. /**
  20339. * Horizontal field-of-view, given in radians
20340. * @returns horizontal field-of-view
  20341. */
  20342. get fovx() {
  20343. return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
  20344. }
  20345. /**
  20346. * Vertical field-of-view, given in radians
  20347. * @returns vertical field-of-view
  20348. */
  20349. get fovy() {
  20350. return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
  20351. }
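/*
Sketch of the relation used by fovx / fovy above: with the principal point at
the center of the screen, half of the screen spans u0 (or v0) pixels at a
projection distance of fx (or fy) pixels, so
  tan(fovx / 2) = u0 / fx   and   tan(fovy / 2) = v0 / fy.
Example with a hypothetical 1280x720 screen and f = 720 pixels:
const fovy = 2 * Math.atan(360 / 720) * 180 / Math.PI; // ~53.13 degrees
*/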
  20352. /**
  20353. * Principal point
  20354. * @returns principal point, in pixel coordinates
  20355. */
  20356. principalPoint() {
  20357. return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
  20358. }
  20359. /**
  20360. * Reset camera extrinsics
  20361. */
  20362. _resetExtrinsics() {
  20363. // set the rotation matrix to the identity
  20364. this._extrinsics.fill(0);
  20365. this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
  20366. // reset filters
  20367. this._partialRotationBuffer.length = 0;
  20368. this._translationBuffer.length = 0;
  20369. }
  20370. /**
  20371. * Reset camera intrinsics
  20372. */
  20373. _resetIntrinsics() {
  20374. const u0 = this._screenSize.width / 2;
  20375. const v0 = this._screenSize.height / 2;
  20376. const f = v0 / Math.tan(DEG2RAD * FOVY_GUESS / 2);
  20377. this._intrinsics[FX] = f;
  20378. this._intrinsics[FY] = f;
  20379. this._intrinsics[U0] = u0;
  20380. this._intrinsics[V0] = v0;
  20381. this._f.fill(this._intrinsics[FY]);
  20382. this._fp = 0;
  20383. }
  20384. /**
  20385. * Estimate the focal length
  20386. * @param homography valid homography
20387. * @returns estimated focal length; 0 if auto-detection is disabled; the current focal length if the estimate is invalid
  20388. */
  20389. _estimateFocal(homography) {
  20390. // auto-detect the focal length?
  20391. if (!FOVY_AUTODETECT)
  20392. return 0;
  20393. // read the entries of the homography
  20394. const h = homography.read();
  20395. const h11 = h[0], h12 = h[3]; //, h13 = h[6];
  20396. const h21 = h[1], h22 = h[4]; //, h23 = h[7];
  20397. const h31 = h[2], h32 = h[5]; //, h33 = h[8];
  20398. // read the principal point
  20399. const u0 = this._intrinsics[U0];
  20400. const v0 = this._intrinsics[V0];
  20401. // estimate the focal length based on the orthogonality
  20402. // constraint r1'r2 = 0 of a rotation matrix
  20403. const f2 = -((h11 / h31 - u0) * (h12 / h32 - u0) + (h21 / h31 - v0) * (h22 / h32 - v0));
  20404. // can't estimate it?
  20405. if (f2 < 0)
  20406. return this._intrinsics[FY];
  20407. //return 0;
  20408. // done!
  20409. return Math.sqrt(f2);
  20410. }
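/*
Where the f2 formula above comes from (sketch): write H ~ K [ r1 | r2 | t ]
with K = [ f 0 u0 ; 0 f v0 ; 0 0 1 ], so that r1 ~ K^(-1) h1 and r2 ~ K^(-1) h2,
where h1, h2 are the first two columns of H. Imposing r1'r2 = 0:
  (h11 - u0 h31)(h12 - u0 h32) / f^2 + (h21 - v0 h31)(h22 - v0 h32) / f^2 + h31 h32 = 0
Dividing by h31 h32 (assumed non-zero) and solving for f^2:
  f^2 = -( (h11/h31 - u0)(h12/h32 - u0) + (h21/h31 - v0)(h22/h32 - v0) )
which is the expression computed above. When the right-hand side is negative,
the estimate is discarded and the current focal length is kept instead.
*/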
  20411. /**
  20412. * Store an estimated focal length
  20413. * @param f estimated focal length
  20414. */
  20415. _storeFocal(f) {
  20416. // store the focal length
  20417. this._f[this._fp] = f;
  20418. this._fp = (this._fp + 1) % INTRISICS_SAMPLES;
  20419. // take the median of the estimated focal lengths
  20420. const sorted = this._f.concat([]).sort((a, b) => a - b);
  20421. const median = sorted[sorted.length >>> 1];
  20422. // update the intrinsics matrix
  20423. this._intrinsics[FX] = this._intrinsics[FY] = median;
  20424. /*
  20425. // test
  20426. const u0 = this._intrinsics[U0];
  20427. const v0 = this._intrinsics[V0];
  20428. const fovx = 2 * Math.atan(u0 / median) * RAD2DEG;
  20429. const fovy = 2 * Math.atan(v0 / median) * RAD2DEG;
  20430. console.log('---------------');
  20431. console.log("fov:",fovx,fovy);
  20432. console.log("f:",median);
  20433. */
  20434. }
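/*
Note (sketch): sorted[sorted.length >>> 1] picks the upper median when the
number of samples is even. E.g., for 4 hypothetical estimates
[ 980, 1000, 1020, 2000 ] (already sorted), index 4 >>> 1 = 2 selects 1020,
so a single outlier such as 2000 does not move the stored focal length.
*/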
  20435. /**
  20436. * Compute a normalized homography H' = K^(-1) * H for an
  20437. * ideal pinhole with f = 1 and principal point = (0,0)
  20438. * @param homography homography H to be normalized
  20439. * @param f focal length
  20440. * @returns normalized homography H'
  20441. */
  20442. _normalizeHomography(homography, f = this._intrinsics[FY]) {
  20443. const h = homography.read();
  20444. const u0 = this._intrinsics[U0];
  20445. const v0 = this._intrinsics[V0];
  20446. const h11 = h[0] - u0 * h[2], h12 = h[3] - u0 * h[5], h13 = h[6] - u0 * h[8];
  20447. const h21 = h[1] - v0 * h[2], h22 = h[4] - v0 * h[5], h23 = h[7] - v0 * h[8];
  20448. const h31 = h[2] * f, h32 = h[5] * f, h33 = h[8] * f;
  20449. return speedy_vision_default().Matrix(3, 3, [
  20450. h11, h21, h31,
  20451. h12, h22, h32,
  20452. h13, h23, h33,
  20453. ]);
  20454. }
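/*
Sanity check (sketch): with K = [ f 0 u0 ; 0 f v0 ; 0 0 1 ], we have
K^(-1) = (1/f) [ 1 0 -u0 ; 0 1 -v0 ; 0 0 f ], and the entries computed above
are exactly f * (K^(-1) H):
  row 1:  h1j - u0 h3j
  row 2:  h2j - v0 h3j
  row 3:  f h3j
Since a homography is only defined up to a non-zero scale, multiplying
K^(-1) H by f does not change the transform it represents.
*/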
  20455. /**
  20456. * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
  20457. * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
  20458. * @returns a 3x3 matrix
  20459. */
  20460. _estimatePartialPose(normalizedHomography) {
  20461. const h = normalizedHomography.read();
  20462. const h11 = h[0], h12 = h[3], h13 = h[6];
  20463. const h21 = h[1], h22 = h[4], h23 = h[7];
  20464. const h31 = h[2], h32 = h[5], h33 = h[8];
  20465. // select the sign so that t3 = tz > 0
  20466. const sign = h33 >= 0 ? 1 : -1;
  20467. // compute the scale factor
  20468. const h1norm = Math.sqrt(h11 * h11 + h21 * h21 + h31 * h31);
  20469. const h2norm = Math.sqrt(h12 * h12 + h22 * h22 + h32 * h32);
  20470. //const scale = sign * 2 / (h1norm + h2norm);
  20471. //const scale = sign / h1norm;
  20472. //const scale = sign / h2norm;
  20473. const scale = sign / Math.max(h1norm, h2norm); // this seems to work. why?
  20474. // invalid homography?
  20475. if (Number.isNaN(scale))
  20476. return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
  20477. // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
  20478. // if h1norm is not approximately h2norm, it means that the first two columns of
  20479. // the normalized homography are not really encoding a rotation (up to a scale)
  20480. // what is causing this? does h3 (and h33) tell us anything about it?
  20481. // what about the intrinsics matrix? the principal point...? the fov...?
  20482. //console.log("h1,h2",h1norm,h2norm);
  20483. //console.log(normalizedHomography.toString());
  20484. // recover the translation and the rotation
  20485. const t1 = scale * h13;
  20486. const t2 = scale * h23;
  20487. const t3 = scale * h33;
  20488. const r11 = scale * h11;
  20489. const r21 = scale * h21;
  20490. const r31 = scale * h31;
  20491. const r12 = scale * h12;
  20492. const r22 = scale * h22;
  20493. const r32 = scale * h32;
  20494. // refine the pose
  20495. const r = this._refineRotation(r11, r21, r31, r12, r22, r32);
  20496. const t = this._refineTranslation(normalizedHomography, r, [t1, t2, t3]);
  20497. //const t = [t1, t2, t3]; // faster, but less accurate
  20498. // done!
  20499. return speedy_vision_default().Matrix(3, 3, r.concat(t)); // this is possibly NaN... why? homography...
  20500. }
  20501. /**
  20502. * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
  20503. * @param r11 x of r1
  20504. * @param r21 y of r1
  20505. * @param r31 z of r1
  20506. * @param r12 x of r2
  20507. * @param r22 y of r2
  20508. * @param r32 z of r2
  20509. * @returns a 3x2 matrix R such that R'R = I (column-major format)
  20510. */
  20511. _refineRotation(r11, r21, r31, r12, r22, r32) {
  20512. /*
  20513. A little technique I figured out to correct the rotation vectors
  20514. ----------------------------------------------------------------
  20515. We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
  20516. R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
  20517. because vectors r1 and r2 are not perfectly orthonormal due to noise.
  20518. Let's first notice that R'R is symmetric. You can easily check that its
  20519. two eigenvalues are both real and positive (as long as r1, r2 != 0 and
  20520. r1 is not parallel to r2, but we never take such vectors as input).
20521. R'R = [ r1'r1  r1'r2 ; r1'r2  r2'r2 ]
20522. is a 2x2 positive-definite matrix of rank 2
  20523. We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
  20524. chosen to be orthogonal and D is a diagonal matrix whose entries are
  20525. the eigenvalues of R'R.
  20526. Let LL' be the Cholesky decomposition of D. Such decomposition exists
  20527. and is trivially computed: just take the square roots of the entries of
  20528. D. Since L is diagonal, we have L = L'. Its inverse is also trivially
  20529. computed - call it Linv.
  20530. Now, define a 2x2 correction matrix C as follows:
  20531. C = Q * Linv * Q'
  20532. This matrix rotates the input vector, scales it by some amount, and
  20533. then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
  20534. We compute RC in order to correct the rotation vectors. We take its
  20535. two columns as the corrected vectors.
  20536. In order to show that the two columns of RC are orthonormal, we can
  20537. show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
  20538. expand the expression:
  20539. (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
  20540. Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
  20541. Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
  20542. I have provided below a closed formula to correct the rotation vectors.
  20543. What C does to R is very interesting: it makes the singular values
20544. become 1. If U S V' is an SVD of R, then R'R = V S^2 V'. The singular
  20545. values of R are the square roots of the eigenvalues of R'R. Letting
  20546. S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
  20547. means that RC is equivalent to the correction "trick" using the SVD
  20548. found in the computer vision literature (i.e., compute the SVD and
  20549. return U V'). That "trick" is known to return the rotation matrix that
  20550. minimizes the Frobenius norm of the difference between the input and
  20551. the output. Consequently, the technique I have just presented is also
  20552. optimal in that sense!
  20553. By the way, the input matrix R does not need to be 3x2.
  20554. */
  20555. // compute the entries of R'R
  20556. const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
  20557. const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
  20558. const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
  20559. // compute the two real eigenvalues of R'R
  20560. const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
  20561. const sqrt = Math.sqrt(delta); // delta >= 0 always
  20562. const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
  20563. const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
  20564. // compute two unit eigenvectors qi = (xi,yi) of R'R
  20565. const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
  20566. const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
  20567. const y1 = x1 / alpha1;
  20568. const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
  20569. const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
  20570. const y2 = x2 / alpha2;
  20571. // compute the Cholesky decomposition LL' of the diagonal matrix D
  20572. // whose entries are the two eigenvalues of R'R and then invert L
  20573. const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
  20574. const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
  20575. // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
  20576. // is orthogonal and Linv is computed as above
  20577. const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
  20578. const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
  20579. const C = Q.times(Linv).times(Qt);
  20580. // correct the rotation vectors r1 and r2 using C
  20581. const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
  20582. return speedy_vision_default().Matrix(R.times(C)).read();
  20583. }
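/*
A small self-contained check of the correction above (illustrative; plain JS,
hypothetical input vectors). It builds C = (R'R)^(-1/2) from a 2x2 symmetric
eigendecomposition and verifies that the corrected columns are orthonormal:
const r1 = [ 0.99, 0.02, -0.05 ], r2 = [ 0.03, 1.02, 0.04 ];
const dot = (a, b) => a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
// S = R'R = [ s11 s12 ; s12 s22 ]
const s11 = dot(r1, r1), s12 = dot(r1, r2), s22 = dot(r2, r2);
// eigenvalues of S and a unit eigenvector q1; q2 is orthogonal to q1
const disc = Math.sqrt((s11 - s22) * (s11 - s22) + 4 * s12 * s12);
const l1 = (s11 + s22 + disc) / 2, l2 = (s11 + s22 - disc) / 2;
const n1 = Math.hypot(s12, l1 - s11);
const q1 = [ s12 / n1, (l1 - s11) / n1 ], q2 = [ -q1[1], q1[0] ];
// C = Q diag(1/sqrt(l1), 1/sqrt(l2)) Q' = (R'R)^(-1/2), symmetric 2x2
const w1 = 1 / Math.sqrt(l1), w2 = 1 / Math.sqrt(l2);
const C11 = w1 * q1[0] * q1[0] + w2 * q2[0] * q2[0];
const C12 = w1 * q1[0] * q1[1] + w2 * q2[0] * q2[1];
const C22 = w1 * q1[1] * q1[1] + w2 * q2[1] * q2[1];
// corrected columns: [ c1 | c2 ] = [ r1 | r2 ] C
const c1 = r1.map((v, i) => v * C11 + r2[i] * C12);
const c2 = r1.map((v, i) => v * C12 + r2[i] * C22);
console.log(dot(c1, c1), dot(c2, c2), dot(c1, c2)); // ~1, ~1, ~0
*/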
  20584. /**
  20585. * Compute a refined translation vector
  20586. * @param normalizedHomography ideal pinhole K = I
  20587. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  20588. * @param t0 initial estimate for the translation vector
  20589. * @returns 3x1 translation vector in column-major format
  20590. */
  20591. _refineTranslation(normalizedHomography, rot, t0) {
  20592. /*
  20593. Given a normalized homography H, the rotation vectors r1, r2, and a
  20594. translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
  20595. scale factor s.
  20596. If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
  20597. [ r1 | r2 | t ] u is parallel to H u, which means that their cross
  20598. product is zero:
  20599. [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
  20600. The following code finds an optimal translation vector t based on the
  20601. above observation. H, r1, r2 are known.
  20602. */
  20603. const B = TRANSLATION_REFINEMENT_BUFFERS;
  20604. const n = TRANSLATION_REFINEMENT_SAMPLES;
  20605. const n3 = TRANSLATION_REFINEMENT_SAMPLES_3X;
  20606. Utils.assert(B.x.length === n);
  20607. const h = normalizedHomography.read();
  20608. const h11 = h[0], h12 = h[3], h13 = h[6];
  20609. const h21 = h[1], h22 = h[4], h23 = h[7];
  20610. const h31 = h[2], h32 = h[5], h33 = h[8];
  20611. const r11 = rot[0], r12 = rot[3];
  20612. const r21 = rot[1], r22 = rot[4];
  20613. const r31 = rot[2], r32 = rot[5];
  20614. // get sample points (xi, yi), 0 <= i < n
  20615. const x = B.x, y = B.y;
  20616. // set auxiliary values: ai = H [ xi yi 1 ]'
  20617. const a1 = B.a1, a2 = B.a2, a3 = B.a3;
  20618. for (let i = 0; i < n; i++) {
  20619. a1[i] = x[i] * h11 + y[i] * h12 + h13;
  20620. a2[i] = x[i] * h21 + y[i] * h22 + h23;
  20621. a3[i] = x[i] * h31 + y[i] * h32 + h33;
  20622. }
  20623. // solve M t = v for t; M: 3n x 3, v: 3n x 1, t: 3 x 1 (linear least squares)
  20624. const m = B.m, v = B.v;
  20625. for (let i = 0, k = 0; k < n; i += 3, k++) {
  20626. m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
  20627. m[i + n3] = -(m[i + 1] = a3[k]);
  20628. m[i + 2] = -(m[i + n3 + n3] = a2[k]);
  20629. m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
  20630. v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
  20631. v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
  20632. v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
  20633. }
  20634. /*
20635. // this works, but I want something more lightweight
  20636. const M = Speedy.Matrix(n3, 3, m);
  20637. const v_ = Speedy.Matrix(n3, 1, v);
  20638. return Speedy.Matrix(M.ldiv(v_)).read();
  20639. */
  20640. /*
  20641. Gradient descent with optimal step size / learning rate
  20642. -------------------------------------------------------
  20643. Let's find the column-vector x that minimizes the error function
  20644. E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
  20645. least squares. We want to find x easily, QUICKLY and iteratively.
  20646. The update rule of gradient descent is set to:
  20647. x := x - w * grad(E)
  20648. where w is the learning rate and grad(E) is the gradient of E(x):
  20649. grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
  20650. Let's adjust w to make x "converge quickly". Define function S(w) as:
  20651. S(w) = x - w * grad(E) (step)
  20652. and another function F(w) as:
  20653. F(w) = E(S(w))
  20654. which is the error of the step. We minimize F by setting its derivative
  20655. to zero:
20656. 0 = dF/dw = (dF/dS) (dS/dw)
  20658. What follows is a fair amount of algebra. Do the math and you'll find
  20659. the following optimal update rule:
20660. x := x - [ (c'c) / ((Ac)'(Ac)) ] c
20661. where c = A'r = A'(Ax - b)
  20664. */
  20665. // initial guess
  20666. const t = B.t;
  20667. t[0] = t0[0];
  20668. t[1] = t0[1];
  20669. t[2] = t0[2];
  20670. // gradient descent: super lightweight implementation
  20671. const r = B.r, c = B.c, Mc = B.Mc;
  20672. for (let it = 0; it < TRANSLATION_REFINEMENT_ITERATIONS; it++) {
  20673. // compute residual r = Mt - v
  20674. for (let i = 0; i < n3; i++) {
  20675. r[i] = 0;
  20676. for (let j = 0; j < 3; j++)
  20677. r[i] += m[j * n3 + i] * t[j];
  20678. r[i] -= v[i];
  20679. }
  20680. // compute c = M'r
  20681. for (let i = 0; i < 3; i++) {
  20682. c[i] = 0;
  20683. for (let j = 0; j < n3; j++)
  20684. c[i] += m[i * n3 + j] * r[j];
  20685. }
  20686. // compute Mc
  20687. for (let i = 0; i < n3; i++) {
  20688. Mc[i] = 0;
  20689. for (let j = 0; j < 3; j++)
  20690. Mc[i] += m[j * n3 + i] * c[j];
  20691. }
  20692. // compute num = c'c and den = (Mc)'(Mc)
  20693. let num = 0, den = 0;
  20694. for (let i = 0; i < 3; i++)
  20695. num += c[i] * c[i];
  20696. for (let i = 0; i < n3; i++)
  20697. den += Mc[i] * Mc[i];
  20698. // compute num / den
  20699. const frc = num / den;
  20700. if (Number.isNaN(frc))
  20701. break;
  20702. // iterate: t = t - (num / den) * c
  20703. for (let i = 0; i < 3; i++)
  20704. t[i] -= frc * c[i];
  20705. }
  20706. //console.log("OLD t:\n\n",t0.join('\n'));
  20707. //console.log("new t:\n\n",t.join('\n'));
  20708. // done!
  20709. return t;
  20710. }
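/*
The update rule above in isolation (illustrative sketch; plain JS; the tiny
system A, b and the iteration count are hypothetical). It solves
min ||Ax - b||^2 with the optimal-step gradient descent described above:
const A = [ [ 2, 0 ], [ 0, 3 ], [ 1, 1 ] ]; // 3x2
const b = [ 2, 3, 2 ];                      // least-squares solution x = [1, 1]
const x = [ 0, 0 ];
for (let it = 0; it < 20; it++) {
    const r = A.map((row, i) => row[0] * x[0] + row[1] * x[1] - b[i]); // r = Ax - b
    const c = [ 0, 1 ].map(j => A.reduce((s, row, i) => s + row[j] * r[i], 0)); // c = A'r
    const Ac = A.map(row => row[0] * c[0] + row[1] * c[1]);
    const num = c[0] * c[0] + c[1] * c[1];
    const den = Ac.reduce((s, v) => s + v * v, 0);
    if (den < 1e-12) break; // converged (or degenerate); avoid 0/0
    x[0] -= (num / den) * c[0];
    x[1] -= (num / den) * c[1];
}
console.log(x); // ~[1, 1]
*/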
  20711. /**
  20712. * Apply a smoothing filter to the partial pose
  20713. * @param partialPose 3x3 [ r1 | r2 | t ]
  20714. * @returns filtered partial pose
  20715. */
  20716. _filterPartialPose(partialPose) {
  20717. const avg = new Array(9).fill(0);
  20718. const entries = partialPose.read();
  20719. const rotationBlock = entries.slice(0, 6);
  20720. const translationBlock = entries.slice(6, 9);
  20721. // how many samples should we store, at most?
  20722. const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
  20723. const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
  20724. const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
  20725. // is it a valid partial pose?
  20726. if (!Number.isNaN(entries[0])) {
  20727. // store samples
  20728. this._partialRotationBuffer.unshift(rotationBlock);
  20729. if (this._partialRotationBuffer.length > N)
  20730. this._partialRotationBuffer.length = N;
  20731. this._translationBuffer.unshift(translationBlock);
  20732. if (this._translationBuffer.length > M)
  20733. this._translationBuffer.length = M;
  20734. }
  20735. else if (this._partialRotationBuffer.length == 0) {
  20736. // invalid pose, no samples
  20737. return speedy_vision_default().Matrix.Eye(3);
  20738. }
  20739. // average *nearby* rotations
  20740. const n = this._partialRotationBuffer.length;
  20741. for (let i = 0; i < n; i++) {
  20742. const r = this._partialRotationBuffer[i];
  20743. for (let j = 0; j < 6; j++)
  20744. avg[j] += r[j] / n;
  20745. }
  20746. const r = this._refineRotation(avg[0], avg[1], avg[2], avg[3], avg[4], avg[5]);
  20747. // average translations
  20748. const m = this._translationBuffer.length;
  20749. for (let i = 0; i < m; i++) {
  20750. const t = this._translationBuffer[i];
  20751. for (let j = 0; j < 3; j++)
  20752. avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
  20753. //avg[6 + j] += t[j] / m;
  20754. }
  20755. const t = [avg[6], avg[7], avg[8]];
  20756. // done!
  20757. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  20758. }
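/*
About the translation average above (sketch): sample i (0 = most recent) gets
weight (m - i) / (m (m + 1) / 2). The weights decay linearly with age and sum
to 1; e.g., for m = 4 hypothetical samples they are [ 4, 3, 2, 1 ] / 10 =
[ 0.4, 0.3, 0.2, 0.1 ], so recent samples dominate, whereas the rotation
block above uses a plain (uniform) average.
*/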
  20759. /**
  20760. * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
  20761. * @param partialPose
  20762. * @returns 3x4 matrix
  20763. */
  20764. _estimateFullPose(partialPose) {
  20765. const p = partialPose.read();
  20766. const r11 = p[0], r12 = p[3], t1 = p[6];
  20767. const r21 = p[1], r22 = p[4], t2 = p[7];
  20768. const r31 = p[2], r32 = p[5], t3 = p[8];
  20769. // r3 = +- ( r1 x r2 )
  20770. let r13 = r21 * r32 - r31 * r22;
  20771. let r23 = r31 * r12 - r11 * r32;
  20772. let r33 = r11 * r22 - r21 * r12;
  20773. // let's make sure that det R = +1 (keep the orientation)
  20774. const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
  20775. if (det < 0) {
  20776. r13 = -r13;
  20777. r23 = -r23;
  20778. r33 = -r33;
  20779. }
  20780. // done!
  20781. return speedy_vision_default().Matrix(3, 4, [
  20782. r11, r21, r31,
  20783. r12, r22, r32,
  20784. r13, r23, r33,
  20785. t1, t2, t3,
  20786. ]);
  20787. }
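/*
Worked check of the sign fix above (sketch): with r3 = r1 x r2, the
determinant expanded above equals (r1 x r2)'(r1 x r2) = |r1 x r2|^2 >= 0,
and it is exactly +1 when r1 and r2 are orthonormal; the sign flip therefore
appears to act as a defensive guard for degenerate input rather than a case
that occurs with well-formed partial poses.
*/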
  20788. /**
  20789. * Estimate the pose [ R | t ] given a homography in AR screen space
  20790. * @param homography must be valid
  20791. * @param f focal length
  20792. * @returns 3x4 matrix
  20793. */
  20794. _estimatePose(homography, f = this._intrinsics[FY]) {
  20795. const normalizedHomography = this._normalizeHomography(homography, f);
  20796. const partialPose = speedy_vision_default().Matrix.Eye(3);
  20797. // we want the estimated partial pose [ r1 | r2 | t ] to be as close
  20798. // as possible to the normalized homography, up to a scale factor;
  20799. // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
  20800. // it won't be a perfect equality due to noise in the homography
  20801. const residual = speedy_vision_default().Matrix(normalizedHomography);
  20802. for (let k = 0; k < POSE_ITERATIONS; k++) {
  20803. // incrementally improve the partial pose
  20804. const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
  20805. partialPose.setToSync(rt.times(partialPose));
  20806. residual.setToSync(residual.times(rt.inverse()));
  20807. //console.log("residual",residual.toString());
  20808. }
  20809. //console.log('-----------');
  20810. /*
  20811. // test
  20812. const result = Speedy.Matrix.Zeros(3);
  20813. result.setToSync(partialPose.times(normalizedHomography.inverse()));
  20814. const m11 = result.at(0,0);
  20815. result.setToSync(result.times(1/m11));
  20816. console.log("Pose * NORMALIZED HOM^-1", result.toString());
  20817. */
  20818. /*
  20819. const rt = partialPose.read();
  20820. const r = rt.slice(0, 6);
  20821. const t = this._refineTranslation(normalizedHomography, r, rt.slice(6, 9));
  20822. const refinedPartialPose = Speedy.Matrix(3, 3, r.concat(t));
  20823. const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
  20824. */
  20825. // filter the partial pose
  20826. const filteredPartialPose = this._filterPartialPose(partialPose);
  20827. // estimate the full pose
  20828. return this._estimateFullPose(filteredPartialPose);
  20829. }
  20830. /**
  20831. * Store an estimated pose
  20832. * @param pose 3x4 matrix
  20833. */
  20834. _storePose(pose) {
  20835. this._extrinsics = pose.read();
  20836. }
  20837. }
  20838. ;// CONCATENATED MODULE: ./src/geometry/pose.ts
  20839. /*
  20840. * MARTINS.js
  20841. * GPU-accelerated Augmented Reality for the web
  20842. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20843. *
  20844. * This program is free software: you can redistribute it and/or modify
  20845. * it under the terms of the GNU Lesser General Public License as published
  20846. * by the Free Software Foundation, either version 3 of the License, or
  20847. * (at your option) any later version.
  20848. *
  20849. * This program is distributed in the hope that it will be useful,
  20850. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20851. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20852. * GNU Lesser General Public License for more details.
  20853. *
  20854. * You should have received a copy of the GNU Lesser General Public License
  20855. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20856. *
  20857. * pose.ts
  20858. * A pose represents a position and an orientation in a 3D space
  20859. */
  20860. /**
  20861. * A pose represents a position and an orientation in a 3D space
  20862. * (and sometimes a scale, too...)
  20863. */
  20864. class Pose {
  20865. /**
  20866. * Constructor
  20867. * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
  20868. */
  20869. constructor(transform) {
  20870. this._transform = transform;
  20871. }
  20872. /**
  20873. * A transform describing the position and the orientation
  20874. * of the pose relative to the 3D space to which it belongs
  20875. */
  20876. get transform() {
  20877. return this._transform;
  20878. }
  20879. }
  20880. ;// CONCATENATED MODULE: ./src/geometry/transform.ts
  20881. /*
  20882. * MARTINS.js
  20883. * GPU-accelerated Augmented Reality for the web
  20884. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20885. *
  20886. * This program is free software: you can redistribute it and/or modify
  20887. * it under the terms of the GNU Lesser General Public License as published
  20888. * by the Free Software Foundation, either version 3 of the License, or
  20889. * (at your option) any later version.
  20890. *
  20891. * This program is distributed in the hope that it will be useful,
  20892. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20893. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20894. * GNU Lesser General Public License for more details.
  20895. *
  20896. * You should have received a copy of the GNU Lesser General Public License
  20897. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20898. *
  20899. * transform.ts
  20900. * 3D geometrical transforms
  20901. */
  20902. /**
  20903. * A 3D transformation
  20904. */
  20905. class BaseTransform {
  20906. /**
  20907. * Constructor
  20908. * @param matrix a 4x4 matrix
  20909. */
  20910. constructor(matrix) {
  20911. if (matrix.rows != 4 || matrix.columns != 4)
  20912. throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
  20913. this._matrix = matrix;
  20914. }
  20915. /**
  20916. * The 4x4 transformation matrix (read-only)
  20917. */
  20918. get matrix() {
  20919. return this._matrix;
  20920. }
  20921. }
  20922. /**
  20923. * An invertible 3D transformation
  20924. */
  20925. class InvertibleTransform extends BaseTransform {
  20926. /**
  20927. * Constructor
  20928. * @param matrix a 4x4 matrix
  20929. */
  20930. constructor(matrix) {
  20931. // WARNING: we do not check if the matrix actually encodes an invertible transform!
  20932. super(matrix);
  20933. }
  20934. /**
  20935. * The inverse of the transform
  20936. */
  20937. get inverse() {
  20938. const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
  20939. return new InvertibleTransform(inverseMatrix);
  20940. }
  20941. }
  20942. /**
  20943. * A 3D transformation described by translation, rotation and scale
  20944. */
  20945. class StandardTransform extends InvertibleTransform {
  20946. // TODO: position, rotation and scale attributes
  20947. /**
  20948. * Constructor
  20949. * @param matrix a 4x4 matrix
  20950. */
  20951. constructor(matrix) {
  20952. // WARNING: we do not check if the matrix actually encodes a standard transform!
  20953. super(matrix);
  20954. }
  20955. /**
  20956. * The inverse of the transform
  20957. */
  20958. get inverse() {
  20959. /*
  20960. The inverse of a 4x4 standard transform T * R * S...
20961. [ RS t ; 0' 1 ]^(-1)  =  [ ZR'  -ZR't ; 0' 1 ]
  20963. where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
  20964. */
  20965. return super.inverse;
  20966. }
  20967. }
  20968. /**
  20969. * A 3D transformation described by position and orientation
  20970. */
  20971. class RigidTransform extends StandardTransform {
  20972. // TODO: position and rotation attributes (need to decompose the matrix)
  20973. /**
  20974. * Constructor
  20975. * @param matrix a 4x4 matrix
  20976. */
  20977. constructor(matrix) {
  20978. // WARNING: we do not check if the matrix actually encodes a rigid transform!
  20979. super(matrix);
  20980. }
  20981. /**
  20982. * The inverse of the transform
  20983. */
  20984. get inverse() {
  20985. /*
  20986. The inverse of a 4x4 rigid transform
20987. [ R t ; 0' 1 ]^(-1)  =  [ R'  -R't ; 0' 1 ]
  20989. where R is 3x3, t is 3x1 and 0' is 1x3
  20990. */
  20991. const m = this._matrix.read();
  20992. if (m[15] == 0) // error? abs()??
  20993. throw new IllegalOperationError('Not a rigid transform');
  20994. const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
  20995. const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
  20996. const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
  20997. const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
  20998. const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
  20999. const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
  21000. const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
  21001. const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
  21002. const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
  21003. r11, r12, r13, 0,
  21004. r21, r22, r23, 0,
  21005. r31, r32, r33, 0,
  21006. -rt1, -rt2, -rt3, 1
  21007. ]);
  21008. return new RigidTransform(inverseMatrix);
  21009. }
  21010. }
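/*
Why the block formula above works (sketch): multiplying the transform by the
claimed inverse gives the identity,
[ R t ; 0' 1 ] [ R' -R't ; 0' 1 ] = [ R R'  (t - R R' t) ; 0' 1 ] = [ I 0 ; 0' 1 ],
since R R' = I for a rotation matrix; the scale factor s above only
normalizes the homogeneous coordinate to 1 before reading the blocks.
*/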
  21011. ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
  21012. /*
  21013. * MARTINS.js
  21014. * GPU-accelerated Augmented Reality for the web
  21015. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21016. *
  21017. * This program is free software: you can redistribute it and/or modify
  21018. * it under the terms of the GNU Lesser General Public License as published
  21019. * by the Free Software Foundation, either version 3 of the License, or
  21020. * (at your option) any later version.
  21021. *
  21022. * This program is distributed in the hope that it will be useful,
  21023. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21024. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21025. * GNU Lesser General Public License for more details.
  21026. *
  21027. * You should have received a copy of the GNU Lesser General Public License
  21028. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21029. *
  21030. * viewer-pose.ts
  21031. * The pose of a virtual camera in 3D world space at a moment in time
  21032. */
  21033. /**
  21034. * The pose of a virtual camera in 3D world space at a moment in time
  21035. */
  21036. class ViewerPose extends Pose {
  21037. /**
  21038. * Constructor
  21039. * @param camera camera model
  21040. */
  21041. constructor(camera) {
  21042. // compute the view matrix and its inverse in AR screen space
  21043. const viewMatrix = ViewerPose._computeViewMatrix(camera);
  21044. const inverseTransform = new RigidTransform(viewMatrix);
  21045. super(inverseTransform.inverse);
  21046. this._viewMatrix = viewMatrix;
  21047. }
  21048. /**
  21049. * This 4x4 matrix moves 3D points from world space to viewer space. We
  21050. * assume that the camera is looking in the direction of the negative
  21051. * z-axis (WebGL-friendly)
  21052. */
  21053. get viewMatrix() {
  21054. return this._viewMatrix;
  21055. }
  21056. /**
  21057. * Compute the view matrix in AR screen space, measured in pixels
  21058. * @param camera
  21059. * @returns a 4x4 matrix describing a rotation and a translation
  21060. */
  21061. static _computeViewMatrix(camera) {
  21062. /*
  21063. // this is the view matrix in AR screen space, measured in pixels
  21064. // we augment the extrinsics matrix, making it 4x4 by adding a
  21065. // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
  21066. const V = Speedy.Matrix(4, 4, [
  21067. E[0], E[1], E[2], 0,
  21068. E[3], E[4], E[5], 0,
  21069. E[6], E[7], E[8], 0,
  21070. E[9], E[10], E[11], 1
  21071. ]);
  21072. // we premultiply V by F, which performs a rotation around the
  21073. // x-axis by 180 degrees, so that we get the 3D objects in front
  21074. // of the camera pointing in the direction of the negative z-axis
  21075. const F = Speedy.Matrix(4, 4, [
  21076. 1, 0, 0, 0,
  21077. 0,-1, 0, 0,
  21078. 0, 0,-1, 0,
  21079. 0, 0, 0, 1
  21080. ]);
  21081. Matrix F * V is matrix V with the second and third rows negated
  21082. */
  21083. const E = camera.extrinsics;
  21084. return speedy_vision_default().Matrix(4, 4, [
  21085. E[0], -E[1], -E[2], 0,
  21086. E[3], -E[4], -E[5], 0,
  21087. E[6], -E[7], -E[8], 0,
  21088. E[9], -E[10], -E[11], 1
  21089. ]);
  21090. }
  21091. }
  21092. ;// CONCATENATED MODULE: ./src/geometry/view.ts
  21093. /*
  21094. * MARTINS.js
  21095. * GPU-accelerated Augmented Reality for the web
  21096. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21097. *
  21098. * This program is free software: you can redistribute it and/or modify
  21099. * it under the terms of the GNU Lesser General Public License as published
  21100. * by the Free Software Foundation, either version 3 of the License, or
  21101. * (at your option) any later version.
  21102. *
  21103. * This program is distributed in the hope that it will be useful,
  21104. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21105. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21106. * GNU Lesser General Public License for more details.
  21107. *
  21108. * You should have received a copy of the GNU Lesser General Public License
  21109. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21110. *
  21111. * view.ts
  21112. * A view of the 3D world at a moment in time,
  21113. * featuring the means to project points into clip space
  21114. */
  21115. /** Default distance in pixels of the near plane to the optical center of the camera */
  21116. const DEFAULT_NEAR = 1;
  21117. /** Default distance in pixels of the far plane to the optical center of the camera */
  21118. const DEFAULT_FAR = 20000;
  21119. /**
  21120. * A PerspectiveView is a View defining a symmetric frustum around the z-axis
  21121. * (perspective projection)
  21122. */
  21123. class PerspectiveView {
  21124. /**
  21125. * Constructor
  21126. * @param camera camera model
  21127. * @param near distance of the near plane
  21128. * @param far distance of the far plane
  21129. */
  21130. constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  21131. const intrinsics = camera.intrinsics;
  21132. const screenSize = camera.screenSize;
  21133. this._near = Math.max(0, +near);
  21134. this._far = Math.max(0, +far);
  21135. if (this._near >= this._far)
  21136. throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
  21137. this._aspect = screenSize.width / screenSize.height;
  21138. this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
  21139. this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
  21140. }
  21141. /**
  21142. * A 4x4 projection matrix for WebGL
  21143. */
  21144. get projectionMatrix() {
  21145. return this._projectionMatrix;
  21146. }
  21147. /**
  21148. * Aspect ratio of the frustum
  21149. */
  21150. get aspect() {
  21151. return this._aspect;
  21152. }
  21153. /**
  21154. * Vertical field-of-view of the frustum, measured in radians
  21155. */
  21156. get fovy() {
  21157. return 2 * Math.atan(this._tanOfHalfFovy);
  21158. }
  21159. /**
  21160. * Distance of the near plane
  21161. */
  21162. get near() {
  21163. return this._near;
  21164. }
  21165. /**
  21166. * Distance of the far plane
  21167. */
  21168. get far() {
  21169. return this._far;
  21170. }
  21171. /**
  21172. * Compute a perspective projection matrix for WebGL
  21173. * @param K camera intrinsics
  21174. * @param near distance of the near plane
  21175. * @param far distance of the far plane
  21176. */
  21177. static _computeProjectionMatrix(K, near, far) {
  21178. // we assume that the principal point is at the center of the image
  21179. const top = near * (K[V0] / K[FY]);
  21180. const right = near * (K[U0] / K[FX]);
  21181. const bottom = -top, left = -right; // symmetric frustum
  21182. // a derivation of this projection matrix can be found at
  21183. // https://www.songho.ca/opengl/gl_projectionmatrix.html
  21184. // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
  21185. return speedy_vision_default().Matrix(4, 4, [
  21186. 2 * near / (right - left), 0, 0, 0,
  21187. 0, 2 * near / (top - bottom), 0, 0,
  21188. (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
  21189. 0, 0, -2 * far * near / (far - near), 0
  21190. ]);
  21191. }
21192. }
}
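/*
Because the frustum above is symmetric (left = -right, bottom = -top), the
matrix reduces to the familiar perspective(fovy, aspect, near, far) form.
A sketch of that equivalent construction (plain JS, column-major like the
matrix above):
const perspective = (fovy, aspect, near, far) => {
    const f = 1 / Math.tan(fovy / 2); // = fy / v0 = near / top
    return [
        f / aspect, 0, 0, 0,
        0, f, 0, 0,
        0, 0, -(far + near) / (far - near), -1,
        0, 0, -2 * far * near / (far - near), 0
    ];
};
// f / aspect = fx / u0, since fx == fy and the principal point is assumed to
// be at the center of the screen; the (right + left) / (right - left) and
// (top + bottom) / (top - bottom) terms vanish.
*/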
  21193. ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
  21194. /*
  21195. * MARTINS.js
  21196. * GPU-accelerated Augmented Reality for the web
  21197. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21198. *
  21199. * This program is free software: you can redistribute it and/or modify
  21200. * it under the terms of the GNU Lesser General Public License as published
  21201. * by the Free Software Foundation, either version 3 of the License, or
  21202. * (at your option) any later version.
  21203. *
  21204. * This program is distributed in the hope that it will be useful,
  21205. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21206. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21207. * GNU Lesser General Public License for more details.
  21208. *
  21209. * You should have received a copy of the GNU Lesser General Public License
  21210. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21211. *
21212. * viewer.ts
  21213. * A viewer represents a virtual camera in 3D world space
  21214. */
  21215. /**
  21216. * A viewer represents a virtual camera in 3D world space
  21217. */
  21218. class Viewer {
  21219. /**
  21220. * Constructor
  21221. * @param camera camera model
  21222. */
  21223. constructor(camera) {
  21224. this._pose = new ViewerPose(camera);
  21225. this._views = [new PerspectiveView(camera)];
  21226. }
  21227. /**
  21228. * The pose of this viewer
  21229. */
  21230. get pose() {
  21231. return this._pose;
  21232. }
  21233. /**
  21234. * The view of this viewer (only for monoscopic rendering)
  21235. */
  21236. get view() {
  21237. /*
  21238. if(this._views.length > 1)
  21239. throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
  21240. */
  21241. return this._views[0];
  21242. }
  21243. /**
  21244. * The views of this viewer
  21245. */
  21246. /*
  21247. get views(): View[]
  21248. {
  21249. return this._views.concat([]);
  21250. }
  21251. */
  21252. /**
  21253. * Convert a pose from world space to viewer space
  21254. * @param pose a pose in world space
  21255. * @returns a pose in viewer space
  21256. */
  21257. convertToViewerSpace(pose) {
  21258. const modelMatrix = pose.transform.matrix;
  21259. const viewMatrix = this._pose.viewMatrix;
  21260. const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
  21261. const transform = new StandardTransform(modelViewMatrix);
  21262. return new Pose(transform);
  21263. }
  21264. }
  21265. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
  21266. /*
  21267. * MARTINS.js
  21268. * GPU-accelerated Augmented Reality for the web
  21269. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21270. *
  21271. * This program is free software: you can redistribute it and/or modify
  21272. * it under the terms of the GNU Lesser General Public License as published
  21273. * by the Free Software Foundation, either version 3 of the License, or
  21274. * (at your option) any later version.
  21275. *
  21276. * This program is distributed in the hope that it will be useful,
  21277. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21278. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21279. * GNU Lesser General Public License for more details.
  21280. *
  21281. * You should have received a copy of the GNU Lesser General Public License
  21282. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21283. *
  21284. * tracking.ts
  21285. * Tracking state of the Image Tracker
  21286. */
21287. /** Whether or not we want to accelerate GPU-CPU transfers. Using turbo adds a slight delay to the tracking */
  21288. const USE_TURBO = true;
  21289. /** Number of PBOs; meaningful only when using turbo */
  21290. const NUMBER_OF_PBOS = 2;
  21291. /** Frame skipping; meaningful only when using turbo */
  21292. const TURBO_SKIP = 2;
  21293. /**
  21294. * The tracking state of the Image Tracker tracks
  21295. * keypoints of the image target and updates the
  21296. * rectification matrix
  21297. */
  21298. class ImageTrackerTrackingState extends ImageTrackerState {
  21299. /**
  21300. * Constructor
  21301. * @param imageTracker
  21302. */
  21303. constructor(imageTracker) {
  21304. super('tracking', imageTracker);
  21305. this._referenceImage = null;
  21306. this._warpHomography = speedy_vision_default().Matrix.Eye(3);
  21307. this._poseHomography = speedy_vision_default().Matrix.Eye(3);
  21308. this._initialHomography = speedy_vision_default().Matrix.Eye(3);
  21309. this._initialKeypoints = [];
  21310. this._counter = 0;
  21311. this._camera = new CameraModel();
  21312. this._predictedKeypoints = [];
  21313. this._lastPipelineOutput = { keypoints: [] };
  21314. this._pipelineCounter = 0;
  21315. this._lastOutput = {};
  21316. this._lostCounter = 0;
  21317. // we need at least 4 correspondences of points to compute a homography matrix
  21318. Utils.assert(TRACK_MIN_MATCHES >= 4);
  21319. }
  21320. /**
  21321. * Called as soon as this becomes the active state, just before update() runs for the first time
  21322. * @param settings
  21323. */
  21324. onEnterState(settings) {
  21325. const homography = settings.homography;
  21326. const referenceImage = settings.referenceImage;
  21327. const templateKeypoints = settings.templateKeypoints;
  21328. const keypointPortalSink = settings.keypointPortalSink;
  21329. const screenSize = settings.screenSize; // this.screenSize is not yet set
  21330. const keypointPortalSource = this._pipeline.node('keypointPortalSource');
  21331. // this shouldn't happen
  21332. if (!referenceImage)
  21333. throw new IllegalOperationError(`Can't track a null reference image`);
  21334. // set attributes
  21335. this._referenceImage = referenceImage;
  21336. this._warpHomography = speedy_vision_default().Matrix(homography);
  21337. this._poseHomography = speedy_vision_default().Matrix(homography);
  21338. this._initialHomography = speedy_vision_default().Matrix(homography);
  21339. this._initialKeypoints = templateKeypoints;
  21340. this._counter = 0;
  21341. this._predictedKeypoints = [];
  21342. this._lastPipelineOutput = { keypoints: [] };
  21343. this._pipelineCounter = 0;
  21344. this._lastOutput = {};
  21345. this._lostCounter = 0;
  21346. // setup portals
  21347. keypointPortalSource.source = keypointPortalSink;
  21348. // setup camera
  21349. this._camera.init(screenSize);
  21350. // emit event
  21351. const ev = new ImageTrackerEvent('targetfound', referenceImage);
  21352. this._imageTracker.dispatchEvent(ev);
  21353. // log
  21354. Utils.log(`Tracking image "${referenceImage.name}"...`);
  21355. }
  21356. /**
  21357. * Called when leaving the state
  21358. */
  21359. onLeaveState() {
  21360. const referenceImage = this._referenceImage;
  21361. // release the camera
  21362. this._camera.release();
  21363. // emit event
  21364. const ev = new ImageTrackerEvent('targetlost', referenceImage);
  21365. this._imageTracker.dispatchEvent(ev);
  21366. }
  21367. /**
  21368. * Called just before the GPU processing
  21369. * @returns promise
  21370. */
  21371. _beforeUpdate() {
  21372. const imageRectifier = this._pipeline.node('imageRectifier');
  21373. const borderClipper = this._pipeline.node('borderClipper');
  21374. const keypointRectifier = this._pipeline.node('keypointRectifier');
  21375. const screenSize = this.screenSize;
  21376. /*
  21377. // pause media (test)
  21378. const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
  21379. const media = source.media as SpeedyMedia;
  21380. (media.source as HTMLVideoElement).pause();
  21381. */
  21382. // clip keypoints from the borders of the target image
  21383. borderClipper.imageSize = screenSize;
  21384. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  21385. // rectify the image
  21386. return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
  21387. imageRectifier.transform = warp;
  21388. });
  21389. }
  21390. /**
  21391. * GPU processing
  21392. * @returns promise with the pipeline results
  21393. */
  21394. _gpuUpdate() {
  21395. //return super._gpuUpdate();
  21396. // No turbo?
  21397. if (!USE_TURBO || Settings.powerPreference == 'low-power')
  21398. return super._gpuUpdate();
  21399. // When using turbo, we reduce the GPU usage by skipping every other frame
  21400. const counter = this._pipelineCounter;
  21401. this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
  21402. // Skip frame
  21403. if (counter != 0) {
  21404. if (this._lastPipelineOutput.keypoints !== undefined) {
  21405. this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
  21406. }
  21407. else
  21408. this._predictedKeypoints.length = 0;
  21409. this._lastPipelineOutput.keypoints = this._predictedKeypoints;
  21410. return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
  21411. }
  21412. // Run the pipeline and store the results
  21413. return super._gpuUpdate().then(results => {
  21414. this._lastPipelineOutput = results;
  21415. return results;
  21416. });
  21417. }
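/*
Frame pattern with turbo (sketch): the pipeline runs only when counter == 0,
so with TURBO_SKIP = 2 it runs on every other frame; on skipped frames the
keypoints are predicted from the last pipeline output and the cached result
is reused:
  frame:    0     1     2     3     4    ...
  counter:  0     1     0     1     0    ...
  action:   run   skip  run   skip  run  ...
*/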
  21418. /**
  21419. * Post processing that takes place just after the GPU processing
  21420. * @param result pipeline results
  21421. * @returns state output
  21422. */
  21423. _afterUpdate(result) {
  21424. const imageRectifier = this._pipeline.node('imageRectifier');
  21425. const keypoints = result.keypoints;
  21426. const image = result.image;
  21427. const referenceImage = this._referenceImage;
  21428. // find the best keypoint matches
  21429. return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
  21430. // find motion models
  21431. return speedy_vision_default().Promise.all([
  21432. this._findAffineMotion(matches),
  21433. this._findPerspectiveMotion(matches)
  21434. ]);
  21435. }).then(([affineMotion, perspectiveMotion]) => {
  21436. const lowPower = (Settings.powerPreference == 'low-power');
  21437. const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
  21438. // update warp homography
  21439. const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
  21440. const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
  21441. if (!USE_TURBO || this._counter % delay == remainder)
  21442. this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
  21443. // update pose homography
  21444. if (!frozen)
  21445. this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
  21446. // update counter
  21447. this._counter = (this._counter + 1) % delay;
  21448. // update the camera
  21449. if (!frozen)
  21450. return this._camera.update(this._poseHomography, this.screenSize);
  21451. else
  21452. return this._camera.matrix;
  21453. }).then(_ => {
  21454. // find the inverse of the rectification matrix
  21455. const rectificationMatrix = imageRectifier.transform;
  21456. const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
  21457. // move keypoints from rectified space back to image space
  21458. const n = keypoints.length;
  21459. const coords = new Array(2 * n);
  21460. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21461. coords[j] = keypoints[i].position.x;
  21462. coords[j + 1] = keypoints[i].position.y;
  21463. }
  21464. return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
  21465. /*
  21466. // test image center
  21467. const coords2: number[] = new Array(2 * n);
  21468. for(let i = 0, j = 0; i < n; i++, j += 2) {
  21469. coords2[j] = this._imageTracker.screenSize.width / 2;
  21470. coords2[j+1] = this._imageTracker.screenSize.height / 2;
  21471. if(i % 2 == 0) {
  21472. coords2[j] = this._imageTracker.screenSize.width / 4;
  21473. coords2[j+1] = this._imageTracker.screenSize.height / 4;
  21474. }
  21475. }
  21476. return Speedy.Matrix.applyPerspectiveTransform(
  21477. Speedy.Matrix.Zeros(2, n),
  21478. Speedy.Matrix(2, n, coords2),
  21479. this._poseHomography
  21480. //this._warpHomography
  21481. );
  21482. */
  21483. }).then(mat => {
  21484. /*
  21485. const n = keypoints.length;
  21486. const coords = mat.read();
  21487. // ** this will interfere with the calculations when frame skipping is on **
  21488. // get keypoints in image space
  21489. for(let i = 0, j = 0; i < n; i++, j += 2) {
  21490. keypoints[i].position.x = coords[j];
  21491. keypoints[i].position.y = coords[j+1];
  21492. }
  21493. */
  21494. // find a polyline surrounding the target
  21495. return this._findPolyline(this._poseHomography, this.screenSize);
  21496. //return this._findPolyline(this._warpHomography, this.screenSize);
  21497. }).then(polyline => {
  21498. // we let the target object be at the origin of the world space
  21499. // (identity transform). We also perform a change of coordinates,
  21500. // so that we move out from pixel space and into normalized space
  21501. const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
  21502. const transform = new StandardTransform(modelMatrix);
  21503. const pose = new Pose(transform);
  21504. // given the current state of the camera model, we get a viewer
  21505. // compatible with the pose of the target
  21506. const viewer = new Viewer(this._camera);
  21507. // the trackable object
  21508. const trackable = {
  21509. pose: pose,
  21510. referenceImage: referenceImage
  21511. };
  21512. // the result generated by the image tracker
  21513. const result = {
  21514. tracker: this._imageTracker,
  21515. trackables: [trackable],
  21516. viewer: viewer
  21517. };
  21518. // build and save the output
  21519. this._lastOutput = {
  21520. exports: result,
  21521. cameraMatrix: this._camera.matrix,
  21522. homography: this._warpHomography,
  21523. //keypoints: keypoints,
  21524. screenSize: this.screenSize,
  21525. image: image,
  21526. polyline: polyline,
  21527. };
  21528. // we have successfully tracked the target in this frame
  21529. this._lostCounter = 0;
  21530. // done!
  21531. return {
  21532. nextState: 'tracking',
  21533. trackerOutput: this._lastOutput
  21534. };
  21535. }).catch(err => {
  21536. // give some tolerance to tracking errors
  21537. if (err instanceof TrackingError) {
  21538. if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
  21539. //console.log("ABSORB",this._lostCounter,err.toString())
  21540. // absorb the error
  21541. return {
  21542. nextState: 'tracking',
  21543. trackerOutput: this._lastOutput
  21544. };
  21545. }
  21546. }
  21547. // lost tracking
  21548. Utils.warning(`The target has been lost! ${err.toString()}`);
  21549. this._camera.reset();
  21550. // go back to the scanning state
  21551. return {
  21552. nextState: 'scanning',
  21553. trackerOutput: {
  21554. image: image,
  21555. screenSize: this.screenSize,
  21556. },
  21557. };
  21558. });
  21559. }
  21560. /**
  21561. * Find quality matches between two sets of keypoints
  21562. * @param currKeypoints keypoints of the current frame
  21563. * @param prevKeypoints keypoints of the previous frame
  21564. * @returns quality matches
  21565. */
  21566. _findQualityMatches(currKeypoints, prevKeypoints) {
  21567. const result = [[], []];
  21568. const n = currKeypoints.length;
  21569. for (let i = 0; i < n; i++) {
  21570. const currKeypoint = currKeypoints[i];
  21571. if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
  21572. const d1 = currKeypoint.matches[0].distance;
  21573. const d2 = currKeypoint.matches[1].distance;
  21574. if (d1 <= TRACK_MATCH_RATIO * d2) {
  21575. const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
  21576. result[0].push(currKeypoint);
  21577. result[1].push(prevKeypoint);
  21578. }
  21579. }
  21580. }
  21581. return result;
  21582. }
  21583. /**
  21584. * Find a better spatial distribution of the input matches
  21585. * @param matches quality matches
  21586. * @returns refined quality matches
  21587. */
  21588. _refineQualityMatches(matches) {
  21589. const currKeypoints = matches[0];
  21590. const prevKeypoints = matches[1];
  21591. // find a better spatial distribution of the keypoints
  21592. const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
  21593. const n = indices.length; // number of refined matches
  21594. // assemble output
  21595. const result = [new Array(n), new Array(n)];
  21596. for (let i = 0; i < n; i++) {
  21597. result[0][i] = currKeypoints[indices[i]];
  21598. result[1][i] = prevKeypoints[indices[i]];
  21599. }
  21600. // done!
  21601. return result;
  21602. }
  21603. /**
  21604. * Spatially distribute keypoints over a grid
  21605. * @param keypoints keypoints to be distributed
  21606. * @param gridCells number of grid elements in each axis
  21607. * @returns a list of indices of keypoints[]
  21608. */
  21609. _distributeKeypoints(keypoints, gridCells) {
  21610. // get the coordinates of the keypoints
  21611. const n = keypoints.length;
  21612. const points = new Array(2 * n);
  21613. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21614. points[j] = keypoints[i].x;
  21615. points[j + 1] = keypoints[i].y;
  21616. }
21617. // normalize the coordinates to [0,1) x [0,1)
  21618. this._normalizePoints(points);
  21619. // distribute the keypoints over a grid
  21620. const numberOfCells = gridCells * gridCells;
  21621. const grid = (new Array(numberOfCells)).fill(-1);
  21622. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21623. // find the grid location of the i-th point
  21624. const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
  21625. const yg = Math.floor(points[j + 1] * gridCells);
  21626. // store the index of the i-th point in the grid
  21627. grid[yg * gridCells + xg] = i;
  21628. }
  21629. // retrieve points of the grid
  21630. const indices = [];
  21631. for (let g = 0; g < numberOfCells; g++) {
  21632. if (grid[g] >= 0) {
  21633. const i = grid[g];
  21634. indices.push(i);
  21635. }
  21636. }
  21637. // done!
  21638. return indices;
  21639. }
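/*
 * Worked example of the grid distribution above (illustrative only): with
 * gridCells = 2 the normalized square is split into 4 cells, and each cell
 * keeps at most one keypoint index (the last one written wins). Suppose the
 * points, after normalization, are:
 *
 *     points:           (0.1, 0.1), (0.2, 0.3), (0.9, 0.8)
 *     cells (xg, yg):   (0, 0),     (0, 0),     (1, 1)
 *     grid:             [1, -1, -1, 2]   // cell 0 kept index 1, cell 3 kept index 2
 *     indices returned: [1, 2]
 */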
  21640. /**
  21641. * Normalize points to [0,1)^2
  21642. * @param points 2 x n matrix of points in column-major format
  21643. * @returns points
  21644. */
  21645. _normalizePoints(points) {
  21646. Utils.assert(points.length % 2 == 0);
  21647. const n = points.length / 2;
  21648. if (n == 0)
  21649. return points;
  21650. let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
  21651. let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
  21652. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21653. const x = points[j], y = points[j + 1];
  21654. xmin = x < xmin ? x : xmin;
  21655. ymin = y < ymin ? y : ymin;
  21656. xmax = x > xmax ? x : xmax;
  21657. ymax = y > ymax ? y : ymax;
  21658. }
  21659. const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
  21660. const ylen = ymax - ymin + 1;
  21661. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21662. points[j] = (points[j] - xmin) / xlen;
  21663. points[j + 1] = (points[j + 1] - ymin) / ylen;
  21664. }
  21665. return points;
  21666. }
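/*
 * Numeric example of the normalization above (illustrative only): for the two
 * points (10, 20) and (30, 40), xmin = 10, xmax = 30, ymin = 20, ymax = 40, so
 * xlen = ylen = 21 and the points become (0, 0) and (20/21, 20/21) ≈ (0.95, 0.95).
 * The +1 in the lengths is what keeps every normalized coordinate strictly below 1.
 */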
  21667. /**
  21668. * Find a matrix with the coordinates of quality matches
  21669. * @param matches n quality matches
  21670. * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
  21671. */
  21672. _findMatrixOfMatches(matches) {
  21673. const n = matches[0].length;
  21674. Utils.assert(n > 0);
  21675. // sets of keypoints
  21676. const currKeypoints = matches[0];
  21677. const prevKeypoints = matches[1];
  21678. // get the coordinates of the keypoints of the set of refined matches
  21679. const src = new Array(2 * n);
  21680. const dst = new Array(2 * n);
  21681. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21682. src[j] = prevKeypoints[i].x;
  21683. src[j + 1] = prevKeypoints[i].y;
  21684. dst[j] = currKeypoints[i].x;
  21685. dst[j + 1] = currKeypoints[i].y;
  21686. }
  21687. // assemble the matrix
  21688. return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
  21689. }
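/*
 * Shape of the matrix assembled above (illustrative): for n = 2 matches, the
 * 2 x 4 matrix is split into [ prevKeypoints | currKeypoints ] blocks, i.e.,
 *
 *     [ prev0.x  prev1.x | curr0.x  curr1.x ]
 *     [ prev0.y  prev1.y | curr0.y  curr1.y ]
 *
 * which is why the motion estimators below read the source block with
 * block(0, 1, 0, n - 1) and the destination block with block(0, 1, n, 2n - 1).
 */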
  21690. /**
  21691. * Preprocess keypoint matches
  21692. * @param currKeypoints keypoints of the current frame
  21693. * @param prevKeypoints keypoints of the previous frame
  21694. * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
  21695. * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
  21696. */
  21697. _preprocessMatches(currKeypoints, prevKeypoints) {
  21698. // find and refine quality matches
  21699. const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
  21700. const refinedMatches = this._refineQualityMatches(qualityMatches);
  21701. // not enough matches?
  21702. const n = refinedMatches[0].length;
  21703. if (n < TRACK_MIN_MATCHES)
  21704. return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
  21705. // find matrix of matches
  21706. const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
  21707. // warp matrix of matches
  21708. const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
  21709. return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
  21710. }
  21711. /**
  21712. * Find an affine motion model of the target image
  21713. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  21714. * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
  21715. */
  21716. _findAffineMotion(preprocessedMatches) {
  21717. const model = speedy_vision_default().Matrix.Eye(3);
  21718. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  21719. // find motion model
  21720. return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  21721. method: 'pransac',
  21722. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21723. numberOfHypotheses: 512,
  21724. bundleSize: 128,
  21725. }).then(_ => {
  21726. // validate the model
  21727. const a00 = model.at(0, 0);
  21728. if (Number.isNaN(a00))
  21729. throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
  21730. // done!
  21731. return model;
  21732. });
  21733. }
  21734. /**
  21735. * Find a perspective motion model of the target image
  21736. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  21737. * @returns a promise that resolves to a 3x3 perspective motion model
  21738. */
  21739. _findPerspectiveMotion(preprocessedMatches) {
  21740. /*
  21741. We can probably get more accurate motion estimates if we
  21742. work in 3D rather than in 2D. We're currently estimating
  21743. an affine transform in image space. What if we projected
  21744. the keypoints into world space, estimated the camera motion
21745. (rotation and translation) that best describes the observed
21746. motion of the keypoints, and then projected things
  21747. back to image space? Need to figure this out; we'll get a
  21748. homography matrix.
  21749. Note: keypoints are in rectified image space.
  21750. Note: work with a 6 DoF perspective transform instead of 8.
  21751. */
  21752. const model = speedy_vision_default().Matrix.Zeros(3);
  21753. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  21754. // find motion model
  21755. return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  21756. method: 'pransac',
  21757. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21758. numberOfHypotheses: 512 * 2,
  21759. bundleSize: 128 * 4, //*4
  21760. }).then(_ => {
  21761. // validate the model
  21762. const a00 = model.at(0, 0);
  21763. if (Number.isNaN(a00))
  21764. throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
  21765. // done!
  21766. return model;
  21767. });
  21768. }
  21769. /**
  21770. * Find a rectification matrix to be applied to the target image
  21771. * @param homography maps a reference image to the AR screen
  21773. * @param screenSize AR screen
  21774. * @returns promise that resolves to a rectification matrix
  21775. */
  21776. _findImageWarp(homography, screenSize) {
  21777. const referenceImage = this._referenceImage;
  21778. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21779. const mat = speedy_vision_default().Matrix.Zeros(3);
  21780. return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
  21781. }
  21782. /**
  21783. * Find a warp to be applied to the keypoints
  21784. * @returns affine transform
  21785. */
  21786. _findKeypointWarp() {
  21787. const referenceImage = this._referenceImage;
  21788. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21789. const screenSize = this.screenSize;
  21790. const sw = screenSize.width, sh = screenSize.height;
  21791. const mat = speedy_vision_default().Matrix.Eye(3, 3);
  21792. // no rotation is needed
  21793. if (!this._mustRotateWarpedImage(media, screenSize))
  21794. return speedy_vision_default().Promise.resolve(mat);
  21795. // rotate by 90 degrees clockwise and scale
  21796. return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
  21797. }
  21798. /**
  21799. * Predict the keypoints without actually looking at the image
  21800. * @param curr keypoints at time t (will modify the contents)
  21801. * @param initial keypoints at time t-1 (not just t = 0)
  21802. * @returns keypoints at time t+1
  21803. */
  21804. _predictKeypoints(curr, initial) {
  21805. // the target image is likely to be moving roughly in
  21806. // the same manner as it was in the previous frame
  21807. const next = [];
  21808. const n = curr.length;
  21809. for (let i = 0; i < n; i++) {
  21810. const cur = curr[i];
  21811. if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
  21812. continue;
  21813. /*
  21814. else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
  21815. continue;
  21816. */
  21817. const ini = initial[cur.matches[0].index];
  21818. const dx = cur.position.x - ini.position.x;
  21819. const dy = cur.position.y - ini.position.y;
  21820. // a better mathematical model is needed
  21821. const alpha = 0.8; //0.2;
  21822. cur.position.x = ini.position.x + alpha * dx;
  21823. cur.position.y = ini.position.y + alpha * dy;
  21824. next.push(cur);
  21825. }
  21826. // done!
  21827. return next;
  21828. }
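/*
 * Example of the prediction above (illustrative): if a keypoint was at
 * (100, 100) in the initial frame and is matched at (110, 104) now, then
 * dx = 10, dy = 4 and, with alpha = 0.8, its predicted position becomes
 * (100 + 0.8 * 10, 100 + 0.8 * 4) = (108, 103.2). In other words, the
 * estimated displacement is damped by alpha before being reused.
 */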
  21829. /**
  21830. * Create & setup the pipeline
  21831. * @returns pipeline
  21832. */
  21833. _createPipeline() {
  21834. const pipeline = speedy_vision_default().Pipeline();
  21835. const source = speedy_vision_default().Image.Source('source');
  21836. const screen = speedy_vision_default().Transform.Resize('screen');
  21837. const greyscale = speedy_vision_default().Filter.Greyscale();
  21838. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  21839. const nightvision = speedy_vision_default().Filter.Nightvision();
  21840. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  21841. const blur = speedy_vision_default().Filter.GaussianBlur();
  21842. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  21843. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  21844. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  21845. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  21846. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  21847. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  21848. const clipper = speedy_vision_default().Keypoint.Clipper();
  21849. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  21850. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  21851. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  21852. const imageSink = speedy_vision_default().Image.Sink('image');
  21853. source.media = null;
  21854. screen.size = speedy_vision_default().Size(0, 0);
  21855. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21856. nightvision.gain = NIGHTVISION_GAIN;
  21857. nightvision.offset = NIGHTVISION_OFFSET;
  21858. nightvision.decay = NIGHTVISION_DECAY;
  21859. nightvision.quality = NIGHTVISION_QUALITY;
  21860. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  21861. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  21862. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  21863. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  21864. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  21865. detector.quality = TRACK_HARRIS_QUALITY;
  21866. detector.capacity = TRACK_DETECTOR_CAPACITY;
  21867. subpixel.method = SUBPIXEL_METHOD;
  21868. clipper.size = TRACK_MAX_KEYPOINTS;
  21869. borderClipper.imageSize = screen.size;
  21870. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  21871. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21872. matcher.k = 2;
  21873. keypointPortalSource.source = null;
  21874. keypointSink.turbo = USE_TURBO;
  21875. // prepare input
  21876. source.output().connectTo(screen.input());
  21877. screen.output().connectTo(greyscale.input());
  21878. // preprocess images
  21879. greyscale.output().connectTo(imageRectifier.input());
  21880. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  21881. imageRectifier.output().connectTo(nightvision.input());
  21882. nightvision.output().connectTo(nightvisionMux.input('in1'));
  21883. // keypoint detection & clipping
  21884. nightvisionMux.output().connectTo(detector.input());
  21885. detector.output().connectTo(borderClipper.input());
  21886. borderClipper.output().connectTo(clipper.input());
  21887. // keypoint refinement
  21888. imageRectifier.output().connectTo(denoiser.input());
  21889. denoiser.output().connectTo(subpixel.input('image'));
  21890. clipper.output().connectTo(subpixel.input('keypoints'));
  21891. // keypoint description
  21892. imageRectifier.output().connectTo(blur.input());
  21893. blur.output().connectTo(descriptor.input('image'));
  21894. subpixel.output().connectTo(descriptor.input('keypoints'));
  21895. // keypoint matching
  21896. keypointPortalSource.output().connectTo(matcher.input('database'));
  21897. descriptor.output().connectTo(matcher.input('keypoints'));
  21898. // prepare output
  21899. descriptor.output().connectTo(keypointRectifier.input());
  21900. //preMatcher.output().connectTo(keypointRectifier.input());
  21901. keypointRectifier.output().connectTo(keypointSink.input());
  21902. matcher.output().connectTo(keypointSink.input('matches'));
  21903. //imageRectifier.output().connectTo(imageSink.input());
  21904. // done!
  21905. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
  21906. return pipeline;
  21907. }
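/*
 * Rough dataflow of the pipeline assembled above (a summary of the connections
 * made in _createPipeline; the code is the authoritative wiring):
 *
 *     source -> screen -> greyscale -> imageRectifier
 *     imageRectifier -> nightvisionMux (in0)
 *     imageRectifier -> nightvision -> nightvisionMux (in1)
 *     nightvisionMux -> detector -> borderClipper -> clipper
 *     imageRectifier -> denoiser -> subpixel (image);   clipper -> subpixel (keypoints)
 *     imageRectifier -> blur -> descriptor (image);     subpixel -> descriptor (keypoints)
 *     keypointPortalSource -> matcher (database);       descriptor -> matcher (keypoints)
 *     descriptor -> keypointRectifier -> keypointSink;  matcher -> keypointSink (matches)
 */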
  21908. }
  21909. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
  21910. /*
  21911. * MARTINS.js
  21912. * GPU-accelerated Augmented Reality for the web
  21913. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21914. *
  21915. * This program is free software: you can redistribute it and/or modify
  21916. * it under the terms of the GNU Lesser General Public License as published
  21917. * by the Free Software Foundation, either version 3 of the License, or
  21918. * (at your option) any later version.
  21919. *
  21920. * This program is distributed in the hope that it will be useful,
  21921. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21922. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21923. * GNU Lesser General Public License for more details.
  21924. *
  21925. * You should have received a copy of the GNU Lesser General Public License
  21926. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21927. *
  21928. * image-tracker.ts
  21929. * Image Tracker
  21930. */
21931. /** A helper that formats a size as a "WIDTHxHEIGHT" string */
  21932. const formatSize = (size) => `${size.width}x${size.height}`;
  21933. /**
  21934. * The ImageTracker tracks an image (one at a time)
  21935. */
  21936. class ImageTracker extends AREventTarget {
  21937. /**
  21938. * Constructor
  21939. */
  21940. constructor() {
  21941. super();
  21942. // the states
  21943. this._state = {
  21944. 'initial': new ImageTrackerInitialState(this),
  21945. 'training': new ImageTrackerTrainingState(this),
  21946. 'scanning': new ImageTrackerScanningState(this),
  21947. 'pre-tracking': new ImageTrackerPreTrackingState(this),
  21948. 'tracking': new ImageTrackerTrackingState(this),
  21949. };
  21950. // initial setup
  21951. this._session = null;
  21952. this._activeStateName = 'initial';
  21953. this._lastOutput = {};
  21954. this._database = new ReferenceImageDatabase();
  21955. // user settings
  21956. this._resolution = DEFAULT_TRACKING_RESOLUTION;
  21957. }
  21958. /**
  21959. * The type of the tracker
  21960. */
  21961. get type() {
  21962. return 'image-tracker';
  21963. }
  21964. /**
  21965. * Current state name
  21966. */
  21967. get state() {
  21968. return this._activeStateName;
  21969. }
  21970. /**
  21971. * Reference Image Database
  21972. * Must be configured before training the tracker
  21973. */
  21974. get database() {
  21975. return this._database;
  21976. }
  21977. /**
  21978. * Resolution of the AR screen space
  21979. */
  21980. get resolution() {
  21981. return this._resolution;
  21982. }
  21983. /**
  21984. * Resolution of the AR screen space
  21985. */
  21986. set resolution(resolution) {
  21987. this._resolution = resolution;
  21988. }
  21989. /**
  21990. * Size of the AR screen space, in pixels
  21991. * @internal
  21992. */
  21993. get screenSize() {
  21994. return this._state[this._activeStateName].screenSize;
  21995. }
  21996. /**
  21997. * Last emitted output
  21998. * @internal
  21999. */
  22000. get _output() {
  22001. return this._lastOutput;
  22002. }
  22003. /**
  22004. * Stats related to this tracker
  22005. * @internal
  22006. */
  22007. get _stats() {
  22008. return `${formatSize(this.screenSize)} ${this.state}`;
  22009. }
  22010. /**
  22011. * Initialize this tracker
  22012. * @param session
  22013. * @returns promise that resolves after the tracker has been initialized
  22014. * @internal
  22015. */
  22016. _init(session) {
  22017. // store the session
  22018. this._session = session;
  22019. // initialize states
  22020. for (const state of Object.values(this._state))
  22021. state.init();
  22022. // done!
  22023. return speedy_vision_default().Promise.resolve();
  22024. }
  22025. /**
  22026. * Release this tracker
  22027. * @returns promise that resolves after the tracker has been released
  22028. * @internal
  22029. */
  22030. _release() {
  22031. // release states
  22032. for (const state of Object.values(this._state))
  22033. state.release();
  22034. // unlink session
  22035. this._session = null;
  22036. // done!
  22037. return speedy_vision_default().Promise.resolve();
  22038. }
  22039. /**
  22040. * Update the tracker
  22041. * @returns promise
  22042. * @internal
  22043. */
  22044. _update() {
  22045. // validate
  22046. if (this._session == null)
  22047. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
  22048. // compute the screen size for image processing purposes
  22049. // note: this may change over time...!
  22050. const media = this._session.media;
  22051. const aspectRatio = media.width / media.height;
  22052. const screenSize = Utils.resolution(this._resolution, aspectRatio);
  22053. // run the active state
  22054. const activeState = this._state[this._activeStateName];
  22055. return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
  22056. // update the output of the tracker
  22057. this._lastOutput = trackerOutput;
  22058. // need to change the state?
  22059. if (this._activeStateName != nextState) {
  22060. activeState.onLeaveState();
  22061. this._activeStateName = nextState;
  22062. this._state[nextState].onEnterState(nextStateSettings || {});
  22063. }
  22064. });
  22065. }
  22066. /**
  22067. * Get reference image
22068. * @param keypointIndex index of the keypoint (-1 if not found)
22069. * @returns the corresponding reference image
  22070. * @internal
  22071. */
  22072. _referenceImageOfKeypoint(keypointIndex) {
  22073. const training = this._state.training;
  22074. return training.referenceImageOfKeypoint(keypointIndex);
  22075. }
  22076. /**
  22077. * Get reference image index
22078. * @param keypointIndex index of the keypoint (-1 if not found)
  22079. * @returns reference image index, or -1 if not found
  22080. * @internal
  22081. */
  22082. _referenceImageIndexOfKeypoint(keypointIndex) {
  22083. const training = this._state.training;
  22084. return training.referenceImageIndexOfKeypoint(keypointIndex);
  22085. }
  22086. /**
  22087. * Get a keypoint of the trained set
  22088. * @param keypointIndex
  22089. * @returns a keypoint
  22090. * @internal
  22091. */
  22092. _referenceKeypoint(keypointIndex) {
  22093. const training = this._state.training;
  22094. return training.referenceKeypoint(keypointIndex);
  22095. }
  22096. }
  22097. ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
  22098. /*
  22099. * MARTINS.js
  22100. * GPU-accelerated Augmented Reality for the web
  22101. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22102. *
  22103. * This program is free software: you can redistribute it and/or modify
  22104. * it under the terms of the GNU Lesser General Public License as published
  22105. * by the Free Software Foundation, either version 3 of the License, or
  22106. * (at your option) any later version.
  22107. *
  22108. * This program is distributed in the hope that it will be useful,
  22109. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22110. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22111. * GNU Lesser General Public License for more details.
  22112. *
  22113. * You should have received a copy of the GNU Lesser General Public License
  22114. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22115. *
  22116. * tracker-factory.ts
  22117. * Tracker factory
  22118. */
  22119. /**
  22120. * Tracker factory
  22121. */
  22122. class TrackerFactory {
  22123. /**
  22124. * Create an Image Tracker
  22125. */
  22126. static ImageTracker() {
  22127. return new ImageTracker();
  22128. }
  22129. }
  22130. ;// CONCATENATED MODULE: ./src/sources/video-source.ts
  22131. /*
  22132. * MARTINS.js
  22133. * GPU-accelerated Augmented Reality for the web
  22134. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22135. *
  22136. * This program is free software: you can redistribute it and/or modify
  22137. * it under the terms of the GNU Lesser General Public License as published
  22138. * by the Free Software Foundation, either version 3 of the License, or
  22139. * (at your option) any later version.
  22140. *
  22141. * This program is distributed in the hope that it will be useful,
  22142. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22143. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22144. * GNU Lesser General Public License for more details.
  22145. *
  22146. * You should have received a copy of the GNU Lesser General Public License
  22147. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22148. *
  22149. * video-source.ts
  22150. * HTMLVideoElement-based source of data
  22151. */
  22152. /** A message to be displayed if a video can't autoplay and user interaction is required */
  22153. const ALERT_MESSAGE = 'Tap on the screen to start';
  22154. /** Whether or not we have displayed the ALERT_MESSAGE */
  22155. let displayedAlertMessage = false;
  22156. /**
  22157. * HTMLVideoElement-based source of data
  22158. */
  22159. class VideoSource {
  22160. /**
  22161. * Constructor
  22162. */
  22163. constructor(video) {
  22164. Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
  22165. this._video = video;
  22166. this._media = null;
  22167. }
  22168. /**
  22169. * A type-identifier of the source of data
  22170. * @internal
  22171. */
  22172. get _type() {
  22173. return 'video';
  22174. }
  22175. /**
  22176. * Get media
  22177. * @internal
  22178. */
  22179. get _data() {
  22180. if (this._media == null)
  22181. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  22182. return this._media;
  22183. }
  22184. /**
  22185. * Stats related to this source of data
  22186. * @internal
  22187. */
  22188. get _stats() {
  22189. const media = this._media;
  22190. if (media != null)
  22191. return `${media.width}x${media.height} video`;
  22192. else
  22193. return 'uninitialized video';
  22194. }
  22195. /**
  22196. * Initialize this source of data
  22197. * @returns a promise that resolves as soon as this source of data is initialized
  22198. * @internal
  22199. */
  22200. _init() {
  22201. Utils.log(`Initializing ${this._type} source...`);
  22202. // prepare the video before loading the SpeedyMedia!
  22203. return this._prepareVideo(this._video).then(video => {
  22204. Utils.log('The video is prepared');
  22205. return speedy_vision_default().load(video).then(media => {
  22206. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  22207. this._media = media;
  22208. });
  22209. });
  22210. }
  22211. /**
  22212. * Release this source of data
  22213. * @returns a promise that resolves as soon as this source of data is released
  22214. * @internal
  22215. */
  22216. _release() {
  22217. if (this._media)
  22218. this._media.release();
  22219. this._media = null;
  22220. return speedy_vision_default().Promise.resolve();
  22221. }
  22222. /**
  22223. * Handle browser-specific quirks for <video> elements
  22224. * @param video a video element
  22225. * @returns a promise that resolves to the input video
  22226. */
  22227. _prepareVideo(video) {
  22228. // WebKit <video> policies for iOS:
  22229. // https://webkit.org/blog/6784/new-video-policies-for-ios/
  22230. // required on iOS; nice to have in all browsers
  22231. video.setAttribute('playsinline', '');
  22232. // handle autoplay
  22233. return this._handleAutoPlay(video).then(video => {
  22234. // handle WebKit quirks
  22235. if (Utils.isWebKit()) {
  22236. // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
  22237. // on iOS 15.2-17.3, this hack doesn't seem necessary, but works okay
  22238. if (video.hidden) {
  22239. video.hidden = false;
  22240. video.style.setProperty('opacity', '0');
  22241. video.style.setProperty('position', 'fixed'); // make sure that it's visible on-screen
  22242. video.style.setProperty('left', '0');
  22243. video.style.setProperty('top', '0');
  22244. //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
  22245. //video.style.setProperty('visibility', 'hidden'); // doesn't work either
  22246. }
  22247. }
  22248. // done
  22249. return video;
  22250. });
  22251. }
  22252. /**
  22253. * Handle browser-specific quirks for videos marked with autoplay
  22254. * @param video a <video> marked with autoplay
  22255. * @returns a promise that resolves to the input video
  22256. */
  22257. _handleAutoPlay(video) {
  22258. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  22259. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  22260. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  22261. // nothing to do?
  22262. if (!video.autoplay)
  22263. return speedy_vision_default().Promise.resolve(video);
  22264. // videos marked with autoplay should be muted
  22265. if (!video.muted) {
  22266. Utils.warning('Videos marked with autoplay should be muted', video);
  22267. video.muted = true;
  22268. }
  22269. // the browser may not honor the autoplay attribute if the video is not
  22270. // visible on-screen. So, let's try to play the video in any case.
  22271. return this._waitUntilPlayable(video).then(video => {
  22272. // try to play the video
  22273. const promise = video.play();
  22274. // handle older browsers
  22275. if (promise === undefined)
  22276. return video;
  22277. // resolve if successful
  22278. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22279. promise.then(() => resolve(video), error => {
  22280. // can't play the video
  22281. Utils.error(`Can't autoplay video!`, error, video);
  22282. // autoplay is blocked for some reason
  22283. if (error.name == 'NotAllowedError') {
  22284. Utils.warning('Tip: allow manual playback');
  22285. if (Utils.isIOS())
  22286. Utils.warning('Is low power mode on?');
  22287. // User interaction is required to play the video. We can
  22288. // solve this here (easy and convenient to do) or at the
  22289. // application layer (for a better user experience). If the
  22290. // latter is preferred, just disable autoplay and play the
22291. // video programmatically.
  22292. if (video.hidden || !video.controls || video.parentNode === null) {
  22293. // this is added for convenience
  22294. document.body.addEventListener('pointerdown', () => video.play());
  22295. // ask only once for user interaction
  22296. if (!displayedAlertMessage) {
  22297. alert(ALERT_MESSAGE);
  22298. displayedAlertMessage = true;
  22299. }
  22300. // XXX what if the Session mode is inline? In this
  22301. // case, this convenience code may be undesirable.
  22302. // A workaround is to disable autoplay.
  22303. }
  22304. /*else {
  22305. // play the video after the first interaction with the page
  22306. const polling = setInterval(() => {
  22307. video.play().then(() => clearInterval(polling));
  22308. }, 500);
  22309. }*/
  22310. }
  22311. // unsupported media source
  22312. else if (error.name == 'NotSupportedError') {
  22313. reject(new NotSupportedError('Unsupported video format', error));
  22314. return;
  22315. }
  22316. // done
  22317. resolve(video);
  22318. });
  22319. });
  22320. });
  22321. }
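/*
 * As noted above, the alert-based fallback can be avoided at the application
 * layer. A minimal sketch (assuming you control the <video> element and prefer
 * your own UI): drop the autoplay attribute, keep the video muted and start
 * playback after the first user gesture.
 *
 *     const video = document.querySelector('video');
 *     video.muted = true;
 *     document.body.addEventListener('pointerdown', () => {
 *         video.play().catch(err => console.error(err));
 *     }, { once: true });
 */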
  22322. /**
  22323. * Wait for the input video to be playable
  22324. * @param video
  22325. * @returns a promise that resolves to the input video when it can be played through to the end
  22326. */
  22327. _waitUntilPlayable(video) {
  22328. const TIMEOUT = 15000, INTERVAL = 500;
  22329. if (video.readyState >= 4)
  22330. return speedy_vision_default().Promise.resolve(video);
  22331. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22332. let ms = 0, t = setInterval(() => {
  22333. if (video.readyState >= 4) { // canplaythrough
  22334. clearInterval(t);
  22335. resolve(video);
  22336. }
  22337. else if ((ms += INTERVAL) >= TIMEOUT) {
  22338. clearInterval(t);
  22339. reject(new TimeoutError('The video took too long to load'));
  22340. }
  22341. }, INTERVAL);
  22342. });
  22343. }
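/*
 * A possible alternative to the polling above (a sketch, not what this bundle
 * does): listen for the 'canplaythrough' event and race it against a timeout.
 *
 *     function waitUntilPlayable(video, timeout = 15000) {
 *         if (video.readyState >= 4)
 *             return Promise.resolve(video);
 *         return new Promise((resolve, reject) => {
 *             const timer = setTimeout(() => reject(new Error('The video took too long to load')), timeout);
 *             video.addEventListener('canplaythrough', () => {
 *                 clearTimeout(timer);
 *                 resolve(video);
 *             }, { once: true });
 *         });
 *     }
 */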
  22344. }
  22345. ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
  22346. /*
  22347. * MARTINS.js
  22348. * GPU-accelerated Augmented Reality for the web
  22349. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22350. *
  22351. * This program is free software: you can redistribute it and/or modify
  22352. * it under the terms of the GNU Lesser General Public License as published
  22353. * by the Free Software Foundation, either version 3 of the License, or
  22354. * (at your option) any later version.
  22355. *
  22356. * This program is distributed in the hope that it will be useful,
  22357. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22358. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22359. * GNU Lesser General Public License for more details.
  22360. *
  22361. * You should have received a copy of the GNU Lesser General Public License
  22362. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22363. *
  22364. * canvas-source.ts
  22365. * HTMLCanvasElement-based source of data
  22366. */
  22367. /**
  22368. * HTMLCanvasElement-based source of data
  22369. */
  22370. class CanvasSource {
  22371. /**
  22372. * Constructor
  22373. */
  22374. constructor(canvas) {
  22375. Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
  22376. this._canvas = canvas;
  22377. this._media = null;
  22378. }
  22379. /**
  22380. * A type-identifier of the source of data
  22381. * @internal
  22382. */
  22383. get _type() {
  22384. return 'canvas';
  22385. }
  22386. /**
  22387. * Get media
  22388. * @internal
  22389. */
  22390. get _data() {
  22391. if (this._media == null)
  22392. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  22393. return this._media;
  22394. }
  22395. /**
  22396. * Stats related to this source of data
  22397. * @internal
  22398. */
  22399. get _stats() {
  22400. const media = this._media;
  22401. if (media != null)
  22402. return `${media.width}x${media.height} canvas`;
  22403. else
  22404. return 'uninitialized canvas';
  22405. }
  22406. /**
  22407. * Initialize this source of data
  22408. * @returns a promise that resolves as soon as this source of data is initialized
  22409. * @internal
  22410. */
  22411. _init() {
  22412. return speedy_vision_default().load(this._canvas).then(media => {
  22413. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  22414. this._media = media;
  22415. });
  22416. }
  22417. /**
  22418. * Release this source of data
  22419. * @returns a promise that resolves as soon as this source of data is released
  22420. * @internal
  22421. */
  22422. _release() {
  22423. if (this._media)
  22424. this._media.release();
  22425. this._media = null;
  22426. return speedy_vision_default().Promise.resolve();
  22427. }
  22428. }
  22429. ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
  22430. /*
  22431. * MARTINS.js
  22432. * GPU-accelerated Augmented Reality for the web
  22433. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22434. *
  22435. * This program is free software: you can redistribute it and/or modify
  22436. * it under the terms of the GNU Lesser General Public License as published
  22437. * by the Free Software Foundation, either version 3 of the License, or
  22438. * (at your option) any later version.
  22439. *
  22440. * This program is distributed in the hope that it will be useful,
  22441. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22442. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22443. * GNU Lesser General Public License for more details.
  22444. *
  22445. * You should have received a copy of the GNU Lesser General Public License
  22446. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22447. *
  22448. * camera-source.ts
  22449. * Webcam-based source of data
  22450. */
  22451. /** Default options for camera sources */
  22452. const DEFAULT_CAMERA_OPTIONS = {
  22453. resolution: 'md',
  22454. aspectRatio: 16 / 9,
  22455. constraints: { facingMode: 'environment' },
  22456. };
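/*
 * These defaults are shallow-merged with user options in the CameraSource
 * constructor (Object.assign) and end up as getUserMedia constraints in
 * _init(). For example (illustrative values):
 *
 *     // -> { resolution: 'lg', aspectRatio: 16/9, constraints: { facingMode: 'environment' } }
 *     const options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, { resolution: 'lg' });
 *
 * Note that a user-provided constraints object replaces the default one as a
 * whole, since the merge is shallow.
 */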
  22457. /**
  22458. * Webcam-based source of data
  22459. */
  22460. class CameraSource extends VideoSource {
  22461. /**
  22462. * Constructor
  22463. */
  22464. constructor(options) {
  22465. const video = document.createElement('video');
  22466. super(video);
  22467. this._cameraVideo = video;
  22468. this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
  22469. }
  22470. /**
  22471. * Camera resolution
  22472. */
  22473. get resolution() {
  22474. return this._options.resolution;
  22475. }
  22476. /**
  22477. * Initialize this source of data
  22478. * @returns a promise that resolves as soon as this source of data is initialized
  22479. * @internal
  22480. */
  22481. _init() {
  22482. Utils.log('Accessing the webcam...');
  22483. // validate
  22484. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  22485. throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
  22486. // set up media constraints
  22487. const options = this._options;
  22488. const size = Utils.resolution(options.resolution, options.aspectRatio);
  22489. const constraints = {
  22490. audio: false,
  22491. video: Object.assign({ width: size.width, height: size.height }, options.constraints)
  22492. };
  22493. // load camera stream
  22494. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22495. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  22496. const video = this._cameraVideo;
  22497. video.onloadedmetadata = () => {
  22498. const promise = video.play();
  22499. const success = 'Access to the webcam has been granted.';
  22500. // handle older browsers
  22501. if (promise === undefined) {
  22502. Utils.log(success);
  22503. resolve(video);
  22504. return;
  22505. }
  22506. // handle promise
  22507. promise.then(() => {
  22508. Utils.log(success);
  22509. resolve(video);
  22510. }).catch(error => {
  22511. reject(new IllegalOperationError('Webcam error!', error));
  22512. });
  22513. };
  22514. video.setAttribute('playsinline', '');
  22515. video.setAttribute('autoplay', '');
  22516. video.setAttribute('muted', '');
  22517. video.srcObject = stream;
  22518. }).catch(error => {
  22519. reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
  22520. });
22521. }).then(_ => super._init()); // this will call VideoSource._prepareVideo()
  22522. }
  22523. /**
  22524. * Release this source of data
  22525. * @returns a promise that resolves as soon as this source of data is released
  22526. * @internal
  22527. */
  22528. _release() {
  22529. const stream = this._cameraVideo.srcObject;
  22530. const tracks = stream.getTracks();
  22531. // stop camera feed
  22532. tracks.forEach(track => track.stop());
  22533. this._cameraVideo.onloadedmetadata = null;
  22534. this._cameraVideo.srcObject = null;
  22535. // release the media
  22536. return super._release();
  22537. }
  22538. }
  22539. ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
  22540. /*
  22541. * MARTINS.js
  22542. * GPU-accelerated Augmented Reality for the web
  22543. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22544. *
  22545. * This program is free software: you can redistribute it and/or modify
  22546. * it under the terms of the GNU Lesser General Public License as published
  22547. * by the Free Software Foundation, either version 3 of the License, or
  22548. * (at your option) any later version.
  22549. *
  22550. * This program is distributed in the hope that it will be useful,
  22551. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22552. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22553. * GNU Lesser General Public License for more details.
  22554. *
  22555. * You should have received a copy of the GNU Lesser General Public License
  22556. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22557. *
  22558. * source-factory.ts
  22559. * Factory of sources of data
  22560. */
  22561. /**
  22562. * Factory of sources of data
  22563. */
  22564. class SourceFactory {
  22565. /**
  22566. * Create a <video>-based source of data
  22567. * @param video video element
  22568. */
  22569. static Video(video) {
  22570. return new VideoSource(video);
  22571. }
  22572. /**
  22573. * Create a <canvas>-based source of data
  22574. * @param canvas canvas element
  22575. */
  22576. static Canvas(canvas) {
  22577. return new CanvasSource(canvas);
  22578. }
  22579. /**
  22580. * Create a Webcam-based source of data
  22581. * @param options optional options object
  22582. */
  22583. static Camera(options = {}) {
  22584. return new CameraSource(options);
  22585. }
  22586. }
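/*
 * Typical usage of the factory above (illustrative; the same factory is also
 * exposed as Martins.Source further below):
 *
 *     const webcam = SourceFactory.Camera({ resolution: 'md' });
 *     const video = SourceFactory.Video(document.querySelector('video'));
 *     const canvas = SourceFactory.Canvas(document.querySelector('canvas'));
 */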
  22587. ;// CONCATENATED MODULE: ./src/main.ts
  22588. /*
  22589. * MARTINS.js
  22590. * GPU-accelerated Augmented Reality for the web
  22591. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22592. *
  22593. * This program is free software: you can redistribute it and/or modify
  22594. * it under the terms of the GNU Lesser General Public License as published
  22595. * by the Free Software Foundation, either version 3 of the License, or
  22596. * (at your option) any later version.
  22597. *
  22598. * This program is distributed in the hope that it will be useful,
  22599. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22600. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22601. * GNU Lesser General Public License for more details.
  22602. *
  22603. * You should have received a copy of the GNU Lesser General Public License
  22604. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22605. *
  22606. * main.ts
  22607. * Entry point
  22608. */
  22609. /**
  22610. * GPU-accelerated Augmented Reality for the web
  22611. */
  22612. class Martins {
  22613. /**
  22614. * Start a new session
  22615. * @param options
  22616. * @returns a promise that resolves to a new session
  22617. */
  22618. static startSession(options) {
  22619. return Session.instantiate(options);
  22620. }
  22621. /**
  22622. * Trackers
  22623. */
  22624. static get Tracker() {
  22625. return TrackerFactory;
  22626. }
  22627. /**
  22628. * Sources of data
  22629. */
  22630. static get Source() {
  22631. return SourceFactory;
  22632. }
  22633. /**
  22634. * Create a viewport
  22635. * @param settings
  22636. * @returns a new viewport with the specified settings
  22637. */
  22638. static Viewport(settings) {
  22639. return new BaseViewport(settings);
  22640. }
  22641. /**
  22642. * Global Settings
  22643. */
  22644. static get Settings() {
  22645. return Settings;
  22646. }
  22647. /**
  22648. * Engine version
  22649. */
  22650. static get version() {
22651. return "0.2.0";
  22655. }
  22656. /**
  22657. * Speedy Vision
  22658. */
  22659. static get Speedy() {
  22660. return (speedy_vision_default());
  22661. }
  22662. /**
  22663. * Checks if the engine can be run in the browser the client is using
  22664. * @returns true if the engine is compatible with the browser
  22665. */
  22666. static isSupported() {
  22667. return Session.isSupported();
  22668. }
  22669. }
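/*
 * A minimal startup sketch using the public facade above (illustrative; the
 * exact shape of the options accepted by startSession() is defined by
 * Session.instantiate() elsewhere in this bundle and is not repeated here):
 *
 *     if (Martins.isSupported()) {
 *         const tracker = Martins.Tracker.ImageTracker();
 *         const source = Martins.Source.Camera();
 *         const viewport = Martins.Viewport({ ... viewport settings ... });
 *         Martins.startSession({ ... wire source, tracker and viewport here ... })
 *             .then(session => { ... });
 *     }
 */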
  22670. // Freeze the namespace
  22671. Object.freeze(Martins);
  22672. // Add Speedy Vision to global scope
  22673. ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
  22674. // Display a notice
  22675. Utils.log(`MARTINS.js version ${Martins.version}. ` +
  22676. `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
  22677. "https://github.com/alemart/martins-js");
  22678. })();
  22679. __webpack_exports__ = __webpack_exports__["default"];
  22680. /******/ return __webpack_exports__;
  22681. /******/ })()
  22682. ;
  22683. });