encantar.js — 1.0 MB (≈14,300 lines)

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
  1. /*!
  2. * encantar.js version 0.4.0
  3. * GPU-accelerated Augmented Reality for the web
  4. * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  5. * https://github.com/alemart/encantar-js
  6. *
  7. * @license LGPL-3.0-or-later
  8. * Date: 2024-11-25T01:46:40.728Z
  9. */
  10. (function webpackUniversalModuleDefinition(root, factory) {
  11. if(typeof exports === 'object' && typeof module === 'object')
  12. module.exports = factory();
  13. else if(typeof define === 'function' && define.amd)
  14. define([], factory);
  15. else if(typeof exports === 'object')
  16. exports["AR"] = factory();
  17. else
  18. root["AR"] = factory();
  19. })(self, () => {
  20. return /******/ (() => { // webpackBootstrap
  21. /******/ var __webpack_modules__ = ({
  22. /***/ 774:
  23. /***/ ((module) => {
  24. /*!
  25. * Speedy Vision version 0.9.1
  26. * GPU-accelerated Computer Vision for JavaScript
  27. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  28. * https://github.com/alemart/speedy-vision
  29. *
  30. * @license Apache-2.0
  31. * Date: 2024-07-03T02:16:25.769Z
  32. */
  33. (function webpackUniversalModuleDefinition(root, factory) {
  34. if(true)
  35. module.exports = factory();
  36. else {}
  37. })(self, () => {
  38. return /******/ (() => { // webpackBootstrap
  39. /******/ var __webpack_modules__ = ({
  40. /***/ 2199:
  41. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
  42. "use strict";
  43. /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
  44. /* harmony export */ w: () => (/* binding */ Settings)
  45. /* harmony export */ });
  46. /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(6634);
  47. /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1001);
  48. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(9037);
  49. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(8581);
  50. /*
  51. * speedy-vision.js
  52. * GPU-accelerated Computer Vision for JavaScript
  53. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  54. *
  55. * Licensed under the Apache License, Version 2.0 (the "License");
  56. * you may not use this file except in compliance with the License.
  57. * You may obtain a copy of the License at
  58. *
  59. * http://www.apache.org/licenses/LICENSE-2.0
  60. *
  61. * Unless required by applicable law or agreed to in writing, software
  62. * distributed under the License is distributed on an "AS IS" BASIS,
  63. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  64. * See the License for the specific language governing permissions and
  65. * limitations under the License.
  66. *
  67. * settings.js
  68. * Global settings
  69. */
  70. /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
  71. /** @typedef {"raf" | "asap"} GPUPollingMode */
  72. /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
  73. /** @type {GPUPollingMode} Default GPU polling mode */
  74. const DEFAULT_GPU_POLLING_MODE = 'raf';
  75. /** @type {GPUPollingMode} GPU polling mode */
  76. let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
  77. /** @type {LoggingMode} logging mode */
  78. let loggingMode = 'default';
  79. /**
  80. * Global settings
  81. */
  82. class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q {
  83. /**
  84. * Power preference of the WebGL context
  85. * @returns {PowerPreference}
  86. */
  87. static get powerPreference() {
  88. return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
  89. }
  90. /**
  91. * Power preference of the WebGL context
  92. * @param {PowerPreference} value
  93. */
  94. static set powerPreference(value) {
  95. _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
  96. }
  97. /**
  98. * GPU polling mode
  99. * @returns {GPUPollingMode}
  100. */
  101. static get gpuPollingMode() {
  102. return gpuPollingMode;
  103. }
  104. /**
  105. * GPU polling mode
  106. * @param {GPUPollingMode} value
  107. */
  108. static set gpuPollingMode(value) {
  109. if (value !== 'raf' && value !== 'asap') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
  110. gpuPollingMode = value;
  111. }
  112. /**
  113. * Logging mode
  114. * @returns {LoggingMode}
  115. */
  116. static get logging() {
  117. return loggingMode;
  118. }
  119. /**
  120. * Logging mode
  121. * @param {LoggingMode} mode
  122. */
  123. static set logging(mode) {
  124. if (mode !== 'default' && mode !== 'none' && mode !== 'diagnostic') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);else if (mode === 'diagnostic') _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
  125. loggingMode = mode;
  126. }
  127. }
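// Illustrative sketch (not invoked anywhere): how these global settings could be
// adjusted from application code. Only the validation of gpuPollingMode and logging
// is shown above; the 'high-performance' string is a standard WebGL powerPreference
// value and is assumed here, since powerPreference is simply forwarded to SpeedyGL.
function exampleSettingsUsage() {
  Settings.gpuPollingMode = 'asap';              // poll the GPU as soon as possible instead of on requestAnimationFrame
  Settings.logging = 'none';                     // silence the library's log output
  Settings.powerPreference = 'high-performance'; // forwarded to SpeedyGL (assumed WebGL value)
  return Settings.gpuPollingMode;                // 'asap'
}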
  128. /***/ }),
  129. /***/ 6306:
  130. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4248__) => {
  131. "use strict";
  132. /* harmony export */ __nested_webpack_require_4248__.d(__nested_webpack_exports__, {
  133. /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
  134. /* harmony export */ });
  135. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4248__(6465);
  136. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4248__(9037);
  137. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4248__(8581);
  138. /*
  139. * speedy-vision.js
  140. * GPU-accelerated Computer Vision for JavaScript
  141. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  142. *
  143. * Licensed under the Apache License, Version 2.0 (the "License");
  144. * you may not use this file except in compliance with the License.
  145. * You may obtain a copy of the License at
  146. *
  147. * http://www.apache.org/licenses/LICENSE-2.0
  148. *
  149. * Unless required by applicable law or agreed to in writing, software
  150. * distributed under the License is distributed on an "AS IS" BASIS,
  151. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  152. * See the License for the specific language governing permissions and
  153. * limitations under the License.
  154. *
  155. * speedy-matrix-expr.js
  156. * Symbolic matrix expressions
  157. */
  158. /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
  159. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
  160. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
  161. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  162. /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
  163. /** @const {Dtype2BufferType} */
  164. const DTYPE_TO_BUFFER_TYPE = Object.freeze({
  165. 'float32': Float32Array
  166. });
  167. /**
  168. * @abstract Matrix expression
  169. * It's an opaque object representing an algebraic
  170. * expression. It has no data attached to it.
  171. */
  172. class SpeedyMatrixExpr {
  173. /**
  174. * Constructor
  175. * @param {number} rows
  176. * @param {number} columns
  177. * @param {SpeedyMatrixDtype} dtype
  178. */
  179. constructor(rows, columns, dtype) {
  180. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
  181. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
  182. /** @type {number} number of rows */
  183. this._rows = rows | 0;
  184. /** @type {number} number of columns */
  185. this._columns = columns | 0;
  186. /** @type {SpeedyMatrixDtype} data type */
  187. this._dtype = dtype;
  188. }
  189. /**
  190. * Number of rows
  191. * @returns {number}
  192. */
  193. get rows() {
  194. return this._rows;
  195. }
  196. /**
  197. * Number of columns
  198. * @returns {number}
  199. */
  200. get columns() {
  201. return this._columns;
  202. }
  203. /**
  204. * Data type
  205. * @returns {SpeedyMatrixDtype}
  206. */
  207. get dtype() {
  208. return this._dtype;
  209. }
  210. /**
  211. * Default data type
  212. * @returns {SpeedyMatrixDtype}
  213. */
  214. static get DEFAULT_DTYPE() {
  215. return 'float32';
  216. }
  217. /**
  218. * Buffer types
  219. * @returns {Dtype2BufferType}
  220. */
  221. static get BUFFER_TYPE() {
  222. return DTYPE_TO_BUFFER_TYPE;
  223. }
  224. /**
  225. * Matrix addition
  226. * @param {SpeedyMatrixExpr} expr
  227. * @returns {SpeedyMatrixExpr}
  228. */
  229. plus(expr) {
  230. return new SpeedyMatrixAddExpr(this, expr);
  231. }
  232. /**
  233. * Matrix subtraction
  234. * @param {SpeedyMatrixExpr} expr
  235. * @returns {SpeedyMatrixExpr}
  236. */
  237. minus(expr) {
  238. return new SpeedyMatrixSubtractExpr(this, expr);
  239. }
  240. /**
  241. * Matrix multiplication
  242. * @param {SpeedyMatrixExpr|number} expr
  243. * @returns {SpeedyMatrixExpr}
  244. */
  245. times(expr) {
  246. if (typeof expr === 'number') return new SpeedyMatrixScaleExpr(this, expr);else return new SpeedyMatrixMultiplyExpr(this, expr);
  247. }
  248. /**
  249. * Matrix transposition
  250. * @returns {SpeedyMatrixExpr}
  251. */
  252. transpose() {
  253. return new SpeedyMatrixTransposeExpr(this);
  254. }
  255. /**
  256. * Matrix inversion
  257. * @returns {SpeedyMatrixExpr}
  258. */
  259. inverse() {
  260. return new SpeedyMatrixInvertExpr(this);
  261. }
  262. /**
  263. * Component-wise multiplication
  264. * @param {SpeedyMatrixExpr} expr
  265. * @returns {SpeedyMatrixExpr}
  266. */
  267. compMult(expr) {
  268. return new SpeedyMatrixCompMultExpr(this, expr);
  269. }
  270. /**
  271. * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
  272. * @param {SpeedyMatrixExpr} expr
  273. * @returns {SpeedyMatrixExpr}
  274. */
  275. ldiv(expr) {
  276. return new SpeedyMatrixLdivExpr(this, expr);
  277. }
  278. /**
  279. * Returns a human-readable string representation of the matrix expression
  280. * @returns {string}
  281. */
  282. toString() {
  283. return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
  284. }
  285. /**
  286. * Evaluate this expression
  287. * @abstract
  288. * @param {WebAssembly.Instance} wasm
  289. * @param {SpeedyMatrixWASMMemory} memory
  290. * @returns {SpeedyMatrix}
  291. */
  292. _evaluate(wasm, memory) {
  293. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  294. }
  295. }
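// Illustrative sketch (not invoked anywhere): matrix expressions compose symbolically
// and carry no data; they are only computed when _evaluate() runs against the WASM
// routines. A and B below are assumed to be square expressions of the same shape.
function exampleExpressionChaining(A, B) {
  const sum = A.plus(B).times(2);       // (A + B) scaled by 2 -> SpeedyMatrixScaleExpr
  const prod = A.times(B).transpose();  // (A B)^T             -> SpeedyMatrixTransposeExpr
  const hadamard = A.compMult(B);       // component-wise product
  return [sum, prod, hadamard];
}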
  296. const {
  297. SpeedyMatrix
  298. } = __nested_webpack_require_4248__(4188);
  299. /**
  300. * @abstract operation storing a temporary matrix
  301. */
  302. class SpeedyMatrixTempExpr extends SpeedyMatrixExpr {
  303. /**
  304. * Constructor
  305. * @param {number} rows
  306. * @param {number} columns
  307. * @param {SpeedyMatrixDtype} dtype
  308. */
  309. constructor(rows, columns, dtype) {
  310. super(rows, columns, dtype);
  311. /** @type {SpeedyMatrix} holds the results of a computation */
  312. this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
  313. }
  314. }
  315. /**
  316. * @abstract unary operation
  317. */
  318. class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr {
  319. /**
  320. * Constructor
  321. * @param {number} rows rows of the output matrix
  322. * @param {number} columns columns of the output matrix
  323. * @param {SpeedyMatrixExpr} operand
  324. */
  325. constructor(rows, columns, operand) {
  326. super(rows, columns, operand.dtype);
  327. /** @type {SpeedyMatrixExpr} operand */
  328. this._operand = operand;
  329. }
  330. /**
  331. * Evaluate this expression
  332. * @param {WebAssembly.Instance} wasm
  333. * @param {SpeedyMatrixWASMMemory} memory
  334. * @returns {SpeedyMatrix}
  335. */
  336. _evaluate(wasm, memory) {
  337. const operand = this._operand._evaluate(wasm, memory);
  338. const result = this._tempMatrix;
  339. // allocate matrices
  340. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  341. const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
  342. // copy operand to WASM memory
  343. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
  344. // run the WASM routine
  345. this._compute(wasm, memory, resultptr, operandptr);
  346. // copy result from WASM memory
  347. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  348. // deallocate matrices
  349. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
  350. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  351. // done!
  352. return result;
  353. }
  354. /**
  355. * Compute the result of this operation
  356. * @abstract
  357. * @param {WebAssembly.Instance} wasm
  358. * @param {SpeedyMatrixWASMMemory} memory
  359. * @param {number} resultptr pointer to Mat32
  360. * @param {number} operandptr pointer to Mat32
  361. */
  362. _compute(wasm, memory, resultptr, operandptr) {
  363. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  364. }
  365. }
  366. /**
  367. * @abstract binary operation
  368. */
  369. class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr {
  370. /**
  371. * Constructor
  372. * @param {number} rows rows of the output matrix
  373. * @param {number} columns columns of the output matrix
  374. * @param {SpeedyMatrixExpr} left left operand
  375. * @param {SpeedyMatrixExpr} right right operand
  376. */
  377. constructor(rows, columns, left, right) {
  378. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
  379. super(rows, columns, left.dtype);
  380. /** @type {SpeedyMatrixExpr} left operand */
  381. this._left = left;
  382. /** @type {SpeedyMatrixExpr} right operand */
  383. this._right = right;
  384. }
  385. /**
  386. * Evaluate this expression
  387. * @param {WebAssembly.Instance} wasm
  388. * @param {SpeedyMatrixWASMMemory} memory
  389. * @returns {SpeedyMatrix}
  390. */
  391. _evaluate(wasm, memory) {
  392. const left = this._left._evaluate(wasm, memory);
  393. const right = this._right._evaluate(wasm, memory);
  394. const result = this._tempMatrix;
  395. // allocate matrices
  396. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  397. const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
  398. const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
  399. // copy input matrices to WASM memory
  400. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
  401. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
  402. // run the WASM routine
  403. this._compute(wasm, memory, resultptr, leftptr, rightptr);
  404. // copy output matrix from WASM memory
  405. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  406. // deallocate matrices
  407. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
  408. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
  409. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  410. // done!
  411. return result;
  412. }
  413. /**
  414. * Compute the result of this operation
  415. * @abstract
  416. * @param {WebAssembly.Instance} wasm
  417. * @param {SpeedyMatrixWASMMemory} memory
  418. * @param {number} resultptr pointer to Mat32
  419. * @param {number} leftptr pointer to Mat32
  420. * @param {number} rightptr pointer to Mat32
  421. */
  422. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  423. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  424. }
  425. }
  426. /**
  427. * Transpose matrix
  428. */
  429. class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr {
  430. /**
  431. * Constructor
  432. * @param {SpeedyMatrixExpr} operand
  433. */
  434. constructor(operand) {
  435. super(operand.columns, operand.rows, operand);
  436. }
  437. /**
  438. * Compute result = operand^T
  439. * @param {WebAssembly.Instance} wasm
  440. * @param {SpeedyMatrixWASMMemory} memory
  441. * @param {number} resultptr pointer to Mat32
  442. * @param {number} operandptr pointer to Mat32
  443. */
  444. _compute(wasm, memory, resultptr, operandptr) {
  445. wasm.exports.Mat32_transpose(resultptr, operandptr);
  446. }
  447. }
  448. /**
  449. * Invert square matrix
  450. */
  451. class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr {
  452. /**
  453. * Constructor
  454. * @param {SpeedyMatrixExpr} operand
  455. */
  456. constructor(operand) {
  457. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
  458. super(operand.rows, operand.columns, operand);
  459. /** @type {number} size of the matrix */
  460. this._size = operand.rows;
  461. }
  462. /**
  463. * Compute result = operand ^ (-1)
  464. * @param {WebAssembly.Instance} wasm
  465. * @param {SpeedyMatrixWASMMemory} memory
  466. * @param {number} resultptr pointer to Mat32
  467. * @param {number} operandptr pointer to Mat32
  468. */
  469. _compute(wasm, memory, resultptr, operandptr) {
  470. switch (this._size) {
  471. case 0:
  472. break;
  473. case 1:
  474. wasm.exports.Mat32_inverse1(resultptr, operandptr);
  475. break;
  476. case 2:
  477. wasm.exports.Mat32_inverse2(resultptr, operandptr);
  478. break;
  479. case 3:
  480. wasm.exports.Mat32_inverse3(resultptr, operandptr);
  481. break;
  482. default:
  483. wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
  484. break;
  485. }
  486. }
  487. }
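// Illustrative sketch (not invoked anywhere): inversion dispatches on the matrix size,
// using the closed-form Mat32_inverse1/2/3 kernels for 1x1 to 3x3 matrices and the
// QR-based Mat32_qr_inverse routine for anything larger.
function exampleInverse(M) {
  return M.inverse(); // symbolic SpeedyMatrixInvertExpr; evaluated later via _evaluate()
}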
  488. /**
  489. * Multiply matrix by a scalar value
  490. */
  491. class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr {
  492. /**
  493. * Constructor
  494. * @param {SpeedyMatrixExpr} operand
  495. * @param {number} scalar
  496. */
  497. constructor(operand, scalar) {
  498. super(operand.rows, operand.columns, operand);
  499. /** @type {number} scalar value */
  500. this._scalar = +scalar;
  501. }
  502. /**
  503. * Compute result = scalar * operand
  504. * @param {WebAssembly.Instance} wasm
  505. * @param {SpeedyMatrixWASMMemory} memory
  506. * @param {number} resultptr pointer to Mat32
  507. * @param {number} operandptr pointer to Mat32
  508. */
  509. _compute(wasm, memory, resultptr, operandptr) {
  510. wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
  511. }
  512. }
  513. /**
  514. * Matrix addition
  515. */
  516. class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr {
  517. /**
  518. * Constructor
  519. * @param {SpeedyMatrixExpr} left left operand
  520. * @param {SpeedyMatrixExpr} right right operand
  521. */
  522. constructor(left, right) {
  523. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  524. super(left.rows, left.columns, left, right);
  525. }
  526. /**
  527. * Compute result = left + right
  528. * @param {WebAssembly.Instance} wasm
  529. * @param {SpeedyMatrixWASMMemory} memory
  530. * @param {number} resultptr pointer to Mat32
  531. * @param {number} leftptr pointer to Mat32
  532. * @param {number} rightptr pointer to Mat32
  533. */
  534. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  535. wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
  536. }
  537. }
  538. /**
  539. * Matrix subtraction
  540. */
  541. class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr {
  542. /**
  543. * Constructor
  544. * @param {SpeedyMatrixExpr} left left operand
  545. * @param {SpeedyMatrixExpr} right right operand
  546. */
  547. constructor(left, right) {
  548. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  549. super(left.rows, left.columns, left, right);
  550. }
  551. /**
  552. * Compute result = left - right
  553. * @param {WebAssembly.Instance} wasm
  554. * @param {SpeedyMatrixWASMMemory} memory
  555. * @param {number} resultptr pointer to Mat32
  556. * @param {number} leftptr pointer to Mat32
  557. * @param {number} rightptr pointer to Mat32
  558. */
  559. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  560. wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
  561. }
  562. }
  563. /**
  564. * Matrix multiplication
  565. */
  566. class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr {
  567. /**
  568. * Constructor
  569. * @param {SpeedyMatrixExpr} left left operand
  570. * @param {SpeedyMatrixExpr} right right operand
  571. */
  572. constructor(left, right) {
  573. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
  574. super(left.rows, right.columns, left, right);
  575. }
  576. /**
  577. * Compute result = left * right
  578. * @param {WebAssembly.Instance} wasm
  579. * @param {SpeedyMatrixWASMMemory} memory
  580. * @param {number} resultptr pointer to Mat32
  581. * @param {number} leftptr pointer to Mat32
  582. * @param {number} rightptr pointer to Mat32
  583. */
  584. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  585. wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
  586. }
  587. }
  588. /**
  589. * Component-wise multiplication
  590. */
  591. class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr {
  592. /**
  593. * Constructor
  594. * @param {SpeedyMatrixExpr} left left operand
  595. * @param {SpeedyMatrixExpr} right right operand
  596. */
  597. constructor(left, right) {
  598. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  599. super(right.rows, right.columns, left, right);
  600. }
  601. /**
  602. * Compute result = left <compMult> right
  603. * @param {WebAssembly.Instance} wasm
  604. * @param {SpeedyMatrixWASMMemory} memory
  605. * @param {number} resultptr pointer to Mat32
  606. * @param {number} leftptr pointer to Mat32
  607. * @param {number} rightptr pointer to Mat32
  608. */
  609. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  610. wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
  611. }
  612. }
  613. /**
  614. * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
  615. */
  616. class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr {
  617. /**
  618. * Constructor
  619. * @param {SpeedyMatrixExpr} left left operand
  620. * @param {SpeedyMatrixExpr} right right operand
  621. */
  622. constructor(left, right) {
  623. const m = left.rows,
  624. n = left.columns;
  625. // TODO right doesn't need to be a column vector
  626. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
  627. super(n, 1, left, right);
  628. }
  629. /**
  630. * Compute result = left \ right
  631. * @param {WebAssembly.Instance} wasm
  632. * @param {SpeedyMatrixWASMMemory} memory
  633. * @param {number} resultptr pointer to Mat32
  634. * @param {number} leftptr pointer to Mat32
  635. * @param {number} rightptr pointer to Mat32
  636. */
  637. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  638. wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
  639. }
  640. }
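// Illustrative sketch (not invoked anywhere): left division solves an ordinary
// least-squares problem through the Mat32_qr_ols routine above. Per the constructor's
// assertion, A must be m x n with m >= n and b must be an m x 1 column vector;
// the result is the n x 1 minimizer of |A x - b|.
function exampleLeastSquares(A, b) {
  return A.ldiv(b);
}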
  641. /***/ }),
  642. /***/ 6465:
  643. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_21592__) => {
  644. "use strict";
  645. /* harmony export */ __nested_webpack_require_21592__.d(__nested_webpack_exports__, {
  646. /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
  647. /* harmony export */ });
  648. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_21592__(9192);
  649. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_21592__(8581);
  650. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_21592__(9037);
  651. /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_21592__(3816);
  652. /*
  653. * speedy-vision.js
  654. * GPU-accelerated Computer Vision for JavaScript
  655. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  656. *
  657. * Licensed under the Apache License, Version 2.0 (the "License");
  658. * you may not use this file except in compliance with the License.
  659. * You may obtain a copy of the License at
  660. *
  661. * http://www.apache.org/licenses/LICENSE-2.0
  662. *
  663. * Unless required by applicable law or agreed to in writing, software
  664. * distributed under the License is distributed on an "AS IS" BASIS,
  665. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  666. * See the License for the specific language governing permissions and
  667. * limitations under the License.
  668. *
  669. * speedy-matrix-wasm.js
  670. * WebAssembly bridge
  671. */
  672. /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  673. /**
  674. * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
  675. * @property {object} as
  676. * @property {WebAssembly.Memory} as.object
  677. * @property {Uint8Array} as.uint8
  678. * @property {Int32Array} as.int32
  679. * @property {Uint32Array} as.uint32
  680. * @property {Float32Array} as.float32
  681. * @property {Float64Array} as.float64
  682. */
  683. /**
  684. * @typedef {object} SpeedyMatrixWASMHandle
  685. * @property {WebAssembly.Instance} wasm
  686. * @property {SpeedyMatrixWASMMemory} memory
  687. * @property {WebAssembly.Module} module
  688. */
  689. /** @type {Uint8Array} WebAssembly binary */
  690. const WASM_BINARY = __nested_webpack_require_21592__(3575);
  691. /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
  692. let _instance = null;
  693. /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
  694. let _module = null;
  695. /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
  696. const _memory = (mem => ({
  697. as: {
  698. object: mem,
  699. uint8: new Uint8Array(mem.buffer),
  700. int32: new Int32Array(mem.buffer),
  701. uint32: new Uint32Array(mem.buffer),
  702. float32: new Float32Array(mem.buffer),
  703. float64: new Float64Array(mem.buffer)
  704. }
  705. }))(typeof WebAssembly === 'undefined' ? new Uint8Array(1024) :
  706. // use a filler
  707. new WebAssembly.Memory({
  708. initial: 16,
  709. // 1 MB
  710. maximum: 256
  711. }));
  712. /**
  713. * WebAssembly utilities
  714. */
  715. class SpeedyMatrixWASM {
  716. /**
  717. * Gets you the WASM instance, augmented memory & module
  718. * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
  719. */
  720. static ready() {
  721. // Check if WebAssembly is supported
  722. if (typeof WebAssembly === 'undefined') return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM('This application requires WebAssembly. Please update your system.'));
  723. // Endianness check
  724. if (!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN) return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`));
  725. // Get the WASM instance
  726. return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
  727. SpeedyMatrixWASM._ready(resolve, reject);
  728. });
  729. }
  730. /**
  731. * Synchronously gets you the WASM instance, augmented memory & module
  732. * @returns {SpeedyMatrixWASMHandle}
  733. */
  734. static get handle() {
  735. if (!_instance || !_module) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
  736. return {
  737. wasm: _instance,
  738. memory: _memory,
  739. module: _module
  740. };
  741. }
  742. /**
  743. * Gets you the WASM imports bound to a memory object
  744. * @param {SpeedyMatrixWASMMemory} memory
  745. * @returns {Object<string,Function>}
  746. */
  747. static imports(memory) {
  748. const obj = new SpeedyMatrixWASMImports(memory);
  749. return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype).filter(property => typeof obj[property] === 'function' && property !== 'constructor').reduce((imports, methodName) => (imports[methodName] = obj[methodName], imports), Object.create(null));
  750. }
  751. /**
  752. * Allocate a Mat32 in WebAssembly memory without copying any data
  753. * @param {WebAssembly.Instance} wasm
  754. * @param {SpeedyMatrixWASMMemory} memory
  755. * @param {SpeedyMatrix} matrix
  756. * @returns {number} pointer to the new Mat32
  757. */
  758. static allocateMat32(wasm, memory, matrix) {
  759. const dataptr = wasm.exports.malloc(matrix.data.byteLength);
  760. const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
  761. return matptr;
  762. }
  763. /**
  764. * Deallocate a Mat32 in WebAssembly
  765. * @param {WebAssembly.Instance} wasm
  766. * @param {SpeedyMatrixWASMMemory} memory
  767. * @param {number} matptr pointer to the allocated Mat32
  768. * @returns {number} NULL
  769. */
  770. static deallocateMat32(wasm, memory, matptr) {
  771. const dataptr = wasm.exports.Mat32_data(matptr);
  772. wasm.exports.free(matptr);
  773. wasm.exports.free(dataptr);
  774. return 0;
  775. }
  776. /**
  777. * Copy the data of a matrix to a WebAssembly Mat32
  778. * @param {WebAssembly.Instance} wasm
  779. * @param {SpeedyMatrixWASMMemory} memory
  780. * @param {number} matptr pointer to a Mat32
  781. * @param {SpeedyMatrix} matrix
  782. * @returns {number} matptr
  783. */
  784. static copyToMat32(wasm, memory, matptr, matrix) {
  785. // We assume the following:
  786. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  787. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  788. // 3. the data type is float32
  789. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  790. //matrix.dtype === 'float32' &&
  791. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  792. const dataptr = wasm.exports.Mat32_data(matptr);
  793. memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
  794. return matptr;
  795. }
  796. /**
  797. * Copy the data of a WebAssembly Mat32 to a matrix
  798. * @param {WebAssembly.Instance} wasm
  799. * @param {SpeedyMatrixWASMMemory} memory
  800. * @param {number} matptr pointer to a Mat32
  801. * @param {SpeedyMatrix} matrix
  802. * @returns {number} matptr
  803. */
  804. static copyFromMat32(wasm, memory, matptr, matrix) {
  805. // We assume the following:
  806. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  807. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  808. // 3. the data type is float32
  809. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  810. //matrix.dtype === 'float32' &&
  811. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  812. const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
  813. for (let offset = matrix.data.length - 1; offset >= 0; offset--) matrix.data[offset] = memory.as.float32[base + offset];
  814. return matptr;
  815. }
  816. /**
  817. * Polls the WebAssembly instance until it's ready
  818. * @param {function(SpeedyMatrixWASMHandle): void} resolve
  819. * @param {function(Error): void} reject
  820. * @param {number} [counter]
  821. */
  822. static _ready(resolve, reject, counter = 1000) {
  823. if (_instance !== null && _module !== null) resolve({
  824. wasm: _instance,
  825. memory: _memory,
  826. module: _module
  827. });else if (counter <= 0) reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));else setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
  828. }
  829. }
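// Illustrative sketch (not invoked anywhere): acquiring the WASM handle. ready() polls
// until the asynchronously loaded instance/module become available and resolves with
// the same { wasm, memory, module } triple returned by the synchronous handle getter.
function exampleWasmHandle() {
  return SpeedyMatrixWASM.ready().then(({ wasm, memory }) => {
    // wasm.exports carries the Mat32_* routines used by _compute() above;
    // memory.as exposes typed-array views (uint8, float32, ...) over the shared buffer.
    return { wasm, memory };
  });
}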
  830. /**
  831. * Methods called from WASM
  832. */
  833. class SpeedyMatrixWASMImports {
  834. /**
  835. * Constructor
  836. * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
  837. */
  838. constructor(memory) {
  839. // find all methods of this object
  840. const methodNames = Object.getOwnPropertyNames(this.constructor.prototype).filter(property => typeof this[property] === 'function').filter(property => property !== 'constructor');
  841. // bind all methods to this object
  842. methodNames.forEach(methodName => {
  843. this[methodName] = this[methodName].bind(this);
  844. });
  845. /** @type {SpeedyMatrixWASMMemory} WASM memory */
  846. this.memory = memory;
  847. /** @type {CStringUtils} utilities related to C strings */
  848. this.cstring = new CStringUtils(memory);
  849. // done!
  850. return Object.freeze(this);
  851. }
  852. /**
  853. * Prints a message
  854. * @param {number} ptr pointer to char
  855. */
  856. print(ptr) {
  857. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
  858. }
  859. /**
  860. * Throws an error
  861. * @param {number} ptr pointer to char
  862. */
  863. fatal(ptr) {
  864. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
  865. }
  866. /**
  867. * Fills a memory segment with a byte
  868. * @param {number} value byte
  869. * @param {number} start memory address, inclusive
  870. * @param {number} end memory address greater than start, exclusive
  871. */
  872. bytefill(value, start, end) {
  873. this.memory.as.uint8.fill(value, start, end);
  874. }
  875. /**
  876. * Copy a memory segment to another segment
  877. * @param {number} target memory address, where we'll start writing
  878. * @param {number} start memory address, where we'll start copying (inclusive)
  879. * @param {number} end memory address, where we'll end the copy (exclusive)
  880. */
  881. copyWithin(target, start, end) {
  882. this.memory.as.uint8.copyWithin(target, start, end);
  883. }
  884. }
  885. /**
  886. * Utilities related to C strings
  887. */
  888. class CStringUtils {
  889. /**
  890. * Constructor
  891. * @param {SpeedyMatrixWASMMemory} memory
  892. */
  893. constructor(memory) {
  894. /** @type {TextDecoder} */
  895. this._decoder = new TextDecoder('utf-8');
  896. /** @type {SpeedyMatrixWASMMemory} */
  897. this._memory = memory;
  898. }
  899. /**
  900. * Convert a C string to a JavaScript string
  901. * @param {number} ptr pointer to char
  902. * @returns {string}
  903. */
  904. get(ptr) {
  905. const byte = this._memory.as.uint8;
  906. const size = this._memory.as.uint8.byteLength;
  907. let p = ptr;
  908. while (p < size && 0 !== byte[p]) ++p;
  909. return this._decoder.decode(byte.subarray(ptr, p));
  910. }
  911. }
  912. /**
  913. * WebAssembly loader
  914. * @param {SpeedyMatrixWASMMemory} memory
  915. */
  916. (function loadWASM(memory) {
  917. const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
  918. // Skip if WebAssembly is unsupported
  919. if (typeof WebAssembly === 'undefined') return;
  920. // Load the WASM binary
  921. _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY).then(data => base64decode(data)).then(bytes => WebAssembly.instantiate(bytes, {
  922. env: Object.assign({
  923. memory: memory.as.object
  924. }, SpeedyMatrixWASM.imports(memory))
  925. })).then(wasm => {
  926. _instance = wasm.instance;
  927. _module = wasm.module;
  928. wasm.instance.exports.srand(Date.now() * 0.001 & 0xffffffff); // srand(time(NULL))
  929. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
  930. }).catch(err => {
  931. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
  932. });
  933. })(_memory);
  934. /***/ }),
  935. /***/ 4188:
  936. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_33268__) => {
  937. "use strict";
  938. __nested_webpack_require_33268__.r(__nested_webpack_exports__);
  939. /* harmony export */ __nested_webpack_require_33268__.d(__nested_webpack_exports__, {
  940. /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
  941. /* harmony export */ });
  942. /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_33268__(6306);
  943. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_33268__(6465);
  944. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_33268__(9192);
  945. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_33268__(9037);
  946. /*
  947. * speedy-vision.js
  948. * GPU-accelerated Computer Vision for JavaScript
  949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  950. *
  951. * Licensed under the Apache License, Version 2.0 (the "License");
  952. * you may not use this file except in compliance with the License.
  953. * You may obtain a copy of the License at
  954. *
  955. * http://www.apache.org/licenses/LICENSE-2.0
  956. *
  957. * Unless required by applicable law or agreed to in writing, software
  958. * distributed under the License is distributed on an "AS IS" BASIS,
  959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  960. * See the License for the specific language governing permissions and
  961. * limitations under the License.
  962. *
  963. * speedy-matrix.js
  964. * Matrix class
  965. */
  966. /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
  967. /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
  968. /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
  969. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  970. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
  971. /**
  972. * Matrix class
  973. */
  974. class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r {
  975. /**
  976. * @private
  977. *
  978. * Low-level constructor
  979. * @param {number} rows number of rows
  980. * @param {number} columns number of columns
  981. * @param {number} step0 step size between two consecutive elements (e.g., 1)
  982. * @param {number} step1 step size between two consecutive columns (e.g., rows)
  983. * @param {SpeedyMatrixBufferType} data entries in column-major format
  984. */
  985. constructor(rows, columns, step0, step1, data) {
  986. super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
  987. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
  988. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
  989. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.length + rows * columns === 0 ||
  990. // empty matrix and empty buffer, or
  991. data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
  992. );
  993. /** @type {number} step size between two consecutive elements */
  994. this._step0 = step0 | 0;
  995. /** @type {number} step size between two consecutive columns */
  996. this._step1 = step1 | 0;
  997. /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
  998. this._data = data;
  999. }
  1000. /**
  1001. * Create a new matrix with the specified size and entries
  1002. * @param {number} rows number of rows
  1003. * @param {number} columns number of columns
  1004. * @param {number[]} entries in column-major format
  1005. * @param {SpeedyMatrixDtype} [dtype] data type
  1006. * @returns {SpeedyMatrix}
  1007. */
  1008. static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1009. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1010. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
  1011. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1012. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
  1013. }
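/*
 * Example (sketch): entries are given in column-major order, so the 2x2 matrix
 *
 *   [ 1  3 ]
 *   [ 2  4 ]
 *
 * is created as follows:
 *
 *   const A = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]);
 *   A.at(0, 1);   // 3 (row 0, column 1)
 */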
  1014. /**
  1015. * Create a new matrix filled with zeros with the specified size
  1016. * @param {number} rows number of rows
  1017. * @param {number} [columns] number of columns
  1018. * @param {SpeedyMatrixDtype} [dtype] data type
  1019. * @returns {SpeedyMatrix}
  1020. */
  1021. static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1022. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1023. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1024. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
  1025. }
  1026. /**
  1027. * Create a new matrix filled with ones with the specified size
  1028. * @param {number} rows number of rows
  1029. * @param {number} [columns] number of columns
  1030. * @param {SpeedyMatrixDtype} [dtype] data type
  1031. * @returns {SpeedyMatrix}
  1032. */
  1033. static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1034. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1035. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1036. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
  1037. }
  1038. /**
  1039. * Create a new identity matrix with the specified size
  1040. * @param {number} rows number of rows
  1041. * @param {number} [columns] number of columns
  1042. * @param {SpeedyMatrixDtype} [dtype] data type
  1043. * @returns {SpeedyMatrix}
  1044. */
  1045. static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1046. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1047. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1048. const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
  1049. for (let j = Math.min(rows, columns) - 1; j >= 0; j--) data[j * rows + j] = 1;
  1050. return new SpeedyMatrix(rows, columns, 1, rows, data);
  1051. }
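/*
 * Example (sketch): the remaining factories follow the same conventions; a single
 * size argument produces a square matrix.
 *
 *   SpeedyMatrix.Zeros(3);        // 3x3 matrix filled with zeros
 *   SpeedyMatrix.Ones(2, 3);      // 2x3 matrix filled with ones
 *   SpeedyMatrix.Eye(3).read();   // [1,0,0, 0,1,0, 0,0,1] in column-major order
 */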
  1052. /**
  1053. * Evaluate an expression synchronously and store the result in a new matrix
  1054. * @param {SpeedyMatrixExpr} expr matrix expression
  1055. * @returns {SpeedyMatrix}
  1056. */
  1057. static From(expr) {
  1058. return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
  1059. }
  1060. /**
  1061. * Returns a promise that resolves immediately if the WebAssembly routines
1062. * are ready to be used, or as soon as they become ready
  1063. * @returns {SpeedyPromise<void>}
  1064. */
  1065. static ready() {
  1066. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void 0);
  1067. }
  1068. /**
  1069. * Get the underlying buffer
  1070. * @returns {SpeedyMatrixBufferType}
  1071. */
  1072. get data() {
  1073. return this._data;
  1074. }
  1075. /**
  1076. * Row-step
  1077. * @returns {number} defaults to 1
  1078. */
  1079. get step0() {
  1080. return this._step0;
  1081. }
  1082. /**
  1083. * Column-step
  1084. * @returns {number} defaults to this.rows
  1085. */
  1086. get step1() {
  1087. return this._step1;
  1088. }
  1089. /**
  1090. * Extract a block from this matrix. Use a shared underlying buffer
  1091. * @param {number} firstRow
  1092. * @param {number} lastRow
  1093. * @param {number} firstColumn
  1094. * @param {number} lastColumn
  1095. * @returns {SpeedyMatrix}
  1096. */
  1097. block(firstRow, lastRow, firstColumn, lastColumn) {
  1098. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(firstRow <= lastRow && firstColumn <= lastColumn, `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`);
  1099. // ensure that the indices are within bounds
  1100. firstRow = Math.max(firstRow, 0);
  1101. lastRow = Math.min(lastRow, this._rows - 1);
  1102. firstColumn = Math.max(firstColumn, 0);
  1103. lastColumn = Math.min(lastColumn, this._columns - 1);
  1104. // compute the dimensions of the new submatrix
  1105. const rows = lastRow - firstRow + 1;
  1106. const columns = lastColumn - firstColumn + 1;
  1107. // obtain the relevant portion of the data
  1108. const step0 = this._step0,
  1109. step1 = this._step1;
  1110. const begin = firstRow * step0 + firstColumn * step1; // inclusive
  1111. const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
  1112. // create new matrix
  1113. return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
  1114. }
  1115. /**
  1116. * Extract a row from this matrix
  1117. * @param {number} index 0-based
  1118. * @returns {SpeedyMatrix}
  1119. */
  1120. row(index) {
  1121. return this.block(index, index, 0, this._columns - 1);
  1122. }
  1123. /**
  1124. * Extract a column from this matrix
  1125. * @param {number} index 0-based
  1126. * @returns {SpeedyMatrix}
  1127. */
  1128. column(index) {
  1129. return this.block(0, this._rows - 1, index, index);
  1130. }
  1131. /**
  1132. * Extract the main diagonal from this matrix
  1133. * @returns {SpeedyMatrix} as a column-vector
  1134. */
  1135. diagonal() {
  1136. const diagsize = Math.min(this._rows, this._columns);
  1137. // compute the dimensions of the new submatrix
  1138. const rows = diagsize; // make it a column vector
  1139. const columns = 1;
  1140. // obtain the relevant portion of the data
  1141. const diagstep = this._step0 + this._step1; // jump a row and a column
  1142. const begin = 0; // inclusive
  1143. const end = 1 + (diagsize - 1) * diagstep; // exclusive
  1144. // create new matrix
  1145. return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
  1146. }
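/*
 * Example (sketch): block(), row(), column() and diagonal() return views backed by the
 * same underlying buffer, so writing through a view modifies the original matrix.
 *
 *   const M = SpeedyMatrix.Zeros(3);
 *   M.diagonal().fillSync(1);    // M is now the 3x3 identity
 *   M.block(0, 1, 0, 1);         // top-left 2x2 submatrix sharing M's buffer
 */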
  1147. /**
  1148. * Read a single entry of this matrix
  1149. * @param {number} row 0-based index
  1150. * @param {number} column 0-based index
  1151. * @returns {number}
  1152. */
  1153. at(row, column) {
  1154. if (row >= 0 && row < this._rows && column >= 0 && column < this._columns) return this._data[this._step0 * row + this._step1 * column];else return Number.NaN;
  1155. }
  1156. /**
  1157. * Read the entries of the matrix in column-major format
  1158. * @returns {number[]}
  1159. */
  1160. read() {
  1161. const entries = new Array(this._rows * this._columns);
  1162. const step0 = this._step0,
  1163. step1 = this._step1;
  1164. let i = 0;
  1165. for (let column = 0; column < this._columns; column++) {
  1166. for (let row = 0; row < this._rows; row++) entries[i++] = this._data[row * step0 + column * step1];
  1167. }
  1168. return entries;
  1169. }
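/*
 * Example (sketch): at() performs bounds checking and returns NaN for out-of-range
 * indices, while read() returns every entry in column-major order.
 *
 *   const A = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]);
 *   A.at(1, 0);   // 2
 *   A.at(5, 0);   // NaN
 *   A.read();     // [1, 2, 3, 4]
 */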
  1170. /**
  1171. * Returns a human-readable string representation of the matrix
  1172. * @returns {string}
  1173. */
  1174. toString() {
  1175. const DECIMALS = 5;
  1176. const rows = this.rows,
  1177. columns = this.columns;
  1178. const entries = this.read();
  1179. const mat = /** @type {number[][]} */new Array(rows);
  1180. for (let i = 0; i < rows; i++) {
  1181. mat[i] = new Array(columns);
  1182. for (let j = 0; j < columns; j++) mat[i][j] = entries[j * rows + i];
  1183. }
  1184. const fix = x => x.toFixed(DECIMALS);
  1185. const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
  1186. const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
  1187. return str;
  1188. }
  1189. /**
  1190. * Set the contents of this matrix to the result of an expression
  1191. * @param {SpeedyMatrixExpr} expr matrix expression
  1192. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1193. */
  1194. setTo(expr) {
  1195. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
  1196. // TODO: add support for WebWorkers
  1197. return this.setToSync(expr);
  1198. });
  1199. }
  1200. /**
  1201. * Synchronously set the contents of this matrix to the result of an expression
  1202. * @param {SpeedyMatrixExpr} expr matrix expression
  1203. * @returns {SpeedyMatrix} this
  1204. */
  1205. setToSync(expr) {
  1206. const {
  1207. wasm,
  1208. memory
  1209. } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
  1210. // evaluate the expression
  1211. const result = expr._evaluate(wasm, memory);
  1212. /*
  1213. // shallow copy the results to this matrix
  1214. // limitation: can't handle blocks properly
  1215. // (a tree-like structure could be useful)
  1216. this._rows = result.rows;
  1217. this._columns = result.columns;
  1218. //this._dtype = result.dtype;
  1219. this._data = result.data;
  1220. this._step0 = result.step0;
  1221. this._step1 = result.step1;
  1222. */
  1223. // validate shape
  1224. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype, `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`);
  1225. // deep copy
  1226. const step0 = this._step0,
  1227. step1 = this._step1,
  1228. rstep0 = result._step0,
  1229. rstep1 = result._step1;
  1230. if (step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
  1231. // fast copy
  1232. this._data.set(result._data);
  1233. } else {
  1234. // copy each element
  1235. for (let column = this._columns - 1; column >= 0; column--) {
  1236. for (let row = this._rows - 1; row >= 0; row--) this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
  1237. }
  1238. }
  1239. // done!
  1240. return this;
  1241. }
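/*
 * Example (sketch): a SpeedyMatrix is itself a SpeedyMatrixExpr (_evaluate() simply
 * returns the matrix), so the simplest use of setTo()/setToSync() is copying one
 * matrix into another of the same shape and dtype.
 *
 *   const A = SpeedyMatrix.Eye(3);
 *   const B = SpeedyMatrix.Zeros(3);
 *   SpeedyMatrix.ready().then(() => B.setToSync(A));   // B now holds the identity
 */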
  1242. /**
  1243. * Fill this matrix with a scalar value
  1244. * @param {number} value
  1245. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1246. */
  1247. fill(value) {
  1248. this.fillSync(value);
  1249. return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
  1250. }
  1251. /**
  1252. * Synchronously fill this matrix with a scalar value
  1253. * @param {number} value
  1254. * @returns {SpeedyMatrix} this
  1255. */
  1256. fillSync(value) {
  1257. value = +value;
  1258. if (this._rows * this._columns === this._data.length) {
  1259. this._data.fill(value);
  1260. return this;
  1261. }
  1262. for (let column = 0; column < this._columns; column++) {
  1263. for (let row = 0; row < this._rows; row++) {
  1264. this._data[row * this._step0 + column * this._step1] = value;
  1265. }
  1266. }
  1267. return this;
  1268. }
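/*
 * Example (sketch): fill() resolves to the matrix itself; fillSync() is the immediate,
 * synchronous variant.
 *
 *   SpeedyMatrix.Zeros(4).fill(7).then(M => console.log(M.toString()));
 */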
  1269. /**
  1270. * Evaluate this expression
  1271. * @param {WebAssembly.Instance} wasm
  1272. * @param {SpeedyMatrixWASMMemory} memory
  1273. * @returns {SpeedyMatrix}
  1274. */
  1275. _evaluate(wasm, memory) {
  1276. return this;
  1277. }
  1278. }
  1279. /***/ }),
  1280. /***/ 6634:
  1281. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_48547__) => {
  1282. "use strict";
  1283. /* harmony export */ __nested_webpack_require_48547__.d(__nested_webpack_exports__, {
  1284. /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
  1285. /* harmony export */ });
  1286. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_48547__(8581);
  1287. /*
  1288. * speedy-vision.js
  1289. * GPU-accelerated Computer Vision for JavaScript
  1290. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1291. *
  1292. * Licensed under the Apache License, Version 2.0 (the "License");
  1293. * you may not use this file except in compliance with the License.
  1294. * You may obtain a copy of the License at
  1295. *
  1296. * http://www.apache.org/licenses/LICENSE-2.0
  1297. *
  1298. * Unless required by applicable law or agreed to in writing, software
  1299. * distributed under the License is distributed on an "AS IS" BASIS,
  1300. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1301. * See the License for the specific language governing permissions and
  1302. * limitations under the License.
  1303. *
  1304. * speedy-namespace.js
  1305. * Symbolizes a namespace
  1306. */
  1307. /**
  1308. * An abstract namespace
  1309. * @abstract
  1310. */
  1311. class SpeedyNamespace {
  1312. /**
  1313. * Namespaces can't be instantiated.
  1314. * Only static methods are allowed.
  1315. * @abstract
  1316. * @throws SpeedyError
  1317. */
  1318. constructor() {
  1319. // only static methods are allowed
  1320. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
  1321. }
  1322. }
  1323. /***/ }),
  1324. /***/ 9192:
  1325. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_50059__) => {
  1326. "use strict";
  1327. /* harmony export */ __nested_webpack_require_50059__.d(__nested_webpack_exports__, {
  1328. /* harmony export */ i: () => (/* binding */ SpeedyPromise)
  1329. /* harmony export */ });
  1330. /*
  1331. * speedy-vision.js
  1332. * GPU-accelerated Computer Vision for JavaScript
  1333. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1334. *
  1335. * Licensed under the Apache License, Version 2.0 (the "License");
  1336. * you may not use this file except in compliance with the License.
  1337. * You may obtain a copy of the License at
  1338. *
  1339. * http://www.apache.org/licenses/LICENSE-2.0
  1340. *
  1341. * Unless required by applicable law or agreed to in writing, software
  1342. * distributed under the License is distributed on an "AS IS" BASIS,
  1343. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1344. * See the License for the specific language governing permissions and
  1345. * limitations under the License.
  1346. *
  1347. * speedy-promise.js
  1348. * Speedy Promises: a fast implementation of Promises
  1349. */
  1350. const PENDING = 0;
  1351. const FULFILLED = 1;
  1352. const REJECTED = 2;
  1353. const SUSPEND_ASYNC = 1;
  1354. const asap = typeof queueMicrotask !== 'undefined' && queueMicrotask ||
  1355. // browsers
  1356. typeof process !== 'undefined' && process.nextTick || (
  1357. // node.js
  1358. f => Promise.resolve().then(() => f())); // most compatible
  1359. /**
  1360. * SpeedyPromise: Super Fast Promises. SpeedyPromises can
  1361. * interoperate with ES6 Promises. This implementation is
  1362. * based on the Promises/A+ specification.
  1363. * @template T
  1364. */
  1365. class SpeedyPromise {
  1366. /**
  1367. * Constructor
  1368. * @param {function(function(T=): void, function(Error): void): void} callback
  1369. */
  1370. constructor(callback) {
  1371. this._state = PENDING;
  1372. this._value = undefined;
  1373. this._onFulfillment = null;
  1374. this._onRejection = null;
  1375. this._children = 0;
  1376. this[0] = this;
  1377. this._parent = undefined;
  1378. this._flags = 0;
  1379. this._fulfill = this._fulfill.bind(this);
  1380. this._reject = this._reject.bind(this);
  1381. this._resolve = this._resolve.bind(this);
  1382. this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
  1383. callback(this._fulfill, this._reject);
  1384. }
  1385. /**
  1386. * Setup handlers
  1387. * @template U, V=never
  1388. * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
  1389. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1390. * @returns {SpeedyPromise<U>}
  1391. */
  1392. then(onFulfillment, onRejection = null) {
  1393. const child = new SpeedyPromise(this._nop);
  1394. child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
  1395. child._onRejection = typeof onRejection === 'function' && onRejection;
  1396. child._parent = this;
  1397. this[this._children++] = child; // attach child
  1398. this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
  1399. this._notify();
  1400. return child;
  1401. }
  1402. /**
  1403. * Setup rejection handler
  1404. * @template U, V=never
  1405. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1406. * @returns {SpeedyPromise<V>}
  1407. */
  1408. catch(onRejection) {
  1409. return this.then(null, onRejection);
  1410. }
  1411. /**
  1412. * Execute a callback when the promise is settled
  1413. * (i.e., fulfilled or rejected)
  1414. * @param {function(): void} onFinally
  1415. * @returns {SpeedyPromise<T>}
  1416. */
  1417. finally(onFinally) {
  1418. const fn = val => {
  1419. onFinally();
  1420. return val;
  1421. };
  1422. return this.then(fn, fn);
  1423. }
  1424. /**
  1425. * Start the computation immediately, synchronously.
  1426. * Can't afford to spend any time at all waiting for micro-tasks, etc.
  1427. * @returns {SpeedyPromise<T>} this
  1428. */
  1429. turbocharge() {
  1430. let my = this;
  1431. // suspend the async behavior
  1432. this._flags |= SUSPEND_ASYNC;
  1433. while (my._parent !== undefined) {
  1434. my = my._parent;
  1435. my._flags |= SUSPEND_ASYNC;
  1436. }
  1437. // notify the children of the root
  1438. my._notify(); // will be synchronous
  1439. // return this SpeedyPromise
  1440. return this;
  1441. }
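/*
 * Example (sketch): turbocharge() walks up to the root of the chain, suspends the
 * microtask scheduling and broadcasts synchronously, so handlers of an already
 * settled chain run before turbocharge() returns.
 *
 *   let x = 0;
 *   SpeedyPromise.resolve(21).then(v => { x = 2 * v; }).turbocharge();
 *   console.log(x);   // 42 (the handler has already run)
 */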
  1442. /**
  1443. * Convert to string
  1444. * @returns {string}
  1445. */
  1446. toString() {
  1447. switch (this._state) {
  1448. case PENDING:
  1449. return `SpeedyPromise { <pending> }`;
  1450. case FULFILLED:
  1451. return `SpeedyPromise { <fulfilled> ${this._value} }`;
  1452. case REJECTED:
  1453. return `SpeedyPromise { <rejected> ${this._value} }`;
  1454. default:
  1455. return '';
  1456. }
  1457. }
  1458. /**
  1459. * Symbol.toStringTag
  1460. * @returns {string}
  1461. */
  1462. get [Symbol.toStringTag]() {
  1463. return 'SpeedyPromise';
  1464. }
  1465. /**
  1466. * Creates a resolved SpeedyPromise
  1467. * @template U
  1468. * @param {U} [value]
  1469. * @returns {SpeedyPromise<U>}
  1470. */
  1471. static resolve(value) {
  1472. const promise = new SpeedyPromise(this._snop);
  1473. if (typeof value === 'object' && value !== null && 'then' in value || typeof value === 'function' && 'then' in value) {
  1474. // resolve asynchronously
  1475. promise._resolve(value);
  1476. } else {
  1477. // fulfill synchronously
  1478. promise._value = value;
  1479. promise._state = FULFILLED;
  1480. }
  1481. return promise;
  1482. }
  1483. /**
  1484. * Creates a rejected SpeedyPromise
  1485. * @template U
  1486. * @param {Error} reason
  1487. * @returns {SpeedyPromise<U>}
  1488. */
  1489. static reject(reason) {
  1490. const promise = new SpeedyPromise(this._snop);
  1491. promise._value = reason;
  1492. promise._state = REJECTED;
  1493. return promise;
  1494. }
  1495. /**
  1496. * Returns a SpeedyPromise that resolves to an array
  1497. * containing the results of the input promises/values,
  1498. * in their given order. The returned SpeedyPromise will
  1499. * resolve if all input promises resolve, or reject if
  1500. * any input promise rejects.
  1501. * @template U
  1502. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1503. * @returns {SpeedyPromise<U[]>}
  1504. *
  1505. * FIXME iterables need not be all <U>
  1506. */
  1507. static all(iterable) {
  1508. return new SpeedyPromise((resolve, reject) => {
  1509. const input = [];
  1510. // get elements
  1511. for (const element of iterable) input.push(element);
  1512. // resolve synchronously if there are no elements
  1513. const length = input.length;
  1514. if (length == 0) {
  1515. resolve([]);
  1516. return;
  1517. }
  1518. // resolve asynchronously
  1519. let counter = length;
  1520. const output = new Array(length);
  1521. const partialResolve = i => val => {
  1522. output[i] = val;
  1523. if (0 == --counter) resolve(output);
  1524. };
  1525. for (let i = 0; i < length; i++) {
  1526. const element = input[i];
  1527. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(partialResolve(i), reject);else SpeedyPromise.resolve(element).then(partialResolve(i), reject);
  1528. }
  1529. });
  1530. }
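/*
 * Example (sketch): all() accepts a mix of SpeedyPromises, native Promises and plain
 * values, and preserves the input order in the resolved array.
 *
 *   SpeedyPromise.all([SpeedyPromise.resolve(1), Promise.resolve(2), 3])
 *     .then(values => console.log(values));   // [1, 2, 3]
 */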
  1531. /**
  1532. * Returns a promise that gets fulfilled or rejected as soon
  1533. * as the first promise in the iterable gets fulfilled or
  1534. * rejected (with its value/reason).
  1535. * @template U
  1536. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1537. * @returns {SpeedyPromise<U>}
  1538. */
  1539. static race(iterable) {
  1540. return new SpeedyPromise((resolve, reject) => {
  1541. const input = [];
  1542. // get elements
  1543. for (const element of iterable) input.push(element);
  1544. // if the iterable is empty, the promise
  1545. // will be pending forever...
  1546. // resolve asynchronously
  1547. const length = input.length;
  1548. for (let i = 0; i < length; i++) {
  1549. const element = input[i];
  1550. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(resolve, reject);else SpeedyPromise.resolve(element).then(resolve, reject);
  1551. }
  1552. });
  1553. }
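/*
 * Example (sketch): race() settles with the first input promise to settle; with an
 * empty iterable it remains pending forever, mirroring Promise.race().
 *
 *   SpeedyPromise.race([SpeedyPromise.resolve('fast'), new SpeedyPromise(() => {})])
 *     .then(winner => console.log(winner));   // 'fast'
 */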
  1554. /**
  1555. * Fulfill this promise with a value
  1556. * @param {T} value
  1557. */
  1558. _fulfill(value) {
  1559. this._setState(FULFILLED, value);
  1560. }
  1561. /**
  1562. * Reject this promise with a reason
  1563. * @param {Error} reason
  1564. */
  1565. _reject(reason) {
  1566. this._setState(REJECTED, reason);
  1567. }
  1568. /**
  1569. * Set the state and the value of this promise
  1570. * @param {number} state
  1571. * @param {T|Error} value
  1572. */
  1573. _setState(state, value) {
  1574. // the promise is already fulfilled or rejected
  1575. if (this._state != PENDING) return;
  1576. // set the new state
  1577. this._state = state;
  1578. this._value = value;
  1579. this._notify();
  1580. }
  1581. /**
  1582. * Notify my children that this promise is no
  1583. * longer pending. This is an async operation:
1584. * my children will be notified "as soon
  1585. * as possible" (it will be scheduled).
  1586. * We may force this to be synchronous, though
  1587. */
  1588. _notify() {
  1589. // nothing to do
  1590. if (this._state == PENDING) return;
  1591. // have we turbocharged this promise?
  1592. if (this._flags & SUSPEND_ASYNC) {
  1593. this._broadcast(); // execute synchronously
  1594. return;
  1595. }
  1596. // install a timer (default behavior)
  1597. asap(this._broadcastIfAsync);
  1598. }
  1599. /**
  1600. * Helper method
  1601. */
  1602. _broadcastIfAsync() {
  1603. // we may have installed a timer at some
  1604. // point, but turbocharged the promise later
  1605. if (!(this._flags & SUSPEND_ASYNC)) this._broadcast();
  1606. }
  1607. /**
  1608. * Tell my children that this promise
  1609. * is either fulfilled or rejected.
  1610. * This is a synchronous operation
  1611. */
  1612. _broadcast() {
  1613. const children = this._children;
  1614. const state = this._state;
  1615. if (state === FULFILLED) {
  1616. for (let i = 0; i < children; i++) {
  1617. const child = this[i];
  1618. const callback = child._onFulfillment;
  1619. try {
  1620. if (callback) {
  1621. if (callback !== child._nop) {
  1622. child._resolve(callback(this._value)); // promise resolution procedure
  1623. child._onFulfillment = child._nop; // will not be called again
  1624. }
  1625. } else child._fulfill(this._value);
  1626. } catch (e) {
  1627. child._reject(e);
  1628. }
  1629. }
  1630. } else if (state === REJECTED) {
  1631. for (let i = 0; i < children; i++) {
  1632. const child = this[i];
  1633. const callback = child._onRejection;
  1634. try {
  1635. if (callback) {
  1636. if (callback !== child._nop) {
  1637. child._resolve(callback(this._value)); // promise resolution procedure
  1638. child._onRejection = child._nop; // will not be called again
  1639. }
  1640. } else child._reject(this._value);
  1641. } catch (e) {
  1642. child._reject(e);
  1643. }
  1644. }
  1645. }
  1646. }
  1647. /**
  1648. * Promise Resolution Procedure
  1649. * based on the Promises/A+ spec
  1650. * @param {T} x
  1651. */
  1652. _resolve(x) {
  1653. if (typeof x !== 'object' && typeof x !== 'function' || x === null) {
  1654. // if(x !== Object(x))
  1655. this._fulfill(x);
  1656. return;
  1657. }
  1658. if (x === this) throw new TypeError(); // Circular reference
  1659. if (x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
  1660. x.then(this._resolve, this._reject);
  1661. return;
  1662. }
  1663. try {
  1664. const then = x.then;
  1665. if (typeof then === 'function') {
  1666. let resolve = this._resolve,
  1667. reject = this._reject;
  1668. try {
  1669. then.call(x, y => {
  1670. resolve(y);
  1671. resolve = reject = this._nop;
  1672. }, r => {
  1673. reject(r);
  1674. resolve = reject = this._nop;
  1675. });
  1676. } catch (e) {
  1677. if (resolve !== this._nop && reject !== this._nop) this._reject(e);
  1678. }
  1679. } else {
  1680. this._fulfill(x);
  1681. }
  1682. } catch (e) {
  1683. this._reject(e);
  1684. }
  1685. }
  1686. /**
  1687. * No-operation
  1688. */
  1689. _nop() {}
  1690. /**
  1691. * Static no-operation
  1692. */
  1693. static _snop() {}
  1694. }
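/*
 * Usage sketch: SpeedyPromises chain like native promises and interoperate with them.
 *
 *   new SpeedyPromise((resolve, reject) => setTimeout(() => resolve(10), 0))
 *     .then(n => { if (n > 5) throw new Error('too big'); return n; })
 *     .catch(err => -1)
 *     .finally(() => console.log('settled'))
 *     .then(n => console.log(n));   // logs 'settled', then -1
 */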
  1695. //module.exports = { SpeedyPromise };
  1696. /*
  1697. // Uncomment to test performance with regular Promises
  1698. module.exports = { SpeedyPromise: Promise };
  1699. Promise.prototype.turbocharge = function() { return this };
  1700. */
  1701. /***/ }),
  1702. /***/ 9420:
  1703. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_61794__) => {
  1704. "use strict";
  1705. // EXPORTS
  1706. __nested_webpack_require_61794__.d(__nested_webpack_exports__, {
  1707. gx: () => (/* binding */ createShader),
  1708. bf: () => (/* binding */ importShader)
  1709. });
  1710. // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
  1711. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  1712. var speedy_gl = __nested_webpack_require_61794__(1001);
  1713. // EXTERNAL MODULE: ./src/utils/utils.js
  1714. var utils = __nested_webpack_require_61794__(9037);
  1715. // EXTERNAL MODULE: ./src/utils/types.js
  1716. var types = __nested_webpack_require_61794__(6049);
  1717. // EXTERNAL MODULE: ./src/utils/errors.js
  1718. var errors = __nested_webpack_require_61794__(8581);
  1719. ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
  1720. function _wrapRegExp() { _wrapRegExp = function (e, r) { return new BabelRegExp(e, void 0, r); }; var e = RegExp.prototype, r = new WeakMap(); function BabelRegExp(e, t, p) { var o = RegExp(e, t); return r.set(o, p || r.get(e)), _setPrototypeOf(o, BabelRegExp.prototype); } function buildGroups(e, t) { var p = r.get(t); return Object.keys(p).reduce(function (r, t) { var o = p[t]; if ("number" == typeof o) r[t] = e[o];else { for (var i = 0; void 0 === e[o[i]] && i + 1 < o.length;) i++; r[t] = e[o[i]]; } return r; }, Object.create(null)); } return _inherits(BabelRegExp, RegExp), BabelRegExp.prototype.exec = function (r) { var t = e.exec.call(this, r); if (t) { t.groups = buildGroups(t, this); var p = t.indices; p && (p.groups = buildGroups(p, this)); } return t; }, BabelRegExp.prototype[Symbol.replace] = function (t, p) { if ("string" == typeof p) { var o = r.get(this); return e[Symbol.replace].call(this, t, p.replace(/\$<([^>]+)>/g, function (e, r) { var t = o[r]; return "$" + (Array.isArray(t) ? t.join("$") : t); })); } if ("function" == typeof p) { var i = this; return e[Symbol.replace].call(this, t, function () { var e = arguments; return "object" != typeof e[e.length - 1] && (e = [].slice.call(e)).push(buildGroups(e, i)), p.apply(this, e); }); } return e[Symbol.replace].call(this, t, p); }, _wrapRegExp.apply(this, arguments); }
  1721. function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
  1722. function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
  1723. /*
  1724. * speedy-vision.js
  1725. * GPU-accelerated Computer Vision for JavaScript
  1726. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1727. *
  1728. * Licensed under the Apache License, Version 2.0 (the "License");
  1729. * you may not use this file except in compliance with the License.
  1730. * You may obtain a copy of the License at
  1731. *
  1732. * http://www.apache.org/licenses/LICENSE-2.0
  1733. *
  1734. * Unless required by applicable law or agreed to in writing, software
  1735. * distributed under the License is distributed on an "AS IS" BASIS,
  1736. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1737. * See the License for the specific language governing permissions and
  1738. * limitations under the License.
  1739. *
  1740. * shader-preprocessor.js
  1741. * Custom preprocessor for shaders
  1742. */
  1743. /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
  1744. /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
  1745. // Import numeric globals
  1746. const globals = __nested_webpack_require_61794__(3816);
  1747. const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */
  1748. Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce((obj, key) => (obj[key] = globals[key], obj), {});
  1749. /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
  1750. const basicConstants = Object.freeze(Object.assign(Object.assign({}, numericGlobals), {}, {
  1751. // fragment shader
  1752. 'FS_USE_CUSTOM_PRECISION': 0,
  1753. // use default precision settings
  1754. 'FS_OUTPUT_TYPE': 0,
  1755. // normalized RGBA
  1756. // colors
  1757. 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
  1758. 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
  1759. 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
  1760. 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA
  1761. }));
  1762. /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
  1763. const platformConstants = (platform, glRenderer) => Object.freeze({
  1764. 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0,
  1765. // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
  1766. 'APPLE_GPU': /Apple/.test(glRenderer) | 0,
  1767. // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
  1768. 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0 // Intel[(R)] ... [HD] Graphics xyz ...
  1769. });
  1770. // Regular Expressions
  1771. const commentsRegex = [/\/\*(.|\s)*?\*\//g, /\/\/.*$/gm];
  1772. const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
  1773. const constantRegex = /@(\w+)@/g;
  1774. const unrollRegex = [/*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1775. counter: 2
  1776. }), /*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+=\s*(\x2D?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1777. counter: 2
  1778. })];
  1779. /**
  1780. * Custom preprocessor for the shaders
  1781. */
  1782. class ShaderPreprocessor {
  1783. /**
  1784. * Runs the preprocessor and generates GLSL code
  1785. * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
  1786. * @param {string} infix annotated GLSL code
  1787. * @param {string} [prefix]
  1788. * @param {string} [suffix]
  1789. * @returns {string} preprocessed GLSL code
  1790. */
  1791. static generateGLSL(defines, infix, prefix = null, suffix = null) {
  1792. //
  1793. // The preprocessor will remove comments from GLSL code,
  1794. // include requested GLSL files and import global constants
  1795. // defined for all shaders (see above)
  1796. //
  1797. const errors = []; // compile-time errors
  1798. const constants = generateConstants(defines);
  1799. const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
  1800. return unrollLoops(annotatedGLSL.replace(commentsRegex[0], '').replace(commentsRegex[1], '').replace(constantRegex, (_, name) => String(
  1801. // Replace preprocessor @CONSTANTS@ by their numeric values
  1802. constants.has(name) ? Number(constants.get(name)) : (errors.push(`Undefined constant ${name}`), 0))).replace(includeRegex, (_, filename) =>
  1803. // Included files may include other files.
  1804. // XXX no cycle detection!
  1805. ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))), defines) + errors.map(msg => `\n#error ${msg}\n`).join('');
  1806. }
  1807. }
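/*
 * Example (sketch): the annotated GLSL accepted by generateGLSL() supports three custom
 * directives, all resolved at build time. WINDOW_SIZE below is an illustrative constant
 * that would be supplied via the defines map:
 *
 *   @include "global.glsl"
 *   uniform sampler2D image;
 *   void main() {
 *       float sum = 0.0f;
 *       @unroll
 *       for(int i = 0; i < @WINDOW_SIZE@; i++) {
 *           sum += texelFetch(image, ivec2(i, 0), 0).r;
 *       }
 *       color = vec4(sum);   // 'color' is declared in the default fragment shader prefix
 *   }
 */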
  1808. /**
  1809. * Generate GLSL code based on the input arguments
  1810. * @param {ShaderPreprocessorConstants} defines
  1811. * @param {string} infix
  1812. * @param {string} [prefix]
  1813. * @param {string} [suffix]
  1814. * @returns {string} GLSL code
  1815. */
  1816. function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null) {
  1817. const parts = [];
  1818. if (prefix !== null) parts.push(prefix);
  1819. for (const [key, value] of defines) parts.push(`#define ${key} ${Number(value)}`);
  1820. parts.push(infix);
  1821. if (suffix !== null) parts.push(suffix);
  1822. return parts.join('\n');
  1823. }
  1824. /**
  1825. * Generate pre-processor constants. Constants provided by the
  1826. * user have higher priority than globally available constants.
  1827. * @param {ShaderPreprocessorConstants} defines user-provided
  1828. * @returns {ShaderPreprocessorConstants}
  1829. */
  1830. function generateConstants(defines) {
  1831. utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
  1832. const myConstants = /** @type {ShaderPreprocessorConstants} */new Map();
  1833. const globalConstants = Object.assign(Object.create(null), basicConstants, platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer));
  1834. // globally available constants have lower priority
  1835. for (const key in globalConstants) {
  1836. //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
  1837. myConstants.set(key, globalConstants[key]);
  1838. }
  1839. // user-defined constants have higher priority
  1840. for (const [key, value] of defines) myConstants.set(key, value);
  1841. // done!
  1842. return myConstants;
  1843. }
  1844. /**
  1845. * Reads a shader from the shaders/include/ folder
  1846. * @param {string} filename
  1847. * @returns {string}
  1848. */
  1849. function readfileSync(filename) {
  1850. if (String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/)) return __nested_webpack_require_61794__(5235)("./" + filename);
  1851. throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
  1852. }
  1853. /**
  1854. * Unroll for loops in our own preprocessor
  1855. * @param {string} code
  1856. * @param {ShaderPreprocessorConstants} defines
  1857. * @returns {string}
  1858. */
  1859. function unrollLoops(code, defines) {
  1860. //
  1861. // Currently, only integer for loops with positive step values
  1862. // can be unrolled. (TODO: negative step values?)
  1863. //
  1864. // The current implementation does not support curly braces
  1865. // inside unrolled loops. You may define macros to get around
  1866. // this, but do you actually need to unroll such loops?
  1867. //
  1868. // Loops that don't fit the supported pattern will crash
  1869. // the preprocessor if you try to unroll them.
  1870. //
  1871. const fn = unroll.bind(defines); // CRAZY!
  1872. const n = unrollRegex.length;
  1873. for (let i = 0; i < n; i++) code = code.replace(unrollRegex[i], fn);
  1874. return code;
  1875. }
  1876. /**
  1877. * Unroll a loop pattern (regexp)
  1878. * @param {string} match the matched for loop
  1879. * @param {string} type
  1880. * @param {string} counter
  1881. * @param {string} start
  1882. * @param {string} cmp
  1883. * @param {string} end
  1884. * @param {string} step
  1885. * @param {string} loopcode
  1886. * @returns {string} unrolled loop
  1887. */
  1888. function unroll(match, type, counter, start, cmp, end, step, loopcode) {
  1889. const defines = /** @type {ShaderPreprocessorConstants} */this;
  1890. // check if the loop limits are numeric constants or #defined numbers from the outside
  1891. const hasStart = Number.isFinite(+start) || defines.has(start);
  1892. const hasEnd = Number.isFinite(+end) || defines.has(end);
  1893. if (!hasStart || !hasEnd) {
  1894. if (defines.size > 0) throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);else return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
  1895. }
  1896. // parse and validate limits & step
  1897. let istart = defines.has(start) ? defines.get(start) : parseInt(start);
  1898. let iend = defines.has(end) ? defines.get(end) : parseInt(end);
  1899. let istep = step.length == 0 ? 1 : parseInt(step);
  1900. utils/* Utils */.A.assert(istart <= iend && istep > 0);
  1901. /*
  1902. // debug
1903. console.log(`Found "${match}"`);
  1904. console.log(`type="${type}"`);
  1905. console.log(`counter="${counter}"`);
  1906. console.log(`start="${start}"`);
  1907. console.log(`cmp="${cmp}"`);
  1908. console.log(`end="${end}"`);
  1909. console.log(`step="${step}"`);
  1910. console.log(`loopcode="${loopcode}"`)
  1911. console.log('Defines:', defines);
  1912. */
  1913. // continue statements are not supported inside unrolled loops
  1914. // and will generate a compiler error. Using break is ok.
  1915. const hasBreak = loopcode.match(/\bbreak\s*;/) !== null;
  1916. // create a new scope
  1917. let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
  1918. // declare counter
  1919. unrolledCode += `${type} ${counter};\n`;
  1920. // unroll loop
  1921. iend += cmp == '<=' ? 1 : 0;
  1922. for (let i = istart; i < iend; i += istep) unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
  1923. // close scope
  1924. unrolledCode += '}\n';
  1925. //console.log('Unrolled code:\n\n' + unrolledCode);
  1926. // done!
  1927. return unrolledCode;
  1928. }
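/*
 * Example (sketch): given the annotated loop
 *
 *   @unroll for(int i = 0; i < 3; i++) { s += f(i); }
 *
 * the preprocessor emits a new scope with the counter declared once and the body
 * repeated for each value of the counter (whitespace compacted here; a
 * switch(1) { default: ... } scope is used instead when the body contains a break):
 *
 *   { int i; { i = 0; s += f(i); } { i = 1; s += f(i); } { i = 2; s += f(i); } }
 */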
  1929. ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
  1930. /*
  1931. * speedy-vision.js
  1932. * GPU-accelerated Computer Vision for JavaScript
  1933. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1934. *
  1935. * Licensed under the Apache License, Version 2.0 (the "License");
  1936. * you may not use this file except in compliance with the License.
  1937. * You may obtain a copy of the License at
  1938. *
  1939. * http://www.apache.org/licenses/LICENSE-2.0
  1940. *
  1941. * Unless required by applicable law or agreed to in writing, software
  1942. * distributed under the License is distributed on an "AS IS" BASIS,
  1943. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1944. * See the License for the specific language governing permissions and
  1945. * limitations under the License.
  1946. *
  1947. * shader-declaration.js
  1948. * Encapsulates a shader declaration
  1949. */
  1950. const DEFAULT_ATTRIBUTES = Object.freeze({
  1951. position: 'a_position',
  1952. texCoord: 'a_texCoord'
  1953. });
  1954. const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
  1955. position: 0,
  1956. // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  1957. texCoord: 1
  1958. });
  1959. const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
  1960. precision highp float;
  1961. precision highp int;
  1962. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
  1963. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
  1964. out highp vec2 texCoord;
  1965. uniform highp vec2 texSize;
  1966. #define vsinit() \
  1967. gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
  1968. texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
  1969. \n\n`;
  1970. const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
  1971. const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
  1972. const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
  1973. #if @FS_USE_CUSTOM_PRECISION@ == 0
  1974. precision mediump float; // ~float16
  1975. precision mediump sampler2D;
  1976. precision highp int; // int32
  1977. #endif
  1978. #if @FS_OUTPUT_TYPE@ == 0
  1979. #define OUT_TYPE mediump vec4
  1980. #elif @FS_OUTPUT_TYPE@ == 1
  1981. #define OUT_TYPE mediump ivec4
  1982. #elif @FS_OUTPUT_TYPE@ == 2
  1983. #define OUT_TYPE mediump uvec4
  1984. #else
  1985. #error Unknown FS_OUTPUT_TYPE
  1986. #endif
  1987. out OUT_TYPE color;
  1988. in highp vec2 texCoord;
  1989. uniform highp vec2 texSize;
  1990. @include "global.glsl"\n\n`;
  1991. const PRIVATE_TOKEN = Symbol();
  1992. /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
  1993. /** @typedef {string[]} ShaderDeclarationArgumentList */
  1994. /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
  1995. /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
  1996. /**
  1997. * Shader Declaration
  1998. * @abstract
  1999. */
  2000. class ShaderDeclaration {
  2001. /**
  2002. * @private Constructor
  2003. * @param {Symbol} privateToken
  2004. * @param {ShaderDeclarationArgumentList} argumentList
  2005. * @param {ShaderDeclarationPreprocessorConstants} defines
  2006. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2007. * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
  2008. */
  2009. constructor(privateToken, argumentList, defines, fsSource, vsSource) {
  2010. // private constructor!
  2011. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er();
  2012. /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
  2013. this._arguments = [...argumentList];
  2014. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2015. this._defines = new Map(defines);
  2016. /** @type {string} preprocessed source code of the fragment shader */
  2017. this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
  2018. /** @type {string} preprocessed source code of the vertex shader */
  2019. this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
  2020. /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
  2021. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2022. // validate arguments
  2023. this._validateArguments(this._arguments, this._uniforms);
  2024. }
  2025. /**
  2026. * Return the preprocessed GLSL source code of the fragment shader
  2027. * @returns {string}
  2028. */
  2029. get fragmentSource() {
  2030. return this._fragmentSource;
  2031. }
  2032. /**
  2033. * Return the preprocessed GLSL source code of the vertex shader
  2034. * @returns {string}
  2035. */
  2036. get vertexSource() {
  2037. return this._vertexSource;
  2038. }
  2039. /**
  2040. * Get the names of the vertex shader attributes
  2041. * @returns {typeof DEFAULT_ATTRIBUTES}
  2042. */
  2043. get attributes() {
  2044. return DEFAULT_ATTRIBUTES;
  2045. }
  2046. /**
  2047. * Get the pre-defined locations of the vertex shader attributes
  2048. * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
  2049. */
  2050. get locationOfAttributes() {
  2051. return DEFAULT_ATTRIBUTES_LOCATION;
  2052. }
  2053. /**
  2054. * Names of the arguments that will be passed to the Shader,
  2055. * corresponding to GLSL uniforms, in the order they will be passed
  2056. * @returns {string[]}
  2057. */
  2058. get arguments() {
  2059. return [].concat(this._arguments);
  2060. }
  2061. /**
  2062. * Names of the uniforms declared in the shader
  2063. * @returns {string[]}
  2064. */
  2065. get uniforms() {
  2066. return Array.from(this._uniforms.keys());
  2067. }
  2068. /**
  2069. * The GLSL type of a uniform variable declared in the shader
  2070. * @param {string} name
  2071. * @returns {string}
  2072. */
  2073. uniformType(name) {
  2074. if (!this._uniforms.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
  2075. return this._uniforms.get(name);
  2076. }
  2077. /**
  2078. * The value of an externally defined constant, i.e., via withDefines()
  2079. * @param {string} name
  2080. * @returns {number}
  2081. */
  2082. definedConstant(name) {
  2083. if (!this._defines.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
  2084. return this._defines.get(name);
  2085. }
  2086. /**
  2087. * Parses a GLSL source and detects the uniform variables,
  2088. * as well as their types
  2089. * @param {string} preprocessedSource
  2090. * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
  2091. */
  2092. _autodetectUniforms(preprocessedSource) {
  2093. const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
  2094. const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
  2095. const uniforms = /** @type {ShaderDeclarationUniformTypes} */new Map();
  2096. let match;
  2097. while ((match = regex.exec(sourceWithoutComments)) !== null) {
  2098. const type = match[2];
  2099. const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
  2100. for (const name of names) {
  2101. if (name.endsWith(']')) {
  2102. // is it an array?
  2103. if (!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/))) throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
  2104. // read array name & size
  2105. const [array, size] = [match[1], Number(match[2])];
  2106. // register uniforms
  2107. for (let i = 0; i < size; i++) uniforms.set(`${array}[${i}]`, type);
  2108. } else {
  2109. // register a regular uniform
  2110. if (!uniforms.has(name) || uniforms.get(name) === type) uniforms.set(name, type);else throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
  2111. }
  2112. }
  2113. }
  2114. return uniforms;
  2115. }
  2116. /**
  2117. * Checks if all the arguments of the shader declaration are backed by a
  2118. * uniform variable in GLSL code
  2119. * @param {ShaderDeclarationArgumentList} argumentList
  2120. * @param {ShaderDeclarationUniformTypes} uniforms
  2121. * @throws {IllegalArgumentError}
  2122. */
  2123. _validateArguments(argumentList, uniforms) {
  2124. for (const argname of argumentList) {
  2125. if (!uniforms.has(argname)) {
  2126. if (!uniforms.has(argname + '[0]')) throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
  2127. }
  2128. }
  2129. }
  2130. }
  2131. /**
  2132. * A ShaderDeclaration that has its GLSL code stored in-memory
  2133. */
  2134. class MemoryShaderDeclaration extends ShaderDeclaration {
  2135. /**
  2136. * @private Constructor
  2137. * @param {Symbol} privateToken
  2138. * @param {ShaderDeclarationArgumentList} argumentList
  2139. * @param {ShaderDeclarationPreprocessorConstants} defines
  2140. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2141. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
  2142. */
  2143. constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER) {
  2144. super(privateToken, argumentList, defines, fsSource, vsSource);
  2145. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
  2146. this._fsUnprocessedSource = String(fsSource);
  2147. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
  2148. this._vsUnprocessedSource = String(vsSource);
  2149. }
  2150. }
  2151. /**
  2152. * A ShaderDeclaration that has its GLSL code stored in a file
  2153. */
  2154. class FileShaderDeclaration extends ShaderDeclaration {
  2155. /**
  2156. * @private Constructor
  2157. * @param {Symbol} privateToken
  2158. * @param {ShaderDeclarationArgumentList} argumentList
  2159. * @param {ShaderDeclarationPreprocessorConstants} defines
  2160. * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
  2161. * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
  2162. */
  2163. constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '') {
  2164. // validate paths
  2165. if (!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);else if (vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
  2166. // import files
  2167. const fsSource = __nested_webpack_require_61794__(4606)("./" + String(fsFilepath));
  2168. const vsSource = vsFilepath != '' ? __nested_webpack_require_61794__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
  2169. // super class
  2170. super(privateToken, argumentList, defines, fsSource, vsSource);
  2171. /** @type {string} filepath of the fragment shader */
  2172. this._fsFilepath = String(fsFilepath);
  2173. /** @type {string} filepath of the vertex shader */
  2174. this._vsFilepath = String(vsFilepath);
  2175. }
  2176. /**
  2177. * Return the preprocessed GLSL source code of the fragment shader
  2178. * @returns {string}
  2179. */
  2180. get fragmentSource() {
  2181. // we override this method to include the filepath. The motivation
2182. // is to easily identify the file when debugging compilation errors.
  2183. return this._addHeader('// File: ' + this._fsFilepath, super.fragmentSource);
  2184. }
  2185. /**
  2186. * Return the preprocessed GLSL source code of the vertex shader
  2187. * @returns {string}
  2188. */
  2189. get vertexSource() {
  2190. // we override this method to include the filepath. The motivation
2191. // is to easily identify the file when debugging compilation errors.
  2192. return this._addHeader('// File: ' + (this._vsFilepath != '' ? this._vsFilepath : '(default-vs) ' + this._fsFilepath), super.vertexSource);
  2193. }
  2194. /**
  2195. * Add a header to a GLSL code
  2196. * @param {string} header code to be added
  2197. * @param {string} src pre-processed GLSL code
  2198. * @returns {string} src with an added header
  2199. */
  2200. _addHeader(header, src) {
  2201. utils/* Utils */.A.assert(header.startsWith('//') && !header.includes('\n'));
  2202. const j = src.indexOf('\n');
  2203. const versionDirective = src.substr(0, j);
  2204. const body = src.substr(j);
  2205. utils/* Utils */.A.assert(versionDirective.startsWith('#version '));
  2206. const head = versionDirective + '\n' + header;
  2207. return head + body;
  2208. }
  2209. }
  2210. /**
  2211. * A builder of a ShaderDeclaration
  2212. * @abstract
  2213. */
  2214. class ShaderDeclarationBuilder {
  2215. /**
  2216. * @private Constructor
  2217. * @param {Symbol} privateToken
  2218. */
  2219. constructor(privateToken) {
  2220. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er(); // private constructor!
  2221. /** @type {string[]} ordered list of uniform names */
  2222. this._arguments = [];
  2223. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2224. this._defines = new Map();
  2225. }
  2226. /**
  2227. * Specify the list & order of arguments to be
  2228. * passed to the shader
  2229. * @param {string[]} args argument names
  2230. * @returns {this}
  2231. */
  2232. withArguments(...args) {
  2233. // the list of arguments may be declared only once
  2234. if (this._arguments.length > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
  2235. // get arguments
  2236. for (let j = 0; j < args.length; j++) this._arguments.push(String(args[j]));
  2237. // done!
  2238. return this;
  2239. }
  2240. /**
  2241. * Specify a set of #defines to be prepended to the shader
  2242. * @param {Object<string,number>} defines key-value pairs
  2243. * @returns {this}
  2244. */
  2245. withDefines(defines) {
  2246. // the list of #defines may be defined only once
  2247. if (this._defines.size > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
  2248. // store and write the #defines
  2249. const keys = Object.keys(defines);
  2250. for (const key of keys) {
  2251. const value = Number(defines[key]); // force numeric values (just in case)
  2252. this._defines.set(key, value);
  2253. }
  2254. // done!
  2255. return this;
  2256. }
  2257. /**
  2258. * Build a ShaderDeclaration
  2259. * @returns {ShaderDeclaration}
  2260. */
  2261. build() {
  2262. throw new errors/* AbstractMethodError */.aQ();
  2263. }
  2264. }
  2265. /**
  2266. * A builder of a MemoryShaderDeclaration
  2267. */
  2268. class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2269. /**
  2270. * @private Constructor
  2271. * @param {Symbol} privateToken
  2272. * @param {ShaderDeclarationUnprocessedGLSL} fsSource
  2273. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
  2274. */
  2275. constructor(privateToken, fsSource, vsSource) {
  2276. super(privateToken);
  2277. /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
  2278. this._fsSource = String(fsSource);
  2279. /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
  2280. this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
  2281. }
  2282. /**
  2283. * Build a MemoryShaderDeclaration
  2284. * @returns {ShaderDeclaration}
  2285. */
  2286. build() {
  2287. return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
  2288. }
  2289. }
  2290. /**
  2291. * A builder of a FileShaderDeclaration
  2292. */
  2293. class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2294. /**
  2295. * @private Constructor
  2296. * @param {Symbol} privateToken
  2297. * @param {string} fsFilepath
  2298. * @param {string} [vsFilepath]
  2299. */
  2300. constructor(privateToken, fsFilepath, vsFilepath) {
  2301. super(privateToken);
  2302. /** @type {string} path to the unprocessed GLSL code of the fragment shader */
  2303. this._fsFilepath = String(fsFilepath);
  2304. /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
  2305. this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
  2306. }
  2307. /**
  2308. * Build a FileShaderDeclaration
  2309. * @returns {ShaderDeclaration}
  2310. */
  2311. build() {
  2312. return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
  2313. }
  2314. }
  2315. /**
  2316. * Import a ShaderDeclaration from a GLSL file
  2317. * @param {string} filepath relative to the shaders/ folder (a .glsl file)
  2318. * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
2319. * @returns {ShaderDeclarationBuilder}
  2320. */
  2321. function importShader(filepath, vsfilepath = undefined) {
  2322. return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
  2323. }
  2324. /**
  2325. * Create a ShaderDeclaration from a GLSL source code
  2326. * @param {string} source fragment shader
  2327. * @param {string} [vssource] optional vertex shader
2328. * @returns {ShaderDeclarationBuilder}
  2329. */
  2330. function createShader(source, vssource = undefined) {
  2331. return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
  2332. }
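/*
 * Usage sketch (the GLSL body, uniform names and #defines are assumptions, not taken
 * from the library): both factories return a builder, so the argument list and the
 * pre-processor constants are chained before build():
 *
 *   const decl = createShader(`
 *       uniform sampler2D image;
 *       void main() { color = threadPixel(image); }
 *   `).withArguments('image').withDefines({ KERNEL_SIZE: 5 }).build();
 *
 *   // importShader() works the same way, but loads the code from the shaders/ folder:
 *   // importShader('filters/convolution2d.glsl').withArguments('image', 'kernel').build();
 */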
  2333. /***/ }),
  2334. /***/ 1672:
  2335. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_89681__) => {
  2336. "use strict";
  2337. __nested_webpack_require_89681__.r(__nested_webpack_exports__);
  2338. /* harmony export */ __nested_webpack_require_89681__.d(__nested_webpack_exports__, {
  2339. /* harmony export */ conv2D: () => (/* binding */ conv2D),
  2340. /* harmony export */ convX: () => (/* binding */ convX),
  2341. /* harmony export */ convY: () => (/* binding */ convY)
  2342. /* harmony export */ });
  2343. /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_89681__(9420);
  2344. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_89681__(9037);
  2345. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_89681__(8581);
  2346. /*
  2347. * speedy-vision.js
  2348. * GPU-accelerated Computer Vision for JavaScript
  2349. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2350. *
  2351. * Licensed under the Apache License, Version 2.0 (the "License");
  2352. * you may not use this file except in compliance with the License.
  2353. * You may obtain a copy of the License at
  2354. *
  2355. * http://www.apache.org/licenses/LICENSE-2.0
  2356. *
  2357. * Unless required by applicable law or agreed to in writing, software
  2358. * distributed under the License is distributed on an "AS IS" BASIS,
  2359. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2360. * See the License for the specific language governing permissions and
  2361. * limitations under the License.
  2362. *
  2363. * convolution.js
  2364. * Convolution shader generators
  2365. */
  2366. /**
  2367. * Generate a 2D convolution with a square kernel
  2368. * @param {number[]} kernel convolution kernel
  2369. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2370. * @returns {ShaderDeclarationBuilder}
  2371. */
  2372. function conv2D(kernel, normalizationConstant = 1.0) {
  2373. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2374. const kSize = Math.sqrt(kernel32.length) | 0;
  2375. const N = kSize >> 1; // idiv 2
  2376. // validate input
  2377. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);else if (kSize * kSize != kernel32.length) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
  2378. // select the appropriate pixel function
  2379. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2380. // code generator
  2381. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(cur => fn(kernel32[(cur[0] + N) * kSize + (cur[1] + N)], cur[0], cur[1])).join('\n');
  2382. const generateCode = (k, dy, dx) => `
  2383. result += ${pixelAtOffset}(image, ivec2(${-dx | 0}, ${-dy | 0})) * float(${+k});
  2384. `;
  2385. // shader
  2386. const source = `
  2387. uniform sampler2D image;
  2388. void main()
  2389. {
  2390. float alpha = threadPixel(image).a;
  2391. vec4 result = vec4(0.0f);
  2392. ${foreachKernelElement(generateCode)}
  2393. color = vec4(result.rgb, alpha);
  2394. }
  2395. `;
  2396. // done!
  2397. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2398. }
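/*
 * Illustrative sketch: conv2D() unrolls the kernel into one accumulation statement per
 * entry, of the form
 *
 *   result += pixelAtShortOffset(image, ivec2(dx, dy)) * float(k);
 *
 * where (dx, dy) spans the kSize x kSize neighborhood and k is the normalized kernel
 * entry. For instance, a 3x3 box blur could be declared as:
 *
 *   const boxBlur3x3 = conv2D([1, 1, 1, 1, 1, 1, 1, 1, 1], 1 / 9).build();
 */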
  2399. /**
  2400. * Generate a 1D convolution function on the x-axis
  2401. * @param {number[]} kernel convolution kernel
  2402. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2403. * @returns {ShaderDeclarationBuilder}
  2404. */
  2405. function convX(kernel, normalizationConstant = 1.0) {
  2406. return conv1D('x', kernel, normalizationConstant);
  2407. }
  2408. /**
  2409. * Generate a 1D convolution function on the y-axis
  2410. * @param {number[]} kernel convolution kernel
  2411. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2412. * @returns {ShaderDeclarationBuilder}
  2413. */
  2414. function convY(kernel, normalizationConstant = 1.0) {
  2415. return conv1D('y', kernel, normalizationConstant);
  2416. }
  2417. /**
  2418. * 1D convolution function generator
  2419. * @param {string} axis either "x" or "y"
  2420. * @param {number[]} kernel convolution kernel
  2421. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2422. * @returns {ShaderDeclarationBuilder}
  2423. */
  2424. function conv1D(axis, kernel, normalizationConstant = 1.0) {
  2425. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2426. const kSize = kernel32.length;
  2427. const N = kSize >> 1; // idiv 2
  2428. // validate input
  2429. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);else if (axis != 'x' && axis != 'y') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
  2430. // select the appropriate pixel function
  2431. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2432. // code generator
  2433. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce((acc, cur) => acc + fn(kernel32[cur + N], cur), '');
  2434. const generateCode = (k, i) => axis == 'x' ? `
  2435. pixel += ${pixelAtOffset}(image, ivec2(${-i | 0}, 0)) * float(${+k});
  2436. ` : `
  2437. pixel += ${pixelAtOffset}(image, ivec2(0, ${-i | 0})) * float(${+k});
  2438. `;
  2439. // shader
  2440. const source = `
  2441. uniform sampler2D image;
  2442. void main()
  2443. {
  2444. float alpha = threadPixel(image).a;
  2445. vec4 pixel = vec4(0.0f);
  2446. ${foreachKernelElement(generateCode)}
  2447. color = vec4(pixel.rgb, alpha);
  2448. }
  2449. `;
  2450. // done!
  2451. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2452. }
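/*
 * Usage sketch, assuming Utils.gaussianKernel() from the utilities module of this bundle:
 * a separable Gaussian blur is expressed as two 1D passes, which is cheaper than the
 * equivalent 2D convolution.
 *
 *   const kernel = Utils.gaussianKernel(1.0, 5); // 5-tap kernel, sigma = 1
 *   const blurX = convX(kernel).build();         // horizontal pass
 *   const blurY = convY(kernel).build();         // vertical pass
 */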
  2453. /***/ }),
  2454. /***/ 1001:
  2455. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_95546__) => {
  2456. "use strict";
  2457. /* harmony export */ __nested_webpack_require_95546__.d(__nested_webpack_exports__, {
  2458. /* harmony export */ c: () => (/* binding */ SpeedyGL)
  2459. /* harmony export */ });
  2460. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_95546__(9037);
  2461. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_95546__(2199);
  2462. /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_95546__(3211);
  2463. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_95546__(9192);
  2464. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_95546__(8581);
  2465. /*
  2466. * speedy-vision.js
  2467. * GPU-accelerated Computer Vision for JavaScript
  2468. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2469. *
  2470. * Licensed under the Apache License, Version 2.0 (the "License");
  2471. * you may not use this file except in compliance with the License.
  2472. * You may obtain a copy of the License at
  2473. *
  2474. * http://www.apache.org/licenses/LICENSE-2.0
  2475. *
  2476. * Unless required by applicable law or agreed to in writing, software
  2477. * distributed under the License is distributed on an "AS IS" BASIS,
  2478. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2479. * See the License for the specific language governing permissions and
  2480. * limitations under the License.
  2481. *
  2482. * speedy-gl.js
  2483. * A wrapper around the WebGL Rendering Context
  2484. */
  2485. /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
  2486. // Constants
  2487. const SINGLETON_KEY = Symbol();
  2488. const DEFAULT_POWER_PREFERENCE = 'default';
  2489. //
  2490. // We use a small canvas to improve the performance
  2491. // of createImageBitmap() on Firefox.
  2492. //
2493. // A large canvas (2048x2048) causes an FPS drop, even
2494. // if we only extract a small region of it (unlike
2495. // Chrome, which remains fast).
  2496. //
  2497. // Note: we automatically increase the size of the
  2498. // canvas (as needed) when rendering to it.
  2499. //
  2500. const CANVAS_WIDTH = 16,
  2501. CANVAS_HEIGHT = 16;
  2502. /** @type {SpeedyGL} Singleton */
  2503. let instance = null;
  2504. /** @type {PowerPreference} power preference */
  2505. let powerPreference = DEFAULT_POWER_PREFERENCE;
  2506. /**
  2507. * A wrapper around a WebGL Rendering Context
  2508. */
  2509. class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c {
  2510. /**
  2511. * Constructor
  2512. * @param {Symbol} key
  2513. * @private
  2514. */
  2515. constructor(key) {
  2516. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
  2517. super();
  2518. /** @type {boolean} internal flag */
  2519. this._reinitializeOnContextLoss = true;
  2520. /** @type {HTMLCanvasElement} internal canvas */
  2521. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2522. /** @type {WebGL2RenderingContext} WebGL rendering context */
  2523. this._gl = this._createContext(this._canvas);
  2524. /** @type {string} vendor string of the video driver */
  2525. this._vendor = '';
  2526. /** @type {string} renderer string of the video driver */
  2527. this._renderer = '';
  2528. // read driver info
  2529. this._readDriverInfo();
  2530. // log driver info
  2531. if (_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic') this._logDriverInfo();
  2532. }
  2533. /**
  2534. * Get Singleton
  2535. * @returns {SpeedyGL}
  2536. */
  2537. static get instance() {
  2538. return instance || (instance = new SpeedyGL(SINGLETON_KEY));
  2539. }
  2540. /**
  2541. * The WebGL Rendering Context
  2542. * Be careful not to cache this rendering context, as it may be lost!
  2543. * @returns {WebGL2RenderingContext}
  2544. */
  2545. get gl() {
  2546. return this._gl;
  2547. }
  2548. /**
  2549. * The internal canvas
  2550. * @returns {HTMLCanvasElement}
  2551. */
  2552. get canvas() {
  2553. return this._canvas;
  2554. }
  2555. /**
  2556. * Renderer string of the video driver
  2557. * @returns {string}
  2558. */
  2559. get renderer() {
  2560. return this._renderer;
  2561. }
  2562. /**
  2563. * Vendor string of the video driver
  2564. * @returns {string}
  2565. */
  2566. get vendor() {
  2567. return this._vendor;
  2568. }
  2569. /**
  2570. * Create a WebGL-capable canvas
  2571. * @param {Function} reinitialize to be called if we get a WebGL context loss event
  2572. * @returns {HTMLCanvasElement}
  2573. */
  2574. _createCanvas(reinitialize) {
  2575. const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
  2576. canvas.addEventListener('webglcontextlost', ev => {
  2577. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
  2578. setTimeout(reinitialize, 0);
  2579. ev.preventDefault();
  2580. }, false);
  2581. /*canvas.addEventListener('webglcontextrestored', ev => {
  2582. Utils.warning(`Restored WebGL2 context`);
  2583. ev.preventDefault();
  2584. }, false);*/
  2585. return canvas;
  2586. }
  2587. /**
  2588. * Create a WebGL2 Rendering Context
  2589. * @param {HTMLCanvasElement} canvas
  2590. * @returns {WebGL2RenderingContext}
  2591. */
  2592. _createContext(canvas) {
  2593. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
  2594. // does the browser support WebGL2?
  2595. if (typeof WebGL2RenderingContext === 'undefined') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please update your system.`);
  2596. const gl = canvas.getContext('webgl2', {
  2597. premultipliedAlpha: false,
  2598. preserveDrawingBuffer: false,
  2599. powerPreference: powerPreference,
  2600. alpha: true,
  2601. // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  2602. antialias: false,
  2603. depth: false,
  2604. stencil: false,
  2605. desynchronized: true
  2606. });
  2607. if (!gl) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
  2608. return gl;
  2609. }
  2610. /**
  2611. * Reinitialize WebGL
  2612. */
  2613. _reinitialize() {
  2614. // disable reinitialization?
  2615. if (!this._reinitializeOnContextLoss) return;
  2616. // warning
  2617. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
  2618. // create new canvas
  2619. this._canvas.remove();
  2620. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2621. // create new context
  2622. this._gl = this._createContext(this._canvas);
  2623. // is this needed?
  2624. this._readDriverInfo();
  2625. // notify observers: we have a new context!
  2626. // we need to recreate all textures...
  2627. this._notify();
  2628. }
  2629. /**
  2630. * Read debugging information about the video driver of the user
  2631. */
  2632. _readDriverInfo() {
  2633. // Depending on the privacy settings of the browser, this information
  2634. // may be unavailable. When available, it may not be entirely correct.
  2635. // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
  2636. const gl = this._gl;
  2637. let debugInfo = null;
  2638. if (navigator.userAgent.includes('Firefox')) {
  2639. this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
  2640. this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
  2641. } else if (null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
  2642. this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
  2643. this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
  2644. } else {
  2645. this._vendor = ''; // unavailable information
  2646. this._renderer = '';
  2647. }
  2648. }
  2649. /**
  2650. * Log debugging information about the video driver and the platform
  2651. */
  2652. _logDriverInfo() {
  2653. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
  2654. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
  2655. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
  2656. }
  2657. /**
  2658. * Lose the WebGL context. This is used to manually
  2659. * free resources, and also for purposes of testing
  2660. * @returns {WEBGL_lose_context}
  2661. */
  2662. loseContext() {
  2663. const gl = this._gl;
  2664. // find the appropriate extension
  2665. const ext = gl.getExtension('WEBGL_lose_context');
  2666. if (!ext) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
  2667. // nothing to do?
  2668. if (gl.isContextLost()) return ext;
  2669. // disable reinitialization
  2670. this._reinitializeOnContextLoss = false;
  2671. // lose context
  2672. ext.loseContext();
  2673. // done!
  2674. return ext;
  2675. }
  2676. /**
  2677. * Lose & restore the WebGL context
  2678. * @param {number} [secondsToRestore]
  2679. * @return {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
  2680. */
  2681. loseAndRestoreContext(secondsToRestore = 1) {
  2682. const ms = Math.max(secondsToRestore, 0) * 1000;
  2683. const ext = this.loseContext();
  2684. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
  2685. setTimeout(() => {
  2686. //ext.restoreContext();
  2687. this._reinitializeOnContextLoss = true;
  2688. this._reinitialize();
  2689. setTimeout(() => resolve(ext), 0); // next frame
  2690. }, ms);
  2691. });
  2692. }
  2693. /**
  2694. * Power preference for the WebGL context
  2695. * @returns {PowerPreference}
  2696. */
  2697. static get powerPreference() {
  2698. return powerPreference;
  2699. }
  2700. /**
  2701. * Power preference for the WebGL context
  2702. * @param {PowerPreference} value
  2703. */
  2704. static set powerPreference(value) {
  2705. // validate
  2706. if (!(value === 'default' || value === 'low-power' || value === 'high-performance')) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
  2707. // the power preference should be set before we create the WebGL context
  2708. if (instance == null || powerPreference !== value) {
  2709. powerPreference = value;
  2710. // recreate the context if it already exists. Experimental.
  2711. if (instance != null) instance.loseAndRestoreContext();
  2712. }
  2713. }
  2714. /**
  2715. * Check if an instance of SpeedyGL has already been created
  2716. * @returns {boolean}
  2717. */
  2718. static isInitialized() {
  2719. return instance != null;
  2720. }
  2721. }
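/*
 * Usage sketch: SpeedyGL is a lazily-created singleton. Set the power preference before
 * the first access, since it is passed to getContext() when the context is created.
 * The callback name below (rebuildTextures) is an assumption for illustration only.
 *
 *   SpeedyGL.powerPreference = 'high-performance'; // optional; defaults to 'default'
 *   const speedyGL = SpeedyGL.instance;
 *   const gl = speedyGL.gl; // don't cache this elsewhere: the context may be lost
 *
 *   // SpeedyGL extends Observable: subscribers are notified after the context is
 *   // recreated following a loss, so GPU resources can be rebuilt
 *   speedyGL.subscribe(() => rebuildTextures());
 */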
  2722. /***/ }),
  2723. /***/ 8581:
  2724. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_105678__) => {
  2725. "use strict";
  2726. /* harmony export */ __nested_webpack_require_105678__.d(__nested_webpack_exports__, {
  2727. /* harmony export */ EM: () => (/* binding */ NotSupportedError),
  2728. /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
  2729. /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
  2730. /* harmony export */ MU: () => (/* binding */ TimeoutError),
  2731. /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
  2732. /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
  2733. /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
  2734. /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
  2735. /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
  2736. /* harmony export */ mB: () => (/* binding */ ParseError),
  2737. /* harmony export */ pf: () => (/* binding */ AssertionError),
  2738. /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
  2739. /* harmony export */ wB: () => (/* binding */ GLError),
  2740. /* harmony export */ xB: () => (/* binding */ SpeedyError)
  2741. /* harmony export */ });
  2742. /* unused harmony export NotImplementedError */
  2743. /*
  2744. * speedy-vision.js
  2745. * GPU-accelerated Computer Vision for JavaScript
  2746. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2747. *
  2748. * Licensed under the Apache License, Version 2.0 (the "License");
  2749. * you may not use this file except in compliance with the License.
  2750. * You may obtain a copy of the License at
  2751. *
  2752. * http://www.apache.org/licenses/LICENSE-2.0
  2753. *
  2754. * Unless required by applicable law or agreed to in writing, software
  2755. * distributed under the License is distributed on an "AS IS" BASIS,
  2756. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2757. * See the License for the specific language governing permissions and
  2758. * limitations under the License.
  2759. *
  2760. * errors.js
  2761. * Error classes
  2762. */
  2763. /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
  2764. /**
  2765. * Generic error class for Speedy
  2766. */
  2767. class SpeedyError extends Error {
  2768. /**
  2769. * Class constructor
  2770. * @param {string} message message text
  2771. * @param {SpeedyErrorCause} [cause] cause of the error
  2772. */
  2773. constructor(message, cause = null) {
  2774. super([message, cause ? cause.toString() : '[speedy-vision.js]'].join('\n-> '));
  2775. /** @type {SpeedyErrorCause} cause of the error */
  2776. this._cause = cause;
  2777. }
  2778. /**
  2779. * Error name
  2780. * @returns {string}
  2781. */
  2782. get name() {
  2783. return this.constructor.name;
  2784. }
  2785. /**
  2786. * Set error name (ignored)
  2787. * @param {string} _ ignored
  2788. */
  2789. set name(_) {
  2790. void 0;
  2791. }
  2792. /**
  2793. * Get the cause of the error. Available if
  2794. * it has been specified in the constructor
  2795. * @returns {SpeedyErrorCause}
  2796. */
  2797. get cause() {
  2798. return this._cause;
  2799. }
  2800. }
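/*
 * Usage sketch: the optional cause argument chains errors, and the constructor above
 * joins the messages with "-> ":
 *
 *   try {
 *       throw new Error('file is corrupt');
 *   }
 *   catch (e) {
 *       const err = new SpeedyError(`Can't load the resource`, e);
 *       err.message; // "Can't load the resource\n-> Error: file is corrupt"
 *       err.cause;   // the original Error
 *   }
 */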
  2801. /**
  2802. * Unsupported operation error
  2803. * The requested operation is not supported
  2804. */
  2805. class NotSupportedError extends SpeedyError {
  2806. /**
  2807. * Class constructor
  2808. * @param {string} [message] additional text
  2809. * @param {SpeedyErrorCause} [cause] cause of the error
  2810. */
  2811. constructor(message = '', cause = null) {
  2812. super(`Unsupported operation. ${message}`, cause);
  2813. }
  2814. }
  2815. /**
  2816. * Not implemented error
  2817. * The called method is not implemented
  2818. */
  2819. class NotImplementedError extends SpeedyError {
  2820. /**
  2821. * Class constructor
  2822. * @param {string} [message] additional text
  2823. * @param {SpeedyErrorCause} [cause] cause of the error
  2824. */
  2825. constructor(message = '', cause = null) {
  2826. super(`Method not implemented. ${message}`, cause);
  2827. }
  2828. }
  2829. /**
  2830. * WebGL error
  2831. */
  2832. class GLError extends SpeedyError {
  2833. /**
  2834. * Class constructor
  2835. * @param {string} [message] additional text
  2836. * @param {SpeedyErrorCause} [cause] cause of the error
  2837. */
  2838. constructor(message = '', cause = null) {
  2839. super(`WebGL error. ${message}`, cause);
  2840. }
  2841. /**
  2842. * Get an error object describing the latest WebGL error
  2843. * @param {WebGL2RenderingContext} gl
  2844. * @returns {GLError}
  2845. */
  2846. static from(gl) {
  2847. const recognizedErrors = ['NO_ERROR', 'INVALID_ENUM', 'INVALID_VALUE', 'INVALID_OPERATION', 'INVALID_FRAMEBUFFER_OPERATION', 'OUT_OF_MEMORY', 'CONTEXT_LOST_WEBGL'];
  2848. const glError = gl.getError();
  2849. const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
  2850. return new GLError(message);
  2851. }
  2852. }
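/*
 * Usage sketch: GLError.from() wraps gl.getError() in a readable error object. A simple
 * guard after a suspicious sequence of WebGL calls could look like this:
 *
 *   const error = GLError.from(gl); // e.g. "WebGL error. INVALID_OPERATION"
 *   if (!error.message.includes('NO_ERROR'))
 *       throw error;
 */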
  2853. /**
  2854. * AbstractMethodError
  2855. * Thrown when one tries to call an abstract method
  2856. */
  2857. class AbstractMethodError extends SpeedyError {
  2858. /**
  2859. * Class constructor
  2860. * @param {string} [message] additional text
  2861. * @param {SpeedyErrorCause} [cause] cause of the error
  2862. */
  2863. constructor(message = '', cause = null) {
  2864. super(`Can't call abstract method. ${message}`, cause);
  2865. }
  2866. }
  2867. /**
  2868. * Illegal argument error
  2869. * A method has received one or more illegal arguments
  2870. */
  2871. class IllegalArgumentError extends SpeedyError {
  2872. /**
  2873. * Class constructor
  2874. * @param {string} [message] additional text
  2875. * @param {SpeedyErrorCause} [cause] cause of the error
  2876. */
  2877. constructor(message = '', cause = null) {
  2878. super(`Illegal argument. ${message}`, cause);
  2879. }
  2880. }
  2881. /**
  2882. * Illegal operation error
  2883. * The method arguments are valid, but the method can't
2884. be called due to the current state of the object
  2885. */
  2886. class IllegalOperationError extends SpeedyError {
  2887. /**
  2888. * Class constructor
  2889. * @param {string} [message] additional text
  2890. * @param {SpeedyErrorCause} [cause] cause of the error
  2891. */
  2892. constructor(message = '', cause = null) {
  2893. super(`Illegal operation. ${message}`, cause);
  2894. }
  2895. }
  2896. /**
  2897. * Out of memory
  2898. */
  2899. class OutOfMemoryError extends SpeedyError {
  2900. /**
  2901. * Class constructor
  2902. * @param {string} [message] additional text
  2903. * @param {SpeedyErrorCause} [cause] cause of the error
  2904. */
  2905. constructor(message = '', cause = null) {
  2906. super(`Out of memory. ${message}`, cause);
  2907. }
  2908. }
  2909. /**
  2910. * File not found error
  2911. */
  2912. class FileNotFoundError extends SpeedyError {
  2913. /**
  2914. * Class constructor
  2915. * @param {string} [message] additional text
  2916. * @param {SpeedyErrorCause} [cause] cause of the error
  2917. */
  2918. constructor(message = '', cause = null) {
  2919. super(`File not found. ${message}`, cause);
  2920. }
  2921. }
  2922. /**
  2923. * Resource not loaded error
  2924. */
  2925. class ResourceNotLoadedError extends SpeedyError {
  2926. /**
  2927. * Class constructor
  2928. * @param {string} [message] additional text
  2929. * @param {SpeedyErrorCause} [cause] cause of the error
  2930. */
  2931. constructor(message = '', cause = null) {
  2932. super(`Resource not loaded. ${message}`, cause);
  2933. }
  2934. }
  2935. /**
  2936. * Timeout error
  2937. */
  2938. class TimeoutError extends SpeedyError {
  2939. /**
  2940. * Class constructor
  2941. * @param {string} [message] additional text
  2942. * @param {SpeedyErrorCause} [cause] cause of the error
  2943. */
  2944. constructor(message = '', cause = null) {
  2945. super(`Timeout error. ${message}`, cause);
  2946. }
  2947. }
  2948. /**
  2949. * Parse error
  2950. */
  2951. class ParseError extends SpeedyError {
  2952. /**
  2953. * Class constructor
  2954. * @param {string} [message] additional text
  2955. * @param {SpeedyErrorCause} [cause] cause of the error
  2956. */
  2957. constructor(message = '', cause = null) {
  2958. super(`Parse error. ${message}`, cause);
  2959. }
  2960. }
  2961. /**
  2962. * Assertion error
  2963. */
  2964. class AssertionError extends SpeedyError {
  2965. /**
  2966. * Class constructor
  2967. * @param {string} [message] additional text
  2968. * @param {SpeedyErrorCause} [cause] cause of the error
  2969. */
  2970. constructor(message = '', cause = null) {
  2971. super(`Assertion failed. ${message}`, cause);
  2972. }
  2973. }
  2974. /**
  2975. * Access denied
  2976. */
  2977. class AccessDeniedError extends SpeedyError {
  2978. /**
  2979. * Class constructor
  2980. * @param {string} [message] additional text
  2981. * @param {SpeedyErrorCause} [cause] cause of the error
  2982. */
  2983. constructor(message = '', cause = null) {
  2984. super(`Access denied. ${message}`, cause);
  2985. }
  2986. }
  2987. /**
  2988. * WebAssembly error
  2989. */
  2990. class WebAssemblyError extends SpeedyError {
  2991. /**
  2992. * Class constructor
  2993. * @param {string} [message] additional text
  2994. * @param {SpeedyErrorCause} [cause] cause of the error
  2995. */
  2996. constructor(message = '', cause = null) {
  2997. super(`WebAssembly error. ${message}`, cause);
  2998. }
  2999. }
  3000. /***/ }),
  3001. /***/ 3816:
  3002. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_113692__) => {
  3003. "use strict";
  3004. __nested_webpack_require_113692__.r(__nested_webpack_exports__);
  3005. /* harmony export */ __nested_webpack_require_113692__.d(__nested_webpack_exports__, {
  3006. /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
  3007. /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
  3008. /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
  3009. /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
  3010. /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
  3011. /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
  3012. /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
  3013. /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
  3014. /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
  3015. /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
  3016. /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
  3017. /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
  3018. /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
  3019. /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
  3020. /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
  3021. /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
  3022. /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
  3023. /* harmony export */ });
  3024. /*
  3025. * speedy-vision.js
  3026. * GPU-accelerated Computer Vision for JavaScript
  3027. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3028. *
  3029. * Licensed under the Apache License, Version 2.0 (the "License");
  3030. * you may not use this file except in compliance with the License.
  3031. * You may obtain a copy of the License at
  3032. *
  3033. * http://www.apache.org/licenses/LICENSE-2.0
  3034. *
  3035. * Unless required by applicable law or agreed to in writing, software
  3036. * distributed under the License is distributed on an "AS IS" BASIS,
  3037. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3038. * See the License for the specific language governing permissions and
  3039. * limitations under the License.
  3040. *
  3041. * globals.js
  3042. * Global constants
  3043. */
  3044. // -----------------------------------------------------------------
  3045. // IMAGE PYRAMIDS & SCALE-SPACE
  3046. // -----------------------------------------------------------------
  3047. /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
  3048. const PYRAMID_MAX_LEVELS = 8;
  3049. /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
  3050. const LOG2_PYRAMID_MAX_SCALE = 0;
  3051. /** @type {number} The maximum supported scale for a pyramid level */
  3052. const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
  3053. // -----------------------------------------------------------------
  3054. // FIXED-POINT MATH
  3055. // -----------------------------------------------------------------
  3056. /** @type {number} How many bits do we use to store fractional data? */
  3057. const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
  3058. /** @type {number} Fixed-point resolution */
  3059. const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
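/*
 * Illustrative sketch (a minimal fixed-point round trip consistent with these constants;
 * the actual encoders live elsewhere in the bundle): with FIX_BITS = 3, coordinates are
 * stored with a resolution of 1/8 pixel.
 *
 *   const x = 12.375;                                   // a subpixel coordinate (example)
 *   const encoded = Math.round(x * FIX_RESOLUTION) | 0; // pixels -> fixed-point
 *   const decoded = encoded / FIX_RESOLUTION;           // fixed-point -> pixels
 */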
  3060. // -----------------------------------------------------------------
  3061. // TEXTURE LIMITS
  3062. // -----------------------------------------------------------------
  3063. /** @type {number} Maximum texture length (width, height) */
  3064. const MAX_TEXTURE_LENGTH = (1 << 16 - FIX_BITS) - 1; // must be 2^n - 1 due to keypoint encoding
  3065. // -----------------------------------------------------------------
  3066. // KEYPOINTS
  3067. // -----------------------------------------------------------------
  3068. /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
  3069. const MIN_KEYPOINT_SIZE = 8;
  3070. /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
  3071. const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
  3072. /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
  3073. const MAX_ENCODER_CAPACITY = 8192;
  3074. /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
  3075. const DEFAULT_ENCODER_CAPACITY = 2048;
  3076. /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
  3077. const LOG2_MAX_DESCRIPTOR_SIZE = 6;
  3078. /** @type {number} maximum size of a keypoint descriptor, in bytes */
  3079. const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
  3080. /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
  3081. const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
  3082. /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
  3083. const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
  3084. /** @type {number} Maximum size of the database of keypoints for matching */
  3085. const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
  3086. /** @type {number} The maximum distance that can be stored in a match */
  3087. const MATCH_MAX_DISTANCE = (1 << 32 - MATCH_INDEX_BITS) - 1;
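/*
 * Illustrative sketch: these constants describe a 32-bit match encoding with the keypoint
 * index in the low MATCH_INDEX_BITS bits and the distance in the remaining high bits.
 * A minimal packing consistent with them (not necessarily the exact layout used by the
 * GPU matcher) would be:
 *
 *   const encodedMatch = (distance << MATCH_INDEX_BITS) | (index & MATCH_INDEX_MASK);
 *   const decodedIndex = encodedMatch & MATCH_INDEX_MASK;
 *   const decodedDistance = encodedMatch >>> MATCH_INDEX_BITS;
 */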
  3088. // -----------------------------------------------------------------
  3089. // MISC
  3090. // -----------------------------------------------------------------
  3091. /** @type {boolean} Are we in a little-endian machine? */
  3092. const LITTLE_ENDIAN = function () {
  3093. return 0xCAFE === new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer)[0];
  3094. }();
  3095. /***/ }),
  3096. /***/ 3211:
  3097. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_119275__) => {
  3098. "use strict";
  3099. /* harmony export */ __nested_webpack_require_119275__.d(__nested_webpack_exports__, {
  3100. /* harmony export */ c: () => (/* binding */ Observable)
  3101. /* harmony export */ });
  3102. /*
  3103. * speedy-vision.js
  3104. * GPU-accelerated Computer Vision for JavaScript
  3105. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3106. *
  3107. * Licensed under the Apache License, Version 2.0 (the "License");
  3108. * you may not use this file except in compliance with the License.
  3109. * You may obtain a copy of the License at
  3110. *
  3111. * http://www.apache.org/licenses/LICENSE-2.0
  3112. *
  3113. * Unless required by applicable law or agreed to in writing, software
  3114. * distributed under the License is distributed on an "AS IS" BASIS,
  3115. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3116. * See the License for the specific language governing permissions and
  3117. * limitations under the License.
  3118. *
  3119. * observable.js
  3120. * Observer design pattern
  3121. */
  3122. /**
  3123. * Implementation of the Observer design pattern
  3124. * @abstract
  3125. */
  3126. class Observable {
  3127. /**
  3128. * Constructor
  3129. */
  3130. constructor() {
  3131. /** @type {Function[]} subscribers / callbacks */
  3132. this._subscribers = [];
  3133. /** @type {object[]} "this" pointers */
  3134. this._thisptr = [];
  3135. /** @type {Array<any[]>} function arguments */
  3136. this._args = [];
  3137. }
  3138. /**
  3139. * Add subscriber
  3140. * @param {Function} fn callback
  3141. * @param {object} [thisptr] "this" pointer to be used when invoking the callback
  3142. * @param {...any} args arguments to be passed to the callback
  3143. */
  3144. subscribe(fn, thisptr, ...args) {
  3145. this._subscribers.push(fn);
  3146. this._thisptr.push(thisptr);
  3147. this._args.push(args);
  3148. }
  3149. /**
  3150. * Remove subscriber
  3151. * @param {Function} fn previously added callback
  3152. * @param {object} [thisptr] "this" pointer
  3153. */
  3154. unsubscribe(fn, thisptr) {
  3155. for (let j = this._subscribers.length - 1; j >= 0; j--) {
  3156. if (this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
  3157. this._subscribers.splice(j, 1);
  3158. this._thisptr.splice(j, 1);
  3159. this._args.splice(j, 1);
  3160. break;
  3161. }
  3162. }
  3163. }
  3164. /**
  3165. * Notify all subscribers about a state change
  3166. * @protected
  3167. */
  3168. _notify() {
  3169. for (let i = 0; i < this._subscribers.length; i++) this._subscribers[i].apply(this._thisptr[i], this._args[i]);
  3170. }
  3171. }
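/*
 * Usage sketch: a subclass calls _notify() to invoke every subscriber. The Counter class
 * below is only an example, not part of the library.
 *
 *   class Counter extends Observable {
 *       constructor() { super(); this._value = 0; }
 *       increment() { this._value++; this._notify(); }
 *   }
 *
 *   const counter = new Counter();
 *   counter.subscribe(() => console.log('changed!'));
 *   counter.increment(); // logs "changed!"
 */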
  3172. /***/ }),
  3173. /***/ 6049:
  3174. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_121659__) => {
  3175. "use strict";
  3176. /* harmony export */ __nested_webpack_require_121659__.d(__nested_webpack_exports__, {
  3177. /* harmony export */ f5: () => (/* binding */ ImageFormat),
  3178. /* harmony export */ kQ: () => (/* binding */ PixelComponent),
  3179. /* harmony export */ kg: () => (/* binding */ ColorComponentId),
  3180. /* harmony export */ zu: () => (/* binding */ MediaType)
  3181. /* harmony export */ });
  3182. /*
  3183. * speedy-vision.js
  3184. * GPU-accelerated Computer Vision for JavaScript
  3185. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3186. *
  3187. * Licensed under the Apache License, Version 2.0 (the "License");
  3188. * you may not use this file except in compliance with the License.
  3189. * You may obtain a copy of the License at
  3190. *
  3191. * http://www.apache.org/licenses/LICENSE-2.0
  3192. *
  3193. * Unless required by applicable law or agreed to in writing, software
  3194. * distributed under the License is distributed on an "AS IS" BASIS,
  3195. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3196. * See the License for the specific language governing permissions and
  3197. * limitations under the License.
  3198. *
  3199. * types.js
  3200. * Types & formats
  3201. */
  3202. /**
  3203. * Media types
  3204. * @enum {Symbol}
  3205. */
  3206. const MediaType = Object.freeze({
  3207. Image: Symbol('Image'),
  3208. Video: Symbol('Video'),
  3209. Canvas: Symbol('Canvas'),
  3210. OffscreenCanvas: Symbol('OffscreenCanvas'),
  3211. Bitmap: Symbol('Bitmap'),
  3212. Data: Symbol('Data')
  3213. });
  3214. /**
  3215. * Image formats
  3216. * @enum {Symbol}
  3217. */
  3218. const ImageFormat = Object.freeze({
  3219. RGBA: Symbol('RGBA'),
  3220. GREY: Symbol('GREY')
  3221. });
  3222. /**
  3223. * Pixel component (bitwise flags)
  3224. * @typedef {number} PixelComponent
  3225. */
  3226. const PixelComponent = Object.freeze({
  3227. RED: 1,
  3228. GREEN: 2,
  3229. BLUE: 4,
  3230. ALPHA: 8,
  3231. ALL: 15 // = RED | GREEN | BLUE | ALPHA
  3232. });
  3233. /**
3234. * Maps a PixelComponent flag to its RGBA channel index
  3235. */
  3236. const ColorComponentId = Object.freeze({
  3237. [PixelComponent.RED]: 0,
  3238. [PixelComponent.GREEN]: 1,
  3239. [PixelComponent.BLUE]: 2,
  3240. [PixelComponent.ALPHA]: 3
  3241. });
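/*
 * Illustrative sketch: PixelComponent values are bitwise flags, and ColorComponentId maps
 * a single flag to its RGBA channel index:
 *
 *   const components = PixelComponent.RED | PixelComponent.ALPHA; // 1 | 8 == 9
 *   (components & PixelComponent.ALPHA) !== 0;                    // true: alpha is selected
 *   ColorComponentId[PixelComponent.GREEN];                       // 1
 */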
  3242. /***/ }),
  3243. /***/ 9037:
  3244. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_123644__) => {
  3245. "use strict";
  3246. /* harmony export */ __nested_webpack_require_123644__.d(__nested_webpack_exports__, {
  3247. /* harmony export */ A: () => (/* binding */ Utils)
  3248. /* harmony export */ });
  3249. /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_123644__(8581);
  3250. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_123644__(9192);
  3251. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_123644__(2199);
  3252. /*
  3253. * speedy-vision.js
  3254. * GPU-accelerated Computer Vision for JavaScript
  3255. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3256. *
  3257. * Licensed under the Apache License, Version 2.0 (the "License");
  3258. * you may not use this file except in compliance with the License.
  3259. * You may obtain a copy of the License at
  3260. *
  3261. * http://www.apache.org/licenses/LICENSE-2.0
  3262. *
  3263. * Unless required by applicable law or agreed to in writing, software
  3264. * distributed under the License is distributed on an "AS IS" BASIS,
  3265. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3266. * See the License for the specific language governing permissions and
  3267. * limitations under the License.
  3268. *
  3269. * utils.js
  3270. * Generic utilities
  3271. */
  3272. /**
  3273. * Generic utilities
  3274. */
  3275. class Utils {
  3276. /**
  3277. * Generates a warning
  3278. * @param {string} text message text
  3279. * @param {...string} args optional text
  3280. */
  3281. static warning(text, ...args) {
  3282. //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
  3283. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.warn('[speedy-vision] ' + text, ...args);
  3284. }
  3285. /**
  3286. * Logs a message
  3287. * @param {string} text message text
  3288. * @param {...string} args optional text
  3289. */
  3290. static log(text, ...args) {
  3291. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.log('[speedy-vision] ' + text, ...args);
  3292. }
  3293. /**
  3294. * Assertion
  3295. * @param {boolean} expr expression
  3296. * @param {string} [text] error message
  3297. * @throws {AssertionError}
  3298. */
  3299. static assert(expr, text = '') {
  3300. if (!expr) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
  3301. }
  3302. /**
  3303. * Gets the names of the arguments of the specified function
  3304. * @param {Function} fun
  3305. * @returns {string[]}
  3306. */
  3307. static functionArguments(fun) {
  3308. const code = fun.toString();
  3309. const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' : code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>';
  3310. const match = new RegExp(regex).exec(code);
  3311. if (match !== null) {
  3312. const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
  3313. return args.split(',').map(argname => argname.replace(/=.*$/, '').trim() // remove default params & trim
  3314. ).filter(argname => argname // handle trailing commas
  3315. );
  3316. } else throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
  3317. }
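/*
 * Illustrative sketch: functionArguments() reads parameter names from the stringified
 * function, dropping default values and inline comments:
 *
 *   Utils.functionArguments(function blend(src, dst, alpha = 0.5) { return src; });
 *   // -> ['src', 'dst', 'alpha']
 *
 *   Utils.functionArguments((x, y) => x + y); // -> ['x', 'y']
 */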
  3318. /**
  3319. * Get all property descriptors from an object,
  3320. * traversing its entire prototype chain
  3321. * @param {object} obj
  3322. * @returns {object}
  3323. */
  3324. static getAllPropertyDescriptors(obj) {
  3325. if (obj) {
  3326. const proto = Object.getPrototypeOf(obj);
  3327. return Object.assign(Object.assign({}, Utils.getAllPropertyDescriptors(proto)), Object.getOwnPropertyDescriptors(obj));
  3328. } else return Object.create(null);
  3329. }
  3330. /**
  3331. * Creates a HTMLCanvasElement with the given dimensions
  3332. * @param {number} width in pixels
  3333. * @param {number} height in pixels
  3334. * @returns {HTMLCanvasElement}
  3335. */
  3336. static createCanvas(width, height) {
  3337. const canvas = document.createElement('canvas');
  3338. canvas.width = width;
  3339. canvas.height = height;
  3340. return canvas;
  3341. }
  3342. /**
  3343. * Generate a 1D gaussian kernel with custom sigma
  3344. * Tip: use kernelSize >= (5 * sigma), kernelSize odd
  3345. * @param {number} sigma gaussian sigma
  3346. * @param {number} [kernelSize] kernel size, odd number
  3347. * @param {boolean} [normalized] normalize entries so that their sum is 1
  3348. * @returns {number[]}
  3349. */
  3350. static gaussianKernel(sigma, kernelSize = 0, normalized = true) {
  3351. /*
  3352. * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
  3353. *
  3354. * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
  3355. *
  3356. * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
  3357. *
  3358. * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
  3359. * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
  3360. *
  3361. * Setting a constant c := sqrt(2) * sigma, it follows that:
  3362. *
  3363. * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
  3364. */
  3365. // default kernel size
  3366. if (kernelSize == 0) {
  3367. kernelSize = Math.ceil(5.0 * sigma) | 0;
  3368. kernelSize += 1 - kernelSize % 2;
  3369. }
  3370. // validate input
  3371. kernelSize |= 0;
  3372. if (kernelSize < 1 || kernelSize % 2 == 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);else if (sigma <= 0.0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
  3373. // function erf(x) = -erf(-x) can be approximated numerically. See:
  3374. // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
  3375. const kernel = new Array(kernelSize);
  3376. // set constants
  3377. const N = kernelSize >> 1; // integer (floor, div 2)
  3378. const c = +sigma * 1.4142135623730951; // sigma * sqrt(2)
  3379. const m = 0.3275911;
  3380. const a1 = 0.254829592;
  3381. const a2 = -0.284496736;
  3382. const a3 = 1.421413741;
  3383. const a4 = -1.453152027;
  3384. const a5 = 1.061405429;
  3385. // compute the kernel
  3386. let sum = 0.0;
  3387. for (let j = 0; j < kernelSize; j++) {
  3388. let xa = (j - N + 0.5) / c;
  3389. let xb = (j - N - 0.5) / c;
  3390. let sa = 1.0,
  3391. sb = 1.0;
  3392. if (xa < 0.0) {
  3393. sa = -1.0;
  3394. xa = -xa;
  3395. }
  3396. if (xb < 0.0) {
  3397. sb = -1.0;
  3398. xb = -xb;
  3399. }
  3400. const ta = 1.0 / (1.0 + m * xa);
  3401. const tb = 1.0 / (1.0 + m * xb);
  3402. const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
  3403. const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
  3404. const ya = 1.0 - pa * Math.exp(-xa * xa);
  3405. const yb = 1.0 - pb * Math.exp(-xb * xb);
  3406. const erfa = sa * ya;
  3407. const erfb = sb * yb;
  3408. const fp = (erfa - erfb) / (2.0 * c);
  3409. kernel[j] = fp;
  3410. sum += fp;
  3411. }
  3412. // normalize the kernel
  3413. if (normalized) {
  3414. for (let j = 0; j < kernelSize; j++) kernel[j] /= sum;
  3415. }
  3416. // done!
  3417. return kernel;
  3418. }
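/*
 * Usage sketch: with the default arguments, the kernel size is derived from sigma
 * (ceil(5 * sigma), rounded up to an odd number) and the entries are normalized:
 *
 *   const kernel = Utils.gaussianKernel(1.0);  // 5 symmetric taps
 *   kernel.length;                             // 5
 *   kernel.reduce((sum, k) => sum + k, 0);     // ~1.0
 */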
  3419. /**
  3420. * Generate a 2D kernel in column-major format using two separable 1D kernels
  3421. * @param {number[]} ka 1D kernel
  3422. * @param {number[]} [kb]
  3423. * @returns {number[]}
  3424. */
  3425. static kernel2d(ka, kb = ka) {
  3426. const ksize = ka.length;
3427. Utils.assert(ka.length == kb.length);
  3428. Utils.assert(ksize >= 1 && ksize % 2 == 1);
  3429. // compute the outer product ka x kb
  3430. let kernel2d = new Array(ksize * ksize),
  3431. k = 0;
  3432. for (let col = 0; col < ksize; col++) {
  3433. for (let row = 0; row < ksize; row++) kernel2d[k++] = ka[row] * kb[col];
  3434. }
  3435. return kernel2d;
  3436. }
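/*
 * Usage sketch: kernel2d() takes the outer product of two 1D kernels, which is one way
 * to materialize a separable 2D Gaussian for conv2D():
 *
 *   const g = Utils.gaussianKernel(1.0, 3);  // 3-tap 1D Gaussian
 *   const g2d = Utils.kernel2d(g);           // 3x3 kernel, column-major, sums to ~1
 */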
  3437. /**
  3438. * Cartesian product a x b: [ [ai, bj] for all i, j ]
  3439. * @param {number[]} a
  3440. * @param {number[]} b
  3441. * @returns {Array<[number,number]>}
  3442. */
  3443. static cartesian(a, b) {
  3444. return [].concat(...a.map(a => b.map(b => [a, b])));
  3445. }
  3446. /**
  3447. * Symmetric range
  3448. * @param {number} n non-negative integer
  3449. * @returns {number[]} [ -n, ..., n ]
  3450. */
  3451. static symmetricRange(n) {
  3452. if ((n |= 0) < 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
  3453. return [...Array(2 * n + 1).keys()].map(x => x - n);
  3454. }
  3455. /**
  3456. * Compute the [0, n) range of integers
  3457. * @param {number} n positive integer
  3458. * @returns {number[]} [ 0, 1, ..., n-1 ]
  3459. */
  3460. static range(n) {
  3461. if ((n |= 0) <= 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
  3462. return [...Array(n).keys()];
  3463. }
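/*
 * Illustrative sketch: these helpers drive the kernel unrolling in the convolution
 * shader generators:
 *
 *   Utils.symmetricRange(1);          // [-1, 0, 1]
 *   Utils.range(3);                   // [0, 1, 2]
 *   Utils.cartesian([0, 1], [2, 3]);  // [[0, 2], [0, 3], [1, 2], [1, 3]]
 */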
  3464. /**
  3465. * Shuffle in-place
  3466. * @template T
  3467. * @param {T[]} arr
  3468. * @returns {T[]} arr
  3469. */
  3470. static shuffle(arr) {
  3471. const len = arr.length;
  3472. const m = len - 1;
3473. // Fisher-Yates shuffle
  3474. for (let i = 0; i < m; i++) {
  3475. const j = i + (Math.random() * (len - i) | 0); // i <= j < arr.length
  3476. if (i !== j) {
  3477. const t = arr[i];
  3478. arr[i] = arr[j];
  3479. arr[j] = t;
  3480. }
  3481. }
  3482. return arr;
  3483. }
  3484. /**
  3485. * Flatten an array (1 level only)
  3486. * @template U
  3487. * @param {U[]} array
  3488. * @returns {U[]}
  3489. */
  3490. static flatten(array) {
  3491. //return array.flat();
  3492. //return array.reduce((arr, val) => arr.concat(val), []);
  3493. const flat = [];
  3494. for (let i = 0, n = array.length; i < n; i++) {
  3495. const entry = array[i];
  3496. if (Array.isArray(entry)) {
  3497. for (let j = 0, m = entry.length; j < m; j++) flat.push(entry[j]);
  3498. } else flat.push(entry);
  3499. }
  3500. return flat;
  3501. }
  3502. /**
  3503. * Decode a 16-bit float from a
  3504. * unsigned 16-bit integer
  3505. * @param {number} uint16
  3506. * @returns {number}
  3507. */
  3508. static decodeFloat16(uint16) {
  3509. // decode according to sec 2.1.2
  3510. // 16-Bit Floating Point Numbers
  3511. // of the OpenGL ES 3 spec
  3512. const s = (uint16 & 0xFFFF) >> 15; // sign bit
  3513. const e = (uint16 & 0x7FFF) >> 10; // exponent
  3514. const m = uint16 & 0x3FF; // mantissa
  3515. const sign = 1 - 2 * s; // (-1)^s
  3516. if (e == 0) return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
  3517. else if (e == 31) return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
  3518. const f = e >= 15 ? 1 << e - 15 : 1.0 / (1 << 15 - e); // 2^(e-15)
  3519. return sign * f * (1.0 + m * 0.0009765625); // normal
  3520. }
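/*
 * Illustrative sketch: a few reference values for the 16-bit float decoding above:
 *
 *   Utils.decodeFloat16(0x3C00);  //  1.0 (sign 0, exponent 15, mantissa 0)
 *   Utils.decodeFloat16(0xC000);  // -2.0 (sign 1, exponent 16, mantissa 0)
 *   Utils.decodeFloat16(0x0001);  //  5.960464477539063e-8 (smallest subnormal)
 */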
  3521. /**
  3522. * Wrapper around getUserMedia()
  3523. * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
  3524. * @returns {SpeedyPromise<HTMLVideoElement>}
  3525. */
  3526. static requestCameraStream(constraints = {
  3527. audio: false,
  3528. video: true
  3529. }) {
  3530. Utils.log('Accessing the webcam...');
  3531. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
  3532. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
  3533. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  3534. const video = document.createElement('video');
  3535. video.onloadedmetadata = () => {
  3536. video.play();
  3537. Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
  3538. resolve(video);
  3539. };
  3540. video.setAttribute('playsinline', '');
  3541. video.setAttribute('autoplay', '');
  3542. if (constraints.audio === false || constraints.audio === undefined) video.setAttribute('muted', '');
  3543. video.srcObject = stream;
  3544. }).catch(err => {
  3545. if (err.name === 'NotAllowedError') {
  3546. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(`Please give access to the camera and reload the page.`, err));
  3547. } else if (err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
  3548. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(`Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`, err));
  3549. } else {
  3550. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(`Can't access the webcam.`, err));
  3551. }
  3552. });
  3553. });
  3554. }
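// Usage sketch (illustrative comment; the facingMode constraint is just an example):
//   Utils.requestCameraStream({ video: { facingMode: 'environment' }, audio: false })
//       .then(video => document.body.appendChild(video))
//       .catch(err => console.error(err.message));
// The resolved <video> element is not attached to the DOM; the caller decides where to place it.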
  3555. /**
  3556. * Format binary data as a string with hex values
  3557. * @param {ArrayBuffer} bytes
  3558. * @returns {string}
  3559. */
  3560. static formatBinaryData(bytes) {
  3561. const uint8 = new Uint8Array(bytes);
  3562. const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
  3563. return array.join(' ');
  3564. }
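// Example (illustrative comment, not part of the original bundle):
//   Utils.formatBinaryData(new Uint8Array([0, 15, 255]).buffer) // "00 0f ff"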
  3565. /**
  3566. * Returns a string containing platform brand information
  3567. * @returns {string}
  3568. */
  3569. static platformString() {
  3570. // navigator.userAgent is easily and often spoofed, and thus is unreliable
  3571. // use the NavigatorUAData interface if available
  3572. if (typeof navigator.userAgentData === 'object') {
3573. // use only low entropy data, so we don't need to ask the user
3574. // for permission to read this string
  3575. return navigator.userAgentData.platform;
  3576. }
3577. // navigator.platform is deprecated and can be spoofed on Firefox, but,
3578. // at the time of this writing, there is apparently no alternative.
  3579. return navigator.platform;
  3580. }
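// Example output (illustrative comment; actual values depend on the browser):
//   Utils.platformString() // e.g. "Windows" or "Android" via userAgentData.platform,
//                          // or "Win32" / "MacIntel" via the legacy navigator.platform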
  3581. }
  3582. /***/ }),
  3583. /***/ 5235:
  3584. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_136472__) => {
  3585. var map = {
  3586. "./colors.glsl": 8609,
  3587. "./filters.glsl": 4672,
  3588. "./fixed-point.glsl": 9778,
  3589. "./float16.glsl": 8710,
  3590. "./global.glsl": 2434,
  3591. "./int32.glsl": 439,
  3592. "./keypoint-descriptors.glsl": 8545,
  3593. "./keypoint-matches.glsl": 6762,
  3594. "./keypoints.glsl": 7639,
  3595. "./math.glsl": 431,
  3596. "./platform.glsl": 6822,
  3597. "./pyramids.glsl": 2728,
  3598. "./subpixel.glsl": 6823
  3599. };
  3600. function webpackContext(req) {
  3601. var id = webpackContextResolve(req);
  3602. return __nested_webpack_require_136472__(id);
  3603. }
  3604. function webpackContextResolve(req) {
  3605. if(!__nested_webpack_require_136472__.o(map, req)) {
  3606. var e = new Error("Cannot find module '" + req + "'");
  3607. e.code = 'MODULE_NOT_FOUND';
  3608. throw e;
  3609. }
  3610. return map[req];
  3611. }
  3612. webpackContext.keys = function webpackContextKeys() {
  3613. return Object.keys(map);
  3614. };
  3615. webpackContext.resolve = webpackContextResolve;
  3616. module.exports = webpackContext;
  3617. webpackContext.id = 5235;
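// Illustrative note (not part of the original bundle): this module is webpack's
// require.context() shim for the shader includes. A call like webpackContext('./math.glsl')
// resolves to module id 431 through the map above and is then loaded with the nested
// require; webpackContext.keys() lists every path registered in this context.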
  3618. /***/ }),
  3619. /***/ 4606:
  3620. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_137422__) => {
  3621. var map = {
  3622. "./filters/convolution": 1672,
  3623. "./filters/convolution.js": 1672,
  3624. "./filters/convolution1d.glsl": 8211,
  3625. "./filters/convolution2d.glsl": 7360,
  3626. "./filters/fast-median.glsl": 8191,
  3627. "./filters/nightvision.glsl": 4438,
  3628. "./filters/normalize-image.glsl": 5867,
  3629. "./filters/rgb2grey.glsl": 9252,
  3630. "./include/colors.glsl": 8609,
  3631. "./include/filters.glsl": 4672,
  3632. "./include/fixed-point.glsl": 9778,
  3633. "./include/float16.glsl": 8710,
  3634. "./include/global.glsl": 2434,
  3635. "./include/int32.glsl": 439,
  3636. "./include/keypoint-descriptors.glsl": 8545,
  3637. "./include/keypoint-matches.glsl": 6762,
  3638. "./include/keypoints.glsl": 7639,
  3639. "./include/math.glsl": 431,
  3640. "./include/platform.glsl": 6822,
  3641. "./include/pyramids.glsl": 2728,
  3642. "./include/subpixel.glsl": 6823,
  3643. "./keypoints/allocate-descriptors.glsl": 1341,
  3644. "./keypoints/allocate-extra.glsl": 7833,
  3645. "./keypoints/apply-homography.glsl": 2352,
  3646. "./keypoints/bf-knn.glsl": 7541,
  3647. "./keypoints/clip-border.glsl": 4868,
  3648. "./keypoints/clip.glsl": 5591,
  3649. "./keypoints/distance-filter.glsl": 191,
  3650. "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
  3651. "./keypoints/encode-keypoint-offsets.glsl": 336,
  3652. "./keypoints/encode-keypoint-positions.glsl": 8968,
  3653. "./keypoints/encode-keypoint-properties.glsl": 1733,
  3654. "./keypoints/encode-keypoints.glsl": 9674,
  3655. "./keypoints/encode-null-keypoints.glsl": 2090,
  3656. "./keypoints/fast.glsl": 1855,
  3657. "./keypoints/fast.vs.glsl": 4824,
  3658. "./keypoints/hamming-distance-filter.glsl": 2381,
  3659. "./keypoints/harris-cutoff.glsl": 6060,
  3660. "./keypoints/harris.glsl": 9974,
  3661. "./keypoints/knn-init.glsl": 3047,
  3662. "./keypoints/knn-transfer.glsl": 3266,
  3663. "./keypoints/laplacian.glsl": 8018,
  3664. "./keypoints/lk.glsl": 3168,
  3665. "./keypoints/lookup-of-locations.glsl": 3890,
  3666. "./keypoints/lookup-of-locations.vs.glsl": 8647,
  3667. "./keypoints/lsh-knn.glsl": 4776,
  3668. "./keypoints/mix-keypoints.glsl": 2648,
  3669. "./keypoints/nonmax-scale.glsl": 8825,
  3670. "./keypoints/nonmax-space.glsl": 5693,
  3671. "./keypoints/nonmax-suppression.glsl": 9280,
  3672. "./keypoints/orb-descriptor.glsl": 9108,
  3673. "./keypoints/orb-orientation.glsl": 7137,
  3674. "./keypoints/refine-scale.glsl": 9739,
  3675. "./keypoints/score-findmax.glsl": 8231,
  3676. "./keypoints/shuffle.glsl": 2518,
  3677. "./keypoints/sort-keypoints.glsl": 8096,
  3678. "./keypoints/subpixel-refinement.glsl": 5795,
  3679. "./keypoints/transfer-flow.glsl": 3169,
  3680. "./keypoints/transfer-orientation.glsl": 1337,
  3681. "./keypoints/transfer-to-extra.glsl": 6187,
  3682. "./keypoints/upload-keypoints.glsl": 477,
  3683. "./pyramids/downsample2.glsl": 4050,
  3684. "./pyramids/upsample2.glsl": 5545,
  3685. "./transforms/additive-mix.glsl": 7113,
  3686. "./transforms/resize.glsl": 1202,
  3687. "./transforms/warp-perspective.glsl": 7971,
  3688. "./utils/copy-components.glsl": 6122,
  3689. "./utils/copy-raster.glsl": 371,
  3690. "./utils/copy.glsl": 7307,
  3691. "./utils/fill-components.glsl": 8614,
  3692. "./utils/fill.glsl": 6271,
  3693. "./utils/flip-y.vs.glsl": 3016,
  3694. "./utils/scan-minmax2d.glsl": 3630,
  3695. "./utils/sobel-derivatives.glsl": 8508,
  3696. "./utils/sobel-derivatives.vs.glsl": 8073
  3697. };
  3698. function webpackContext(req) {
  3699. var id = webpackContextResolve(req);
  3700. return __nested_webpack_require_137422__(id);
  3701. }
  3702. function webpackContextResolve(req) {
  3703. if(!__nested_webpack_require_137422__.o(map, req)) {
  3704. var e = new Error("Cannot find module '" + req + "'");
  3705. e.code = 'MODULE_NOT_FOUND';
  3706. throw e;
  3707. }
  3708. return map[req];
  3709. }
  3710. webpackContext.keys = function webpackContextKeys() {
  3711. return Object.keys(map);
  3712. };
  3713. webpackContext.resolve = webpackContextResolve;
  3714. module.exports = webpackContext;
  3715. webpackContext.id = 4606;
  3716. /***/ }),
  3717. /***/ 8211:
  3718. /***/ ((module) => {
  3719. module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3720. /***/ }),
  3721. /***/ 7360:
  3722. /***/ ((module) => {
  3723. module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3724. /***/ }),
  3725. /***/ 8191:
  3726. /***/ ((module) => {
  3727. module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
  3728. /***/ }),
  3729. /***/ 4438:
  3730. /***/ ((module) => {
  3731. module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
  3732. /***/ }),
  3733. /***/ 5867:
  3734. /***/ ((module) => {
  3735. module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
  3736. /***/ }),
  3737. /***/ 9252:
  3738. /***/ ((module) => {
  3739. module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
  3740. /***/ }),
  3741. /***/ 8609:
  3742. /***/ ((module) => {
  3743. module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
  3744. /***/ }),
  3745. /***/ 4672:
  3746. /***/ ((module) => {
  3747. module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
  3748. /***/ }),
  3749. /***/ 9778:
  3750. /***/ ((module) => {
  3751. module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((x) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
  3752. /***/ }),
  3753. /***/ 8710:
  3754. /***/ ((module) => {
  3755. module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
  3756. /***/ }),
  3757. /***/ 2434:
  3758. /***/ ((module) => {
  3759. module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
  3760. /***/ }),
  3761. /***/ 439:
  3762. /***/ ((module) => {
  3763. module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
  3764. /***/ }),
  3765. /***/ 8545:
  3766. /***/ ((module) => {
  3767. module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
  3768. /***/ }),
  3769. /***/ 6762:
  3770. /***/ ((module) => {
  3771. module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
  3772. /***/ }),
  3773. /***/ 7639:
  3774. /***/ ((module) => {
  3775. module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
  3776. /***/ }),
  3777. /***/ 431:
  3778. /***/ ((module) => {
  3779. module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
  3780. /***/ }),
  3781. /***/ 6822:
  3782. /***/ ((module) => {
  3783. module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
  3784. /***/ }),
  3785. /***/ 2728:
  3786. /***/ ((module) => {
  3787. module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
  3788. /***/ }),
  3789. /***/ 6823:
  3790. /***/ ((module) => {
  3791. module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
  3792. /***/ }),
  3793. /***/ 1341:
  3794. /***/ ((module) => {
  3795. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3796. /***/ }),
  3797. /***/ 7833:
  3798. /***/ ((module) => {
  3799. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3800. /***/ }),
  3801. /***/ 2352:
  3802. /***/ ((module) => {
  3803. module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
  3804. /***/ }),
  3805. /***/ 7541:
  3806. /***/ ((module) => {
  3807. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
  3808. /***/ }),
  3809. /***/ 4868:
  3810. /***/ ((module) => {
  3811. module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
  3812. /***/ }),
  3813. /***/ 5591:
  3814. /***/ ((module) => {
  3815. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
  3816. /***/ }),
  3817. /***/ 191:
  3818. /***/ ((module) => {
  3819. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3820. /***/ }),
  3821. /***/ 5467:
  3822. /***/ ((module) => {
  3823. module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
  3824. /***/ }),
  3825. /***/ 336:
  3826. /***/ ((module) => {
  3827. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
  3828. /***/ }),
  3829. /***/ 8968:
  3830. /***/ ((module) => {
  3831. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
  3832. /***/ }),
  3833. /***/ 1733:
  3834. /***/ ((module) => {
  3835. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
  3836. /***/ }),
  3837. /***/ 9674:
  3838. /***/ ((module) => {
  3839. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
  3840. /***/ }),
  3841. /***/ 2090:
  3842. /***/ ((module) => {
  3843. module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
  3844. /***/ }),
  3845. /***/ 1855:
  3846. /***/ ((module) => {
  3847. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
  3848. /***/ }),
  3849. /***/ 4824:
  3850. /***/ ((module) => {
  3851. module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
  3852. /***/ }),
  3853. /***/ 2381:
  3854. /***/ ((module) => {
  3855. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
/***/ }),
/***/ 6060:
/***/ ((module) => {
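// GLSL fragment shader: quality filter for corners - zeroes the score of any corner that falls below
// quality * (maximum corner score).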
  3859. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
/***/ }),
/***/ 9974:
/***/ ((module) => {
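// GLSL fragment shader: Harris/Shi-Tomasi-style corner response - accumulates the structure tensor of the
// image derivatives over a Gaussian-weighted window, stores the minimum eigenvalue, and keeps the result at
// the scale with the stronger Laplacian response.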
  3863. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
/***/ }),
/***/ 3047:
/***/ ((module) => {
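// GLSL fragment shader: initializes the texture of encoded keypoint matches with "no match" entries
// (MATCH_MAX_INDEX with zero or maximum distance, depending on ENCODE_FILTERS).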
  3867. module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
/***/ }),
/***/ 3266:
/***/ ((module) => {
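// GLSL fragment shader: scatters the k-th best match of each keypoint into its cell of the per-keypoint
// match list (used for k-nearest-neighbor matching).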
  3871. module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
/***/ }),
/***/ 8018:
/***/ ((module) => {
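// GLSL fragment shader: evaluates the Laplacian of the pyramid at lod - lodStep and lod + lodStep for each
// corner and packs the pair of values as float16 (used for scale selection).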
  3875. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
/***/ }),
/***/ 3168:
/***/ ((module) => {
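// GLSL fragment shader: one level of pyramidal Lucas-Kanade sparse optical flow - reads a window from both
// pyramids, builds the spatial gradient matrix and iteratively refines the flow estimate, discarding
// keypoints whose system is ill-conditioned or that leave the image.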
  3879. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
/***/ }),
/***/ 3890:
/***/ ((module) => {
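// GLSL fragment shader: builds a lookup table of corner locations - STAGE 1 seeds the table from the corner
// texture, STAGE > 1 merges blocks of increasing size with a parallel scan, and the remaining stage renders
// a debug view.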
  3883. module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8647:
/***/ ((module) => {
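// GLSL vertex shader companion to the lookup-table stages above: outputs the texture coordinates of the
// center block and its 8 neighbors.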
  3887. module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
/***/ }),
/***/ 4776:
/***/ ((module) => {
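// GLSL fragment shader: LSH-based descriptor matching - hashes the keypoint descriptor, probes buckets of the
// current table at Hamming distance up to LEVEL (using the precomputed SWAP bit patterns) and keeps the best
// candidate match found so far.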
  3891. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
/***/ }),
/***/ 2648:
/***/ ((module) => {
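// GLSL fragment shader: mixes two sets of encoded keypoints - STAGE 1 concatenates them, STAGE 2 marks valid
// keypoints, STAGEs 3-4 compact the valid ones with a parallel scan, STAGE 5 renders a debug view.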
  3895. module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8825:
/***/ ((module) => {
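// GLSL fragment shader: non-maximum suppression across adjacent pyramid levels - a corner survives only if
// its strength (Laplacian-based or score-based) is not exceeded by any neighbor at lod - lodStep or lod + lodStep.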
  3899. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
/***/ }),
/***/ 5693:
/***/ ((module) => {
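// GLSL fragment shader: non-maximum suppression within a pyramid level - keeps a corner only if its score is
// the maximum of its 3x3 neighborhood at the same lod, discarding corners not aligned to a lod-dependent grid.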
  3903. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
/***/ }),
/***/ 9280:
/***/ ((module) => {
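// GLSL fragment shader: non-maximum suppression for multiscale detections - compares the corner score against
// its 8 immediate neighbors and, when MULTISCALE is enabled, a middle ring of 16 neighbors at nearby lods.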
  3907. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
/***/ }),
/***/ 9108:
/***/ ((module) => {
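// GLSL fragment shader: ORB descriptor - rotates the 256-pair BRIEF sampling pattern (pat31) by the keypoint
// orientation, quantized to 12-degree steps, and packs the 256 binary intensity tests into 32 bytes
// (4 bytes per output pixel).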
  3911. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-
8,-13),\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
/***/ }),
/***/ 7137:
/***/ ((module) => {
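// GLSL fragment shader: keypoint orientation by intensity centroid - accumulates the intensity-weighted
// centroid over a circular patch (radius scaled by the keypoint lod) and encodes atan2(m.y, m.x) as the orientation.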
  3915. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(
10,-5),P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
/***/ }),
/***/ 9739:
/***/ ((module) => {
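// GLSL fragment shader: scale refinement - samples the corner strength at lod - lodStep, lod and lod + lodStep
// (Laplacian or FAST score, depending on METHOD), fits a parabola and stores the interpolated lod.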
  3919. module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
  3920. /***/ }),
  3921. /***/ 8231:
  3922. /***/ ((module) => {
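// Shader 8231: one pass of a parallel reduction over the corners texture. Each texel
// compares its float16-encoded score with three neighbors spaced `jump` pixels apart
// inside its cluster and keeps the data (rb channels) of the strongest one.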
  3923. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
  3924. /***/ }),
  3925. /***/ 2518:
  3926. /***/ ((module) => {
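// Shader 2518: shuffles the encoded keypoints. A permutation table is supplied in the
// std140 uniform block `Permutation` (PERMUTATION_MAXLEN entries packed as ivec4);
// each thread looks up permutationElement(myIndex) and copies that keypoint's data.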
  3927. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
  3928. /***/ }),
  3929. /***/ 8096:
  3930. /***/ ((module) => {
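// Shader 8096: a three-stage keypoint sorter (orders keypoints by score). STAGE 1 builds
// a (keypointIndex, score) permutation texture; STAGE 2 merges pairs of already-sorted
// blocks by selecting the k-th element of two sorted runs in O(log blockSize) steps
// (selectKth); STAGE 3 gathers the keypoint data in sorted order and nullifies any
// entry past maxKeypoints.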
  3931. module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
  3932. /***/ }),
  3933. /***/ 5795:
  3934. /***/ ((module) => {
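// Shader 5795: subpixel refinement of keypoint positions. It reads a patch of the pyramid
// around each keypoint, computes Sobel derivatives, builds a Harris-style minimum-eigenvalue
// response map, and refines the location with one of four METHODs: 0 = two independent 1D
// quadratic fits, 1 = a 2D quadratic (Taylor) step, 2/3 = iterative bilinear/bicubic
// upsampling of the patch. The resulting offset is scaled by exp2(lod) and encoded as a
// pair of float16.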
  3935. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
  3936. /***/ }),
  3937. /***/ 3169:
  3938. /***/ ((module) => {
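// Shader 3169: appears to translate each keypoint by its optical-flow vector. The flow is
// decoded from the encodedFlow texture as a pair of float16; keypoints whose flow is
// flagged as discarded are themselves discarded.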
  3939. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
  3940. /***/ }),
  3941. /***/ 1337:
  3942. /***/ ((module) => {
  3943. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
  3944. /***/ }),
  3945. /***/ 6187:
  3946. /***/ ((module) => {
  3947. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
  3948. /***/ }),
  3949. /***/ 477:
  3950. /***/ ((module) => {
  3951. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
  3952. /***/ }),
  3953. /***/ 4050:
  3954. /***/ ((module) => {
  3955. module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
  3956. /***/ }),
  3957. /***/ 5545:
  3958. /***/ ((module) => {
  3959. module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
  3960. /***/ }),
  3961. /***/ 7113:
  3962. /***/ ((module) => {
  3963. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
  3964. /***/ }),
  3965. /***/ 1202:
  3966. /***/ ((module) => {
  3967. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
  3968. /***/ }),
  3969. /***/ 7971:
  3970. /***/ ((module) => {
  3971. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
  3972. /***/ }),
  3973. /***/ 6122:
  3974. /***/ ((module) => {
  3975. module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
  3976. /***/ }),
  3977. /***/ 371:
  3978. /***/ ((module) => {
  3979. module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
  3980. /***/ }),
  3981. /***/ 7307:
  3982. /***/ ((module) => {
  3983. module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
  3984. /***/ }),
  3985. /***/ 8614:
  3986. /***/ ((module) => {
  3987. module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
  3988. /***/ }),
  3989. /***/ 6271:
  3990. /***/ ((module) => {
  3991. module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
  3992. /***/ }),
  3993. /***/ 3016:
  3994. /***/ ((module) => {
  3995. module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
  3996. /***/ }),
  3997. /***/ 3630:
  3998. /***/ ((module) => {
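// Shader 3630: one pass of a parallel min/max reduction. Each texel combines itself with
// three neighbors spaced `jump` pixels apart within its cluster, keeping the maximum in
// the red channel, the minimum in the green channel and their difference in the blue channel.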
  3999. module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
  4000. /***/ }),
  4001. /***/ 8508:
  4002. /***/ ((module) => {
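// Shader 8508: image derivatives. It convolves a 3x3 window of the pyramid (sampled
// through varyings when USE_VARYINGS is enabled) with horizontal and vertical Sobel
// kernels and encodes (df/dx, df/dy) as a pair of float16. The next module appears to be
// its companion vertex shader, which precomputes the nine sampling coordinates.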
  4003. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
  4004. /***/ }),
  4005. /***/ 8073:
  4006. /***/ ((module) => {
  4007. module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
  4008. /***/ }),
  4009. /***/ 3575:
  4010. /***/ ((module) => {
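// Module 3575: a base64-encoded WebAssembly binary ("AGFzbQ" decodes to the "\0asm" magic).
// Judging by the export names visible in the encoding (malloc, free, srand, Mat32_create,
// Mat32_multiply, Mat32_inverse*, Mat32_qr_*, Mat32_homography_ndlt, Mat32_pransac_homography,
// Mat32_affine_direct, Mat32_transform_perspective, ...), it appears to implement 32-bit
// floating-point matrix routines and homography/affine estimation with a RANSAC-style scheme.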
  4011. module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
  4012. f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
  4013. f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
  4014. BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
  4015. AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
  4016. CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
  4017. Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
  4018. AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
  4019. dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
  4020. TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
  4021. X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
  4022. MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
  4023. ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
  4024. PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
  4025. CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
  4026. AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
  4027. gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
  4028. AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
  4029. QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
  4030. AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
  4031. gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
  4032. IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
  4033. gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
  4034. hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
  4035. nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
  4036. AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
  4037. EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
  4038. IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
  4039. AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
  4040. IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
  4041. gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
  4042. AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
  4043. IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
  4044. AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
  4045. AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
  4046. IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
  4047. AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
  4048. QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
  4049. gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
  4050. IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
  4051. OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
  4052. IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
  4053. AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
  4054. APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
  4055. IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
  4056. AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
  4057. IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
  4058. CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
  4059. hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
  4060. IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
  4061. AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
  4062. BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
  4063. QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
  4064. ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
  4065. AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
  4066. BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
  4067. iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
  4068. IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
  4069. AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
  4070. gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
  4071. QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
  4072. QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
  4073. ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
  4074. gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
  4075. NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
  4076. AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
  4077. gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
  4078. IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
  4079. a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
  4080. KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
  4081. QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
  4082. CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
  4083. ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
  4084. QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
  4085. CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
  4086. AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
  4087. QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
  4088. ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
  4089. BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
  4090. AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
  4091. KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
  4092. EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
  4093. DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
  4094. BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
  4095. Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
  4096. DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
  4097. AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
  4098. QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
  4099. IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
  4100. QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
  4101. QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
  4102. IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
  4103. CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
  4104. KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
  4105. IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
  4106. ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
  4107. KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
  4108. AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
  4109. DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
  4110. QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
  4111. QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
  4112. ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
  4113. EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
  4114. SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
  4115. KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
  4116. gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
  4117. ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
  4118. ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
  4119. IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
  4120. IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
  4121. IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
  4122. DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
  4123. A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
  4124. akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
  4125. DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
  4126. u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
  4127. AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
  4128. IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
  4129. IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
  4130. aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
  4131. QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
  4132. KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
  4133. bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
  4134. IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
  4135. IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
  4136. IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
  4137. An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
  4138. DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
  4139. KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
  4140. QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
  4141. BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
  4142. QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
  4143. KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
  4144. ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
  4145. GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
  4146. QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
  4147. ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
  4148. B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
  4149. DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
  4150. BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
  4151. bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
  4152. IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
  4153. DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
  4154. IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
  4155. QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
  4156. FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
  4157. DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
  4158. AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
  4159. AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
  4160. QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
  4161. AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
  4162. EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
  4163. SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
  4164. Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
  4165. ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
  4166. IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
  4167. fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
  4168. IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
  4169. gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
  4170. gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
  4171. CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
  4172. IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
  4173. ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
  4174. DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
  4175. KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
  4176. gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
  4177. ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
  4178. AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
  4179. gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
  4180. aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
  4181. SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
  4182. CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
  4183. CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
  4184. CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
  4185. CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
  4186. AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
  4187. ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
  4188. gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
  4189. QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
  4190. AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
  4191. lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
  4192. kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
  4193. 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
  4194. ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
  4195. NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
  4196. gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
  4197. BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
  4198. AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
  4199. QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
  4200. GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
  4201. C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
  4202. DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
  4203. QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
  4204. IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
  4205. DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
  4206. f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
  4207. gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
  4208. gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
  4209. IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
  4210. AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
  4211. aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
  4212. bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
  4213. IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
  4214. AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
  4215. ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
  4216. gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
  4217. AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
  4218. IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
  4219. BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
  4220. l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
  4221. gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
  4222. AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
  4223. AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
  4224. aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
  4225. ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
  4226. aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
  4227. AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
  4228. FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
  4229. IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
  4230. oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
  4231. AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
  4232. IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
  4233. IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
  4234. 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
  4235. oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
  4236. IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
  4237. PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
  4238. APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
  4239. oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
  4240. GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
  4241. IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
  4242. mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
  4243. BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
  4244. AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
  4245. oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
  4246. oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
  4247. FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
  4248. oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
  4249. IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
  4250. JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
  4251. ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
  4252. AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
  4253. DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
  4254. IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
  4255. ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
  4256. CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
  4257. QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
  4258. IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
  4259. IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
  4260. ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
  4261. AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
  4262. CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
  4263. dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
  4264. KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
  4265. AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
  4266. HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
  4267. BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
  4268. AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
  4269. gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
  4270. QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
  4271. AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
  4272. BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
  4273. IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
  4274. kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
  4275. BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
  4276. IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
  4277. AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
  4278. AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
  4279. QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
  4280. CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
  4281. lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
  4282. dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
  4283. IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
  4284. IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
  4285. C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
  4286. IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
  4287. AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
  4288. KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
  4289. Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4290. ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
  4291. IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
  4292. EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
  4293. IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
  4294. IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
  4295. QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
  4296. IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
  4297. AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
  4298. ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
  4299. CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
  4300. AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
  4301. QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
  4302. kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
  4303. GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
  4304. ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
  4305. OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
  4306. BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
  4307. AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
  4308. IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
  4309. A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
  4310. dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
  4311. KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
  4312. IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
  4313. FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
  4314. GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
  4315. PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
  4316. E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
  4317. IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
  4318. IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
  4319. KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
  4320. Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4321. ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
  4322. gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
  4323. BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
  4324. EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
  4325. gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
  4326. AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
  4327. IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
  4328. AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
  4329. lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
  4330. b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
  4331. ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
  4332. IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
  4333. gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
  4334. ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
  4335. KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
  4336. AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
  4337. dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
  4338. NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
  4339. ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
  4340. DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
  4341. IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
  4342. IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
  4343. GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
  4344. BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
  4345. ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
  4346. gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
  4347. IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
  4348. FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
  4349. ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
  4350. QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
  4351. DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
  4352. dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
  4353. IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
  4354. IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
  4355. IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
  4356. AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
  4357. D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
  4358. EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
  4359. QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
  4360. IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
  4361. IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
  4362. KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
  4363. QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
  4364. GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
  4365. AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
  4366. ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
  4367. DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
  4368. AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
  4369. IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
  4370. zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
  4371. ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
  4372. AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
  4373. IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
  4374. AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
  4375. QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
  4376. B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
  4377. DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
  4378. gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
  4379. KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
  4380. AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
  4381. KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
  4382. HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
  4383. IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
  4384. IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
  4385. HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
  4386. QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
  4387. gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
  4388. QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
  4389. QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
  4390. ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
  4391. ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
  4392. ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
  4393. EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
  4394. KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
  4395. X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
  4396. ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
  4397. QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
  4398. dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
  4399. AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
  4400. AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
  4401. AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
  4402. KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
  4403. koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
  4404. CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
  4405. CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
  4406. IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
  4407. aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
  4408. ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
  4409. IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
  4410. EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
  4411. A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
  4412. A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
  4413. KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
  4414. AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
  4415. lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
  4416. ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
  4417. IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
  4418. aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
  4419. iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
  4420. DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
  4421. AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
  4422. AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
  4423. IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
  4424. lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
  4425. QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
  4426. IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
  4427. Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
  4428. BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
  4429. A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
  4430. ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
  4431. IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
  4432. taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
  4433. NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
  4434. gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
  4435. Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
  4436. DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
  4437. aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
  4438. ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
  4439. C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
  4440. dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
  4441. IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
  4442. dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
  4443. NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
  4444. YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
  4445. IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
  4446. bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
  4447. AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
  4448. b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
  4449. QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
  4450. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
  4451. dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
  4452. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
  4453. dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
  4454. NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
  4455. ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
  4456. YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
  4457. aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
  4458. MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
  4459. IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
  4460. dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
  4461. ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
  4462. IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
  4463. cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
  4464. AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
  4465. AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
  4466. OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
  4467. MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
  4468. MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
  4469. IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
  4470. cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
  4471. aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
  4472. LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
  4473. ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
  4474. bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
  4475. dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
  4476. MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
  4477. YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
  4478. YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
  4479. cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
  4480. b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
  4481. AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
  4482. NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
  4483. bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
  4484. aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
  4485. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
  4486. `
  4487. /***/ })
  4488. /******/ });
  4489. /************************************************************************/
  4490. /******/ // The module cache
  4491. /******/ var __webpack_module_cache__ = {};
  4492. /******/
  4493. /******/ // The require function
  4494. /******/ function __nested_webpack_require_314174__(moduleId) {
  4495. /******/ // Check if module is in cache
  4496. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  4497. /******/ if (cachedModule !== undefined) {
  4498. /******/ return cachedModule.exports;
  4499. /******/ }
  4500. /******/ // Create a new module (and put it into the cache)
  4501. /******/ var module = __webpack_module_cache__[moduleId] = {
  4502. /******/ // no module.id needed
  4503. /******/ // no module.loaded needed
  4504. /******/ exports: {}
  4505. /******/ };
  4506. /******/
  4507. /******/ // Execute the module function
  4508. /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_314174__);
  4509. /******/
  4510. /******/ // Return the exports of the module
  4511. /******/ return module.exports;
  4512. /******/ }
  4513. /******/
  4514. /************************************************************************/
  4515. /******/ /* webpack/runtime/define property getters */
  4516. /******/ (() => {
  4517. /******/ // define getter functions for harmony exports
  4518. /******/ __nested_webpack_require_314174__.d = (exports, definition) => {
  4519. /******/ for(var key in definition) {
  4520. /******/ if(__nested_webpack_require_314174__.o(definition, key) && !__nested_webpack_require_314174__.o(exports, key)) {
  4521. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  4522. /******/ }
  4523. /******/ }
  4524. /******/ };
  4525. /******/ })();
  4526. /******/
  4527. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  4528. /******/ (() => {
  4529. /******/ __nested_webpack_require_314174__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  4530. /******/ })();
  4531. /******/
  4532. /******/ /* webpack/runtime/make namespace object */
  4533. /******/ (() => {
  4534. /******/ // define __esModule on exports
  4535. /******/ __nested_webpack_require_314174__.r = (exports) => {
  4536. /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
  4537. /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
  4538. /******/ }
  4539. /******/ Object.defineProperty(exports, '__esModule', { value: true });
  4540. /******/ };
  4541. /******/ })();
  4542. /******/
  4543. /************************************************************************/
  4544. var __nested_webpack_exports__ = {};
4545. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  4546. (() => {
  4547. "use strict";
  4548. // EXPORTS
  4549. __nested_webpack_require_314174__.d(__nested_webpack_exports__, {
  4550. "default": () => (/* binding */ Speedy)
  4551. });
  4552. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  4553. var speedy_gl = __nested_webpack_require_314174__(1001);
  4554. // EXTERNAL MODULE: ./src/utils/utils.js
  4555. var utils = __nested_webpack_require_314174__(9037);
  4556. // EXTERNAL MODULE: ./src/core/settings.js
  4557. var settings = __nested_webpack_require_314174__(2199);
  4558. // EXTERNAL MODULE: ./src/core/speedy-promise.js
  4559. var speedy_promise = __nested_webpack_require_314174__(9192);
  4560. ;// CONCATENATED MODULE: ./src/utils/asap.js
  4561. /*
  4562. * speedy-vision.js
  4563. * GPU-accelerated Computer Vision for JavaScript
  4564. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4565. *
  4566. * Licensed under the Apache License, Version 2.0 (the "License");
  4567. * you may not use this file except in compliance with the License.
  4568. * You may obtain a copy of the License at
  4569. *
  4570. * http://www.apache.org/licenses/LICENSE-2.0
  4571. *
  4572. * Unless required by applicable law or agreed to in writing, software
  4573. * distributed under the License is distributed on an "AS IS" BASIS,
  4574. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4575. * See the License for the specific language governing permissions and
  4576. * limitations under the License.
  4577. *
  4578. * asap.js
  4579. * Schedule a function to run "as soon as possible"
  4580. */
  4581. /** callbacks */
  4582. const callbacks = /** @type {Function[]} */[];
  4583. /** arguments to be passed to the callbacks */
  4584. const args = /** @type {any[][]} */[];
  4585. /** asap key */
  4586. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  4587. // Register an event listener
  4588. window.addEventListener('message', event => {
  4589. if (event.source !== window || event.data !== ASAP_KEY) return;
  4590. event.stopPropagation();
  4591. if (callbacks.length == 0) return;
  4592. const fn = callbacks.pop();
  4593. const argArray = args.pop();
  4594. fn.apply(undefined, argArray);
  4595. }, true);
  4596. /**
  4597. * Schedule a function to run "as soon as possible"
  4598. * @param {Function} fn callback
  4599. * @param {any[]} params optional parameters
  4600. */
  4601. function asap(fn, ...params) {
  4602. callbacks.unshift(fn);
  4603. args.unshift(params);
  4604. window.postMessage(ASAP_KEY, '*');
  4605. }
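// Illustrative usage (not part of the library; the callback and values below are
// hypothetical). asap() defers the call until the posted message is handled,
// similar in spirit to setTimeout(fn, 0) but without the clamped timer delay:
/*
asap((a, b) => {
console.log(a + b); // logs 3 shortly after the current task completes
}, 1, 2);
*/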
  4606. // EXTERNAL MODULE: ./src/utils/errors.js
  4607. var utils_errors = __nested_webpack_require_314174__(8581);
  4608. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
  4609. /*
  4610. * speedy-vision.js
  4611. * GPU-accelerated Computer Vision for JavaScript
  4612. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4613. *
  4614. * Licensed under the Apache License, Version 2.0 (the "License");
  4615. * you may not use this file except in compliance with the License.
  4616. * You may obtain a copy of the License at
  4617. *
  4618. * http://www.apache.org/licenses/LICENSE-2.0
  4619. *
  4620. * Unless required by applicable law or agreed to in writing, software
  4621. * distributed under the License is distributed on an "AS IS" BASIS,
  4622. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4623. * See the License for the specific language governing permissions and
  4624. * limitations under the License.
  4625. *
  4626. * speedy-texture-reader.js
  4627. * Reads data from textures
  4628. */
  4629. /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
  4630. const DEFAULT_NUMBER_OF_BUFFERS = 2;
  4631. /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
  4632. const runOnNextFrame = navigator.userAgent.includes('Firefox') ? (fn, ...args) => setTimeout(fn, 10, ...args) :
  4633. // RAF produces a warning on Firefox
  4634. (fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args)); // reduce battery usage
  4635. /**
  4636. * Reads data from textures
  4637. */
  4638. class SpeedyTextureReader {
  4639. /**
  4640. * Constructor
  4641. * @param {number} [numberOfBuffers]
  4642. */
  4643. constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS) {
  4644. utils/* Utils */.A.assert(numberOfBuffers > 0);
  4645. /** @type {boolean} is this object initialized? */
  4646. this._initialized = false;
  4647. /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
  4648. this._pixelBuffer = new Array(numberOfBuffers).fill(null).map(() => new Uint8Array(0));
  4649. /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
  4650. this._pbo = new Array(numberOfBuffers).fill(null);
  4651. /** @type {number} the index of the buffer that will be consumed in this frame */
  4652. this._consumerIndex = 0;
  4653. /** @type {number} the index of the buffer that will be produced next */
  4654. this._producerIndex = numberOfBuffers - 1;
  4655. /** @type {SpeedyPromise<void>[]} producer-consumer promises */
  4656. this._promise = Array.from({
  4657. length: numberOfBuffers
  4658. }, () => speedy_promise/* SpeedyPromise */.i.resolve());
  4659. /** @type {boolean[]} are the contents of the ith buffer being produced? */
  4660. this._busy = new Array(numberOfBuffers).fill(false);
  4661. /** @type {boolean[]} can the ith buffer be consumed? */
  4662. this._ready = new Array(numberOfBuffers).fill(true);
  4663. }
  4664. /**
  4665. * Initialize this object
  4666. * @param {SpeedyGPU} gpu
  4667. */
  4668. init(gpu) {
  4669. this._allocatePBOs(gpu);
  4670. gpu.subscribe(this._allocatePBOs, this, gpu);
  4671. this._initialized = true;
  4672. }
  4673. /**
  4674. * Release resources
  4675. * @param {SpeedyGPU} gpu
  4676. * @returns {null}
  4677. */
  4678. release(gpu) {
  4679. gpu.unsubscribe(this._allocatePBOs, this);
  4680. this._deallocatePBOs(gpu);
  4681. this._initialized = false;
  4682. return null;
  4683. }
  4684. /**
  4685. * Read pixels from a texture, synchronously.
4686. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4687. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4688. * @param {number} [x]
  4689. * @param {number} [y]
  4690. * @param {number} [width]
  4691. * @param {number} [height]
  4692. * @returns {Uint8Array} pixels in the RGBA format
  4693. */
  4694. readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height) {
  4695. utils/* Utils */.A.assert(this._initialized);
  4696. const gl = texture.gl;
  4697. const fbo = texture.glFbo;
  4698. // clamp values
  4699. width = Math.max(0, Math.min(width, texture.width));
  4700. height = Math.max(0, Math.min(height, texture.height));
  4701. x = Math.max(0, Math.min(x, texture.width - width));
  4702. y = Math.max(0, Math.min(y, texture.height - height));
  4703. // buffer allocation
  4704. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4705. this._reallocate(sizeofBuffer);
  4706. // lost context?
  4707. if (gl.isContextLost()) return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4708. // read pixels
  4709. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4710. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
  4711. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4712. // done!
  4713. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4714. }
  4715. /**
  4716. * Read pixels from a texture, asynchronously, with PBOs.
4717. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4718. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4719. * @param {number} [x]
  4720. * @param {number} [y]
  4721. * @param {number} [width]
  4722. * @param {number} [height]
  4723. * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
  4724. * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
  4725. */
  4726. readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false) {
  4727. utils/* Utils */.A.assert(this._initialized);
  4728. const gl = texture.gl;
  4729. const fbo = texture.glFbo;
  4730. // clamp values
  4731. width = Math.max(0, Math.min(width, texture.width));
  4732. height = Math.max(0, Math.min(height, texture.height));
  4733. x = Math.max(0, Math.min(x, texture.width - width));
  4734. y = Math.max(0, Math.min(y, texture.height - height));
  4735. // buffer allocation
  4736. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4737. this._reallocate(sizeofBuffer);
  4738. // lost context?
  4739. if (gl.isContextLost()) return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
  4740. // do not optimize?
  4741. if (!useBufferedDownloads) {
  4742. const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4743. return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() => pixelBuffer);
  4744. }
  4745. // Hide latency with a Producer-Consumer mechanism
  4746. const numberOfBuffers = this._pixelBuffer.length;
  4747. // GPU needs to produce data
  4748. const producerIndex = this._producerIndex;
  4749. if (!this._busy[producerIndex]) {
  4750. const pbo = this._pbo[producerIndex];
  4751. const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
  4752. this._producerIndex = (producerIndex + 1) % numberOfBuffers;
  4753. this._ready[producerIndex] = false;
  4754. this._busy[producerIndex] = true;
  4755. //console.time("produce "+producerIndex);
  4756. this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
  4757. //console.timeEnd("produce "+producerIndex);
  4758. this._busy[producerIndex] = false;
  4759. this._ready[producerIndex] = true;
  4760. });
  4761. }
  4762. //else console.log("skip",producerIndex);
  4763. else /* skip frame */;
  4764. // CPU needs to consume data
  4765. const consumerIndex = this._consumerIndex;
  4766. this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
  4767. if (!this._ready[consumerIndex]) {
  4768. //console.time("consume "+consumerIndex);
  4769. return this._promise[consumerIndex].then(() => {
  4770. //console.timeEnd("consume "+consumerIndex);
  4771. this._ready[consumerIndex] = false;
  4772. return this._pixelBuffer[consumerIndex];
  4773. });
  4774. }
  4775. //console.log("NO WAIT "+consumerIndex);
  4776. this._ready[consumerIndex] = false;
  4777. return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
  4778. }
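// Illustrative usage of readPixelsAsync() (not part of the library; `gpu` and
// `texture` are hypothetical, assumed to be a SpeedyGPU and a SpeedyDrawableTexture):
/*
const reader = new SpeedyTextureReader();
reader.init(gpu);
reader.readPixelsAsync(texture, 0, 0, texture.width, texture.height, true).then(rgba => {
// with useBufferedDownloads == true, these pixels may belong to a previous
// frame, since reads are pipelined through the producer-consumer buffers
console.log(rgba[0], rgba[1], rgba[2], rgba[3]); // first pixel, RGBA
});
*/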
  4779. /**
  4780. * Reallocate the pixel buffers, so that they can hold the required number of bytes
  4781. * If the pixel buffers already have the required capacity, then nothing is done
  4782. * @param {number} size in bytes
  4783. */
  4784. _reallocate(size) {
  4785. // no need to reallocate
  4786. if (size <= this._pixelBuffer[0].byteLength) return;
  4787. // reallocate
  4788. for (let i = 0; i < this._pixelBuffer.length; i++) {
  4789. const newBuffer = new Uint8Array(size);
  4790. //newBuffer.set(this._pixelBuffer[i]); // make this optional?
  4791. this._pixelBuffer[i] = newBuffer;
  4792. }
  4793. }
  4794. /**
  4795. * Allocate PBOs
  4796. * @param {SpeedyGPU} gpu
  4797. */
  4798. _allocatePBOs(gpu) {
  4799. const gl = gpu.gl;
  4800. for (let i = 0; i < this._pbo.length; i++) this._pbo[i] = gl.createBuffer();
  4801. }
  4802. /**
  4803. * Deallocate PBOs
  4804. * @param {SpeedyGPU} gpu
  4805. */
  4806. _deallocatePBOs(gpu) {
  4807. const gl = gpu.gl;
  4808. for (let i = this._pbo.length - 1; i >= 0; i--) {
  4809. gl.deleteBuffer(this._pbo[i]);
  4810. this._pbo[i] = null;
  4811. }
  4812. }
  4813. /**
  4814. * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
  4815. * It's assumed that the target texture is in the RGBA8 format
  4816. * @param {WebGL2RenderingContext} gl
  4817. * @param {WebGLBuffer} pbo
  4818. * @param {Uint8Array} outputBuffer with size >= width * height * 4
  4819. * @param {WebGLFramebuffer} fbo
  4820. * @param {GLint} x
  4821. * @param {GLint} y
  4822. * @param {GLsizei} width
  4823. * @param {GLsizei} height
  4824. * @returns {SpeedyPromise<void>}
  4825. */
  4826. static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height) {
  4827. /*
  4828. When testing Speedy on Chrome (mobile) using about:tracing with the
  4829. --enable-gpu-service-tracing flag, I found that A LOT of time is spent
  4830. in TraceGLAPI::glMapBufferRange, which takes place just after
  4831. GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
  4832. Using multiple PBOs doesn't seem to impact Chrome too much. Performance
  4833. is much better on Firefox. This suggests there is room for improvement.
  4834. I do not yet understand clearly the cause for this lag on Chrome. It
  4835. may be a CPU-GPU synchronization issue.
  4836. EDIT: I have found that using gl.flush() aggressively greatly improves
  4837. things. WebGL commands will be pushed frequently!
  4838. See also:
  4839. https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
  4840. https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
  4841. */
  4842. const size = width * height * 4;
  4843. // validate outputBuffer
  4844. utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
  4845. // read pixels into the PBO
  4846. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4847. gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
  4848. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4849. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  4850. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4851. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4852. // create a fence
  4853. const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
  4854. gl.flush(); // make sure the sync command is read
  4855. // wait for the commands to be processed by the GPU
  4856. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  4857. // according to the WebGL2 spec sec 3.7.14 Sync objects,
  4858. // "sync objects may only transition to the signaled state
  4859. // when the user agent's event loop is not executing a task"
  4860. // in other words, it won't be signaled in the same frame
  4861. if (settings/* Settings */.w.gpuPollingMode != 'asap') runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);else asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  4862. }).then(() => {
  4863. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4864. gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
  4865. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4866. }).catch(err => {
  4867. throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
  4868. }).finally(() => {
  4869. gl.deleteSync(sync);
  4870. });
  4871. }
  4872. /**
  4873. * Waits for a sync object to become signaled
  4874. * @param {WebGL2RenderingContext} gl
  4875. * @param {WebGLSync} sync
  4876. * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
  4877. * @param {Function} resolve
  4878. * @param {Function} reject
  4879. * @param {number} [pollInterval] in milliseconds
  4880. * @param {number} [remainingAttempts] for timeout
  4881. */
  4882. static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000) {
  4883. (function poll() {
  4884. const status = gl.clientWaitSync(sync, flags, 0);
  4885. if (remainingAttempts-- <= 0) {
  4886. reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
  4887. } else if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
  4888. resolve();
  4889. } else {
  4890. //setTimeout(poll, pollInterval);
  4891. if (settings/* Settings */.w.gpuPollingMode != 'asap') requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
  4892. else asap(poll);
  4893. }
  4894. })();
  4895. }
  4896. }
  4897. // EXTERNAL MODULE: ./src/utils/globals.js
  4898. var globals = __nested_webpack_require_314174__(3816);
  4899. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
  4900. /*
  4901. * speedy-vision.js
  4902. * GPU-accelerated Computer Vision for JavaScript
  4903. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4904. *
  4905. * Licensed under the Apache License, Version 2.0 (the "License");
  4906. * you may not use this file except in compliance with the License.
  4907. * You may obtain a copy of the License at
  4908. *
  4909. * http://www.apache.org/licenses/LICENSE-2.0
  4910. *
  4911. * Unless required by applicable law or agreed to in writing, software
  4912. * distributed under the License is distributed on an "AS IS" BASIS,
  4913. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4914. * See the License for the specific language governing permissions and
  4915. * limitations under the License.
  4916. *
  4917. * speedy-texture.js
  4918. * A wrapper around WebGLTexture
  4919. */
  4920. /**
  4921. * Get a buffer filled with zeros
  4922. * @param {number} size number of bytes
  4923. * @returns {Uint8Array}
  4924. */
  4925. /*
  4926. const zeros = (function() {
  4927. let buffer = new Uint8Array(4);
  4928. return function(size) {
  4929. if(size > buffer.length)
  4930. buffer = new Uint8Array(size);
  4931. return buffer.subarray(0, size);
  4932. }
  4933. })();
  4934. */
  4935. /**
  4936. * A wrapper around WebGLTexture
  4937. */
  4938. class SpeedyTexture {
  4939. /**
  4940. * Constructor
  4941. * @param {WebGL2RenderingContext} gl
  4942. * @param {number} width texture width in pixels
  4943. * @param {number} height texture height in pixels
  4944. * @param {number} [format]
  4945. * @param {number} [internalFormat]
  4946. * @param {number} [dataType]
  4947. * @param {number} [filter]
  4948. * @param {number} [wrap]
  4949. */
  4950. constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT) {
  4951. /** @type {WebGL2RenderingContext} rendering context */
  4952. this._gl = gl;
  4953. /** @type {number} width of the texture */
  4954. this._width = Math.max(1, width | 0);
  4955. /** @type {number} height of the texture */
  4956. this._height = Math.max(1, height | 0);
  4957. /** @type {boolean} have we generated mipmaps for this texture? */
  4958. this._hasMipmaps = false;
  4959. /** @type {number} texture format */
  4960. this._format = format;
  4961. /** @type {number} internal format (usually a sized format) */
  4962. this._internalFormat = internalFormat;
  4963. /** @type {number} data type */
  4964. this._dataType = dataType;
  4965. /** @type {number} texture filtering (min & mag) */
  4966. this._filter = filter;
  4967. /** @type {number} texture wrapping */
  4968. this._wrap = wrap;
  4969. /** @type {WebGLTexture} internal texture object */
  4970. this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
  4971. }
  4972. /**
  4973. * Releases the texture
  4974. * @returns {null}
  4975. */
  4976. release() {
  4977. const gl = this._gl;
  4978. // already released?
  4979. if (this._glTexture == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
  4980. // release resources
  4981. this.discardMipmaps();
  4982. gl.deleteTexture(this._glTexture);
  4983. this._glTexture = null;
  4984. this._width = this._height = 0;
  4985. // done!
  4986. return null;
  4987. }
  4988. /**
  4989. * Upload pixel data to the texture. The texture will be resized if needed.
  4990. * @param {TexImageSource} data
  4991. * @param {number} [width] in pixels
  4992. * @param {number} [height] in pixels
  4993. * @return {SpeedyTexture} this
  4994. */
  4995. upload(data, width = this._width, height = this._height) {
  4996. const gl = this._gl;
  4997. // bugfix: if the media is a video, we can't really
  4998. // upload it to the GPU unless it's ready
  4999. if (data instanceof HTMLVideoElement) {
  5000. if (data.readyState < 2) {
  5001. // this may happen when the video loops (Firefox)
  5002. // keep the previously uploaded texture
  5003. //Utils.warning(`Trying to process a video that isn't ready yet`);
  5004. return this;
  5005. }
  5006. }
  5007. utils/* Utils */.A.assert(width > 0 && height > 0);
  5008. this.discardMipmaps();
  5009. this._width = width;
  5010. this._height = height;
  5011. this._internalFormat = gl.RGBA8;
  5012. this._format = gl.RGBA;
  5013. this._dataType = gl.UNSIGNED_BYTE;
  5014. SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
  5015. return this;
  5016. }
  5017. /**
  5018. * Clear the texture
  5019. * @returns {this}
  5020. */
  5021. clear() {
  5022. const gl = this._gl;
  5023. // context loss?
  5024. if (gl.isContextLost()) return this;
  5025. // clear texture data
  5026. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5027. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5028. gl.bindTexture(gl.TEXTURE_2D, null);
  5029. // no mipmaps
  5030. this.discardMipmaps();
  5031. // done!
  5032. return this;
  5033. }
  5034. /**
  5035. * Resize this texture. Its content will be lost!
  5036. * @param {number} width new width, in pixels
  5037. * @param {number} height new height, in pixels
  5038. * @returns {this}
  5039. */
  5040. resize(width, height) {
  5041. const gl = this._gl;
  5042. // no need to resize?
  5043. if (this._width === width && this._height === height) return this;
  5044. // validate size
  5045. width |= 0;
  5046. height |= 0;
  5047. if (width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH) throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);else if (width < 1 || height < 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
  5048. // context loss?
  5049. if (gl.isContextLost()) return this;
  5050. // update dimensions
  5051. this._width = width;
  5052. this._height = height;
  5053. // resize
  5054. // Note: this is fast on Chrome, but seems slow on Firefox
  5055. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5056. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5057. gl.bindTexture(gl.TEXTURE_2D, null);
  5058. // no mipmaps
  5059. this.discardMipmaps();
  5060. // done!
  5061. return this;
  5062. }
  5063. /**
  5064. * Generate mipmap
  5065. * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
  5066. * @returns {SpeedyTexture} this
  5067. */
  5068. generateMipmaps(mipmap = []) {
  5069. const gl = this._gl;
  5070. // nothing to do
  5071. if (this._hasMipmaps) return this;
5072. // let the hardware compute all the levels of the pyramid, down to 1x1
  5073. // we also specify the TEXTURE_MIN_FILTER to be used from now on
  5074. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5075. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
  5076. gl.generateMipmap(gl.TEXTURE_2D);
  5077. gl.bindTexture(gl.TEXTURE_2D, null);
  5078. // accept custom textures
  5079. if (mipmap.length > 0) {
  5080. // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  5081. const width = this.width,
  5082. height = this.height;
  5083. const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
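// e.g., a 640x480 texture yields 1 + floor(log2(640)) = 10 mip levels
// (640x480, 320x240, ..., 2x1, 1x1)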
  5084. utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
  5085. // verify the dimensions of each level
  5086. for (let level = 1; level < mipmap.length; level++) {
  5087. // use max(1, floor(size / 2^lod)), in accordance to
  5088. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  5089. const w = Math.max(1, width >>> level);
  5090. const h = Math.max(1, height >>> level);
  5091. // verify the dimensions of this level
  5092. utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
  5093. // copy to mipmap
  5094. mipmap[level].copyTo(this, level);
  5095. }
  5096. }
  5097. // done!
  5098. this._hasMipmaps = true;
  5099. return this;
  5100. }
  5101. /**
  5102. * Invalidates previously generated mipmap, if any
  5103. */
  5104. discardMipmaps() {
  5105. const gl = this._gl;
  5106. // nothing to do
  5107. if (!this._hasMipmaps) return;
  5108. // reset the min filter
  5109. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5110. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
  5111. gl.bindTexture(gl.TEXTURE_2D, null);
  5112. // done!
  5113. this._hasMipmaps = false;
  5114. }
  5115. /**
  5116. * Does this texture have a mipmap?
  5117. * @returns {boolean}
  5118. */
  5119. hasMipmaps() {
  5120. return this._hasMipmaps;
  5121. }
  5122. /**
  5123. * Has this texture been released?
  5124. * @returns {boolean}
  5125. */
  5126. isReleased() {
  5127. return this._glTexture == null;
  5128. }
  5129. /**
  5130. * The internal WebGLTexture
  5131. * @returns {WebGLTexture}
  5132. */
  5133. get glTexture() {
  5134. return this._glTexture;
  5135. }
  5136. /**
  5137. * The width of the texture, in pixels
  5138. * @returns {number}
  5139. */
  5140. get width() {
  5141. return this._width;
  5142. }
  5143. /**
  5144. * The height of the texture, in pixels
  5145. * @returns {number}
  5146. */
  5147. get height() {
  5148. return this._height;
  5149. }
  5150. /**
  5151. * The WebGL Context
  5152. * @returns {WebGL2RenderingContext}
  5153. */
  5154. get gl() {
  5155. return this._gl;
  5156. }
  5157. /**
  5158. * Create a WebGL texture
  5159. * @param {WebGL2RenderingContext} gl
  5160. * @param {number} width in pixels
  5161. * @param {number} height in pixels
  5162. * @param {number} format usually gl.RGBA
  5163. * @param {number} internalFormat usually gl.RGBA8
  5164. * @param {number} dataType usually gl.UNSIGNED_BYTE
  5165. * @param {number} filter usually gl.NEAREST or gl.LINEAR
  5166. * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
  5167. * @returns {WebGLTexture}
  5168. */
  5169. static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap) {
  5170. utils/* Utils */.A.assert(width > 0 && height > 0);
  5171. // create & bind texture
  5172. const texture = gl.createTexture();
  5173. gl.bindTexture(gl.TEXTURE_2D, texture);
  5174. // setup
  5175. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  5176. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  5177. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
  5178. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
  5179. //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
  5180. gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
  5181. // unbind & return
  5182. gl.bindTexture(gl.TEXTURE_2D, null);
  5183. return texture;
  5184. }
  5185. /**
  5186. * Upload pixel data to a WebGL texture
  5187. * @param {WebGL2RenderingContext} gl
  5188. * @param {WebGLTexture} texture
  5189. * @param {GLsizei} width texture width
  5190. * @param {GLsizei} height texture height
  5191. * @param {TexImageSource} pixels
  5192. * @param {GLint} lod mipmap level-of-detail
  5193. * @param {number} format
  5194. * @param {number} internalFormat
  5195. * @param {number} dataType
  5196. * @returns {WebGLTexture} texture
  5197. */
  5198. static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType) {
  5199. // Prefer calling _upload() before gl.useProgram() to avoid the
  5200. // needless switching of GL programs internally. See also:
  5201. // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  5202. gl.bindTexture(gl.TEXTURE_2D, texture);
  5203. /*
5204. // texSubImage2D appears slower than texImage2D here, contrary to what the spec suggests?
  5205. gl.texSubImage2D(gl.TEXTURE_2D, // target
  5206. lod, // mip level
  5207. 0, // x-offset
  5208. 0, // y-offset
  5209. width, // texture width
  5210. height, // texture height
  5211. gl.RGBA, // source format
  5212. gl.UNSIGNED_BYTE, // source type
  5213. pixels); // source data
  5214. */
  5215. gl.texImage2D(gl.TEXTURE_2D,
  5216. // target
  5217. lod,
  5218. // mip level
  5219. internalFormat,
  5220. // internal format
  5221. width,
  5222. // texture width
  5223. height,
  5224. // texture height
  5225. 0,
  5226. // border
  5227. format,
  5228. // source format
  5229. dataType,
  5230. // source type
  5231. pixels); // source data
  5232. gl.bindTexture(gl.TEXTURE_2D, null);
  5233. return texture;
  5234. }
  5235. }
  5236. /**
  5237. * A SpeedyTexture with a framebuffer
  5238. */
  5239. class SpeedyDrawableTexture extends SpeedyTexture {
  5240. /**
  5241. * Constructor
  5242. * @param {WebGL2RenderingContext} gl
  5243. * @param {number} width texture width in pixels
  5244. * @param {number} height texture height in pixels
  5245. * @param {number} [format]
  5246. * @param {number} [internalFormat]
  5247. * @param {number} [dataType]
  5248. * @param {number} [filter]
  5249. * @param {number} [wrap]
  5250. */
  5251. constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined) {
  5252. super(gl, width, height, format, internalFormat, dataType, filter, wrap);
  5253. /** @type {WebGLFramebuffer} framebuffer */
  5254. this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
  5255. }
  5256. /**
  5257. * Releases the texture
  5258. * @returns {null}
  5259. */
  5260. release() {
  5261. const gl = this._gl;
  5262. // already released?
  5263. if (this._glFbo == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
  5264. // release the framebuffer
  5265. gl.deleteFramebuffer(this._glFbo);
  5266. this._glFbo = null;
  5267. // release the SpeedyTexture
  5268. return super.release();
  5269. }
  5270. /**
  5271. * The internal WebGLFramebuffer
  5272. * @returns {WebGLFramebuffer}
  5273. */
  5274. get glFbo() {
  5275. return this._glFbo;
  5276. }
  5277. /**
  5278. * Copy this texture into another
  5279. * (you may have to discard the mipmaps after calling this function)
  5280. * @param {SpeedyTexture} texture target texture
  5281. * @param {number} [lod] level-of-detail of the target texture
  5282. */
  5283. copyTo(texture, lod = 0) {
  5284. const gl = this._gl;
  5285. // context loss?
  5286. if (gl.isContextLost()) return;
  5287. // compute texture size as max(1, floor(size / 2^lod)),
  5288. // in accordance to the OpenGL ES 3.0 spec sec 3.8.10.4
  5289. // (Mipmapping)
  5290. const pot = 1 << (lod |= 0);
  5291. const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
  5292. const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
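// e.g., copying into lod 2 of a 100x60 target expects a source of size
// max(1, floor(100/4)) x max(1, floor(60/4)) = 25 x 15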
  5293. // validate
  5294. utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
  5295. // copy to texture
  5296. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
  5297. }
  5298. /*
  5299. * Resize this texture
  5300. * @param {number} width new width, in pixels
  5301. * @param {number} height new height, in pixels
  5302. * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
  5303. * @returns {this}
  5304. */
  5305. /*resize(width, height, preserveContent = false)
  5306. {
  5307. const gl = this._gl;
  5308. // no need to preserve the content?
  5309. if(!preserveContent)
  5310. return super.resize(width, height);
  5311. // no need to resize?
  5312. if(this._width === width && this._height === height)
  5313. return this;
  5314. // validate size
  5315. width |= 0; height |= 0;
  5316. Utils.assert(width > 0 && height > 0);
  5317. // context loss?
  5318. if(gl.isContextLost())
  5319. return this;
  5320. // allocate new texture
  5321. const newTexture = SpeedyTexture._createTexture(gl, width, height);
  5322. // initialize the new texture with zeros to avoid a
  5323. // warning when calling copyTexSubImage2D() on Firefox
  5324. // this may not be very efficient?
  5325. SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
  5326. // copy the old texture to the new one
  5327. const oldWidth = this._width, oldHeight = this._height;
  5328. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
  5329. // bind FBO
  5330. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5331. // invalidate old data (is this needed?)
  5332. gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
  5333. // attach the new texture to the existing framebuffer
  5334. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5335. gl.COLOR_ATTACHMENT0, // color buffer
  5336. gl.TEXTURE_2D, // tex target
  5337. newTexture, // texture
  5338. 0); // mipmap level
  5339. // unbind FBO
  5340. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5341. // release the old texture and replace it
  5342. gl.deleteTexture(this._glTexture);
  5343. this._glTexture = newTexture;
  5344. // update dimensions & discard mipmaps
  5345. this.discardMipmaps();
  5346. this._width = width;
  5347. this._height = height;
  5348. // done!
  5349. return this;
  5350. }
  5351. */
  5352. /**
  5353. * Clear the texture
  5354. * @returns {this}
  5355. */
  5356. clear() {
  5357. //
  5358. // When we pass null to texImage2D(), it seems that Firefox
  5359. // doesn't clear the texture. Instead, it displays this warning:
  5360. //
  5361. // "WebGL warning: drawArraysInstanced:
  5362. // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
  5363. //
  5364. // Here is a workaround:
  5365. //
  5366. return this.clearToColor(0, 0, 0, 0);
  5367. }
  5368. /**
  5369. * Clear the texture to a color
  5370. * @param {number} r red component, a value in [0,1]
  5371. * @param {number} g green component, a value in [0,1]
  5372. * @param {number} b blue component, a value in [0,1]
  5373. * @param {number} a alpha component, a value in [0,1]
  5374. * @returns {this}
  5375. */
  5376. clearToColor(r, g, b, a) {
  5377. const gl = this._gl;
  5378. // context loss?
  5379. if (gl.isContextLost()) return this;
  5380. // clamp parameters
  5381. r = Math.max(0.0, Math.min(+r, 1.0));
  5382. g = Math.max(0.0, Math.min(+g, 1.0));
  5383. b = Math.max(0.0, Math.min(+b, 1.0));
  5384. a = Math.max(0.0, Math.min(+a, 1.0));
  5385. // discard mipmaps, if any
  5386. this.discardMipmaps();
  5387. // clear the texture
  5388. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5389. gl.viewport(0, 0, this._width, this._height);
  5390. gl.clearColor(r, g, b, a);
  5391. gl.clear(gl.COLOR_BUFFER_BIT);
  5392. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5393. // done!
  5394. return this;
  5395. }
  5396. /**
  5397. * Inspect the pixels of the texture for debugging purposes
  5398. * @param {SpeedyGPU} gpu
  5399. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5400. * @returns {Uint8Array}
  5401. */
  5402. inspect(gpu, textureReader) {
  5403. if (textureReader === undefined) {
  5404. textureReader = new SpeedyTextureReader();
  5405. textureReader.init(gpu);
  5406. const pixels = textureReader.readPixelsSync(this);
  5407. textureReader.release(gpu);
  5408. return new Uint8Array(pixels); // copy the array
  5409. } else {
  5410. const pixels = textureReader.readPixelsSync(this);
  5411. return new Uint8Array(pixels);
  5412. }
  5413. }
  5414. /**
  5415. * Inspect the pixels of the texture as unsigned 32-bit integers
  5416. * @param {SpeedyGPU} gpu
  5417. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5418. * @returns {Uint32Array}
  5419. */
  5420. inspect32(gpu, textureReader) {
  5421. utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
  5422. return new Uint32Array(this.inspect(gpu, textureReader).buffer);
  5423. }
  5424. /**
  5425. * Create a FBO associated with an existing texture
  5426. * @param {WebGL2RenderingContext} gl
  5427. * @param {WebGLTexture} texture
  5428. * @returns {WebGLFramebuffer}
  5429. */
  5430. static _createFramebuffer(gl, texture) {
  5431. const fbo = gl.createFramebuffer();
  5432. // setup framebuffer
  5433. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5434. gl.framebufferTexture2D(gl.FRAMEBUFFER,
  5435. // target
  5436. gl.COLOR_ATTACHMENT0,
  5437. // color buffer
  5438. gl.TEXTURE_2D,
  5439. // tex target
  5440. texture,
  5441. // texture
  5442. 0); // mipmap level
  5443. // check for errors
  5444. const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  5445. if (status != gl.FRAMEBUFFER_COMPLETE) {
  5446. const error = (() => ['FRAMEBUFFER_UNSUPPORTED', 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS', 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'].filter(err => gl[err] === status)[0] || 'unknown error')();
  5447. throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
  5448. }
  5449. // unbind & return
  5450. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5451. return fbo;
  5452. }
  5453. /**
  5454. * Copy data from a framebuffer to a texture
  5455. * @param {WebGL2RenderingContext} gl
  5456. * @param {WebGLFramebuffer} fbo we'll read the data from this
  5457. * @param {WebGLTexture} texture destination texture
  5458. * @param {GLint} x xpos (where to start copying)
  5459. * @param {GLint} y ypos (where to start copying)
  5460. * @param {GLsizei} width width of the texture
  5461. * @param {GLsizei} height height of the texture
  5462. * @param {GLint} [lod] mipmap level-of-detail
  5463. * @returns {WebGLTexture} texture
  5464. */
  5465. static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0) {
  5466. //gl.activeTexture(gl.TEXTURE0);
  5467. gl.bindTexture(gl.TEXTURE_2D, texture);
  5468. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5469. gl.copyTexSubImage2D(gl.TEXTURE_2D,
  5470. // target
  5471. lod,
  5472. // mipmap level
  5473. 0,
  5474. // xoffset
  5475. 0,
  5476. // yoffset
  5477. x,
  5478. // xpos (where to start copying)
  5479. y,
  5480. // ypos (where to start copying)
  5481. width,
  5482. // width of the texture
  5483. height // height of the texture
  5484. );
  5485. /*
  5486. gl.copyTexImage2D(
  5487. gl.TEXTURE_2D, // target
  5488. lod, // mipmap level
  5489. gl.RGBA, // internal format
  5490. x, // xpos (where to start copying)
  5491. y, // ypos (where to start copying)
  5492. width, // width of the texture
  5493. height, // height of the texture
  5494. 0 // border
  5495. );
  5496. */
  5497. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5498. gl.bindTexture(gl.TEXTURE_2D, null);
  5499. return texture;
  5500. }
  5501. }
  5502. // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
  5503. var shader_declaration = __nested_webpack_require_314174__(9420);
  5504. ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
  5505. /*
  5506. * speedy-vision.js
  5507. * GPU-accelerated Computer Vision for JavaScript
  5508. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  5509. *
  5510. * Licensed under the Apache License, Version 2.0 (the "License");
  5511. * you may not use this file except in compliance with the License.
  5512. * You may obtain a copy of the License at
  5513. *
  5514. * http://www.apache.org/licenses/LICENSE-2.0
  5515. *
  5516. * Unless required by applicable law or agreed to in writing, software
  5517. * distributed under the License is distributed on an "AS IS" BASIS,
  5518. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5519. * See the License for the specific language governing permissions and
  5520. * limitations under the License.
  5521. *
  5522. * speedy-program.js
  5523. * SpeedyProgram class
  5524. */
  5525. /** @const {Object<string,string>} Map uniform type to a gl function */
  5526. const UNIFORM_SETTERS = Object.freeze({
  5527. 'sampler2D': 'uniform1i',
  5528. 'isampler2D': 'uniform1i',
  5529. 'usampler2D': 'uniform1i',
  5530. 'float': 'uniform1f',
  5531. 'int': 'uniform1i',
  5532. 'uint': 'uniform1ui',
  5533. 'bool': 'uniform1i',
  5534. 'vec2': 'uniform2f',
  5535. 'vec3': 'uniform3f',
  5536. 'vec4': 'uniform4f',
  5537. 'ivec2': 'uniform2i',
  5538. 'ivec3': 'uniform3i',
  5539. 'ivec4': 'uniform4i',
  5540. 'uvec2': 'uniform2ui',
  5541. 'uvec3': 'uniform3ui',
  5542. 'uvec4': 'uniform4ui',
  5543. 'bvec2': 'uniform2i',
  5544. 'bvec3': 'uniform3i',
  5545. 'bvec4': 'uniform4i',
  5546. 'mat2': 'uniformMatrix2fv',
  5547. 'mat3': 'uniformMatrix3fv',
  5548. 'mat4': 'uniformMatrix4fv'
  5549. });
  5550. /**
  5551. * @typedef {object} SpeedyProgramOptions
  5552. * @property {boolean} [renderToTexture] render results to a texture?
  5553. * @property {boolean} [pingpong] alternate output texture between calls
  5554. */
  5555. /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
  5556. /**
  5557. * A SpeedyProgram is a Function that runs GLSL code
  5558. */
  5559. class SpeedyProgram extends Function {
  5560. /**
  5561. * Creates a new SpeedyProgram
  5562. * @param {WebGL2RenderingContext} gl WebGL context
  5563. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5564. * @param {SpeedyProgramOptions} [options] user options
  5565. */
  5566. constructor(gl, shaderdecl, options = {}) {
  5567. super('...args', 'return this._self._call(...args)');
  5568. /** @type {SpeedyProgram} this function bound to this function! */
  5569. this._self = this.bind(this);
  5570. this._self._init(gl, shaderdecl, options);
  5571. return this._self;
  5572. }
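//
// The constructor above makes each SpeedyProgram instance directly callable: super() compiles a
// function body that forwards to _call(), and bind() fixes `this`. A minimal, self-contained
// sketch of the same pattern (hypothetical names, for illustration only):
//
// class Callable extends Function {
//     constructor() {
//         super('...args', 'return this._self._call(...args)'); // generated body forwards to _call()
//         this._self = this.bind(this); // the bound copy is what users actually invoke
//         return this._self; // `new Callable()` returns the bound, callable instance
//     }
//     _call(x) { return 2 * x; }
// }
// const f = new Callable();
// f(21); // 42 - invoked like a plain function, yet f instanceof Callable
//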
  5573. /**
  5574. * Initialize the SpeedyProgram
  5575. * @param {WebGL2RenderingContext} gl WebGL context
  5576. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5577. * @param {SpeedyProgramOptions} options user options
  5578. */
  5579. _init(gl, shaderdecl, options) {
  5580. // not a valid context?
  5581. if (gl.isContextLost()) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
  5582. // options object
  5583. options = Object.assign({
  5584. // default options
  5585. renderToTexture: true,
  5586. pingpong: false
  5587. }, options);
  5588. /** @type {WebGL2RenderingContext} */
  5589. this._gl = gl;
  5590. /** @type {WebGLProgram} vertex shader + fragment shader */
  5591. this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
  5592. /** @type {ProgramGeometry} this is a quad */
  5593. this._geometry = new ProgramGeometry(gl, {
  5594. position: shaderdecl.locationOfAttributes.position,
  5595. texCoord: shaderdecl.locationOfAttributes.texCoord
  5596. });
  5597. /** @type {string[]} names of the arguments of the SpeedyProgram */
  5598. this._argnames = shaderdecl.arguments;
  5599. /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
  5600. this._argIsArray = new Array(this._argnames.length).fill(false);
  5601. /** @type {UBOHelper} UBO helper (lazy instantiation) */
  5602. this._ubo = null;
  5603. /** @type {boolean} should we render to a texture? If false, we render to the canvas */
  5604. this._renderToTexture = Boolean(options.renderToTexture);
  5605. /** @type {number} width of the output */
  5606. this._width = 1;
  5607. /** @type {number} height of the output */
  5608. this._height = 1;
  5609. /** @type {[number,number]} cached object that stores the size of the output */
  5610. this._size = [1, 1];
  5611. /** @type {SpeedyDrawableTexture[]} output texture(s) */
  5612. this._texture = new Array(options.pingpong ? 2 : 1).fill(null);
  5613. /** @type {number} used for pingpong rendering */
  5614. this._textureIndex = 0;
  5615. /** @type {Map<string,UniformVariable>} uniform variables */
  5616. this._uniform = new Map();
  5617. /** @type {ShaderDeclaration} shader declaration */
  5618. this._shaderdecl = shaderdecl;
  5619. // autodetect uniforms
  5620. gl.useProgram(this._program);
  5621. for (const name of shaderdecl.uniforms) {
  5622. const type = shaderdecl.uniformType(name);
  5623. const location = gl.getUniformLocation(this._program, name);
  5624. this._uniform.set(name, new UniformVariable(type, location));
  5625. }
  5626. // match arguments & uniforms
  5627. for (let j = 0; j < this._argnames.length; j++) {
  5628. const argname = this._argnames[j];
  5629. if (!this._uniform.has(argname)) {
  5630. this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
  5631. if (!this._argIsArray[j]) throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
  5632. }
  5633. }
  5634. }
  5635. /**
  5636. * Run the SpeedyProgram
  5637. * @param {...SpeedyProgramUniformValue} args
  5638. * @returns {SpeedyDrawableTexture}
  5639. */
  5640. _call(...args) {
  5641. const gl = this._gl;
  5642. const argnames = this._argnames;
  5643. const texture = this._texture[this._textureIndex];
  5644. // matching arguments?
  5645. if (args.length != argnames.length) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
  5646. // can't use the output texture as an input
  5647. /*
  5648. // slower method
  5649. const flatArgs = Utils.flatten(args);
  5650. for(let j = flatArgs.length - 1; j >= 0; j--) {
  5651. if(flatArgs[j] === this._texture[this._textureIndex])
  5652. throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5653. }
  5654. */
  5655. for (let j = args.length - 1; j >= 0; j--) {
  5656. if (args[j] === texture) throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5657. // else if(Array.isArray(args[j])) ...
  5658. // we don't support passing arrays of textures at the time of this writing
  5659. }
  5660. // context loss?
  5661. if (gl.isContextLost()) return texture;
  5662. // use program
  5663. gl.useProgram(this._program);
  5664. // bind the VAO
  5665. gl.bindVertexArray(this._geometry.vao);
  5666. // select the render target
  5667. const fbo = this._renderToTexture ? texture.glFbo : null;
  5668. // update texSize uniform (available in all fragment shaders)
  5669. const texSize = this._uniform.get('texSize');
  5670. this._size[0] = this._width;
  5671. this._size[1] = this._height;
  5672. texSize.setValue(gl, this._size);
  5673. // set uniforms[i] to args[i]
  5674. for (let i = 0, texNo = 0; i < args.length; i++) {
  5675. const argname = argnames[i];
  5676. if (!this._argIsArray[i]) {
  5677. // uniform variable matches argument name
  5678. const uniform = this._uniform.get(argname);
  5679. texNo = uniform.setValue(gl, args[i], texNo);
  5680. } else {
  5681. // uniform array matches argument name
  5682. const array = args[i];
  5683. if (Array.isArray(array)) {
  5684. if (this._uniform.has(indexedVariable(argname, array.length))) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
  5685. for (let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++) texNo = uniform.setValue(gl, array[j], texNo);
  5686. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
  5687. }
  5688. }
  5689. // set Uniform Buffer Objects (if any)
  5690. if (this._ubo !== null) this._ubo.update();
  5691. // bind the FBO
  5692. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5693. // draw call
  5694. gl.viewport(0, 0, this._width, this._height);
5695. gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, first vertex, vertex count
  5696. // unbind the FBO
  5697. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5698. // unbind the VAO
  5699. gl.bindVertexArray(null);
  5700. // we've just changed the texture! discard the pyramid, if any
  5701. if (texture != null) texture.discardMipmaps();
  5702. // ping-pong rendering
  5703. this._pingpong();
  5704. // done!
  5705. return texture;
  5706. }
  5707. /**
  5708. * Set the output texture(s) and its (their) shape(s)
  5709. * @param {number} width new width, in pixels
  5710. * @param {number} height new height, in pixels
  5711. * @param {...SpeedyDrawableTexture|null} texture output texture(s)
  5712. * @returns {SpeedyProgram} this
  5713. */
  5714. outputs(width, height, ...texture) {
  5715. this._setOutputTexture(...texture);
  5716. this._setOutputSize(width, height);
  5717. return this;
  5718. }
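//
// Typical usage of a SpeedyProgram (sketch; `someProgram`, `inputTexture` and `outputTexture`
// are hypothetical names): set the output shape & texture once, then invoke the program as a
// function, passing one value per declared shader argument:
//
// someProgram.outputs(width, height, outputTexture); // render target & viewport size
// const result = someProgram(inputTexture, 0.5);     // runs the shader; returns the output texture
//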
  5719. /**
  5720. * Set the size of the output
  5721. * @param {number} width new width, in pixels
  5722. * @param {number} height new height, in pixels
  5723. * @returns {SpeedyProgram} this
  5724. */
  5725. _setOutputSize(width, height) {
  5726. utils/* Utils */.A.assert(width > 0 && height > 0);
  5727. // update output size
  5728. this._width = width | 0;
  5729. this._height = height | 0;
  5730. // resize the output texture(s)
  5731. for (let i = 0; i < this._texture.length; i++) {
  5732. if (this._texture[i] != null) this._texture[i].resize(this._width, this._height);
  5733. }
  5734. // done!
  5735. return this;
  5736. }
  5737. /**
  5738. * Use the provided texture(s) as output
  5739. * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
  5740. * @returns {SpeedyProgram} this
  5741. */
  5742. _setOutputTexture(...texture) {
  5743. utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
  5744. // update output texture(s)
  5745. for (let i = 0; i < this._texture.length; i++) this._texture[i] = texture[i];
  5746. this._textureIndex = 0;
  5747. // done!
  5748. return this;
  5749. }
  5750. /**
  5751. * Clear the internal textures
  5752. * @returns {SpeedyDrawableTexture}
  5753. */
  5754. clear() {
  5755. const texture = this._texture[this._textureIndex];
  5756. // clear internal textures
  5757. for (let i = 0; i < this._texture.length; i++) this._texture[i].clear();
  5758. // ping-pong rendering
  5759. this._pingpong();
  5760. // done!
  5761. return texture;
  5762. }
  5763. /**
  5764. * Set data using a Uniform Buffer Object
  5765. * @param {string} blockName uniform block name
  5766. * @param {ArrayBufferView} data
  5767. * @returns {SpeedyProgram} this
  5768. */
  5769. setUBO(blockName, data) {
  5770. if (this._ubo === null) this._ubo = new UBOHelper(this._gl, this._program);
  5771. this._ubo.set(blockName, data);
  5772. return this;
  5773. }
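//
// Sketch of setUBO() usage (hypothetical block name & layout). The GLSL would declare a uniform
// block, e.g. `layout(std140) uniform MyBlock { vec4 params; };`, and the host uploads matching bytes:
//
// someProgram.setUBO('MyBlock', new Float32Array([1, 2, 3, 4])); // stored now, uploaded on the next run
//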
  5774. /**
  5775. * Release the resources associated with this SpeedyProgram
  5776. * @returns {null}
  5777. */
  5778. release() {
  5779. const gl = this._gl;
  5780. // Release UBOs (if any)
  5781. if (this._ubo != null) this._ubo = this._ubo.release();
  5782. // Unlink textures
  5783. this._texture.fill(null);
  5784. // Release geometry
  5785. this._geometry = this._geometry.release();
  5786. // Release program
  5787. gl.deleteProgram(this._program);
  5788. this._program = null;
  5789. // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
  5790. // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
  5791. // will automatically be marked for deletion when the JS object is
  5792. // destroyed (i.e., garbage collected)
  5793. // done!
  5794. return null;
  5795. }
  5796. /**
  5797. * A constant #defined in the shader declaration
  5798. * @param {string} name
  5799. * @returns {number}
  5800. */
  5801. definedConstant(name) {
  5802. return this._shaderdecl.definedConstant(name);
  5803. }
  5804. /**
  5805. * Helper method for pingpong rendering: alternates
  5806. * the texture index from 0 to 1 and vice-versa
  5807. */
  5808. _pingpong() {
  5809. if (this._texture.length > 1) this._textureIndex = 1 - this._textureIndex;
  5810. }
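//
// With { pingpong: true }, successive calls alternate between _texture[0] and _texture[1], so the
// result of one call can safely be fed back into the next. Sketch (hypothetical program name):
//
// let tex = initialTexture;
// for (let i = 0; i < 8; i++)
//     tex = iterativeProgram(tex); // each call writes to the texture that is NOT the current input
//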
  5811. /**
  5812. * Compile and link GLSL shaders
  5813. * @param {WebGL2RenderingContext} gl
  5814. * @param {string} vertexShaderSource GLSL code of the vertex shader
  5815. * @param {string} fragmentShaderSource GLSL code of the fragment shader
  5816. * @returns {WebGLProgram}
  5817. */
  5818. static _compile(gl, vertexShaderSource, fragmentShaderSource) {
  5819. const program = gl.createProgram();
  5820. const vertexShader = gl.createShader(gl.VERTEX_SHADER);
  5821. const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  5822. // compile vertex shader
  5823. gl.shaderSource(vertexShader, vertexShaderSource);
  5824. gl.compileShader(vertexShader);
  5825. gl.attachShader(program, vertexShader);
  5826. // compile fragment shader
  5827. gl.shaderSource(fragmentShader, fragmentShaderSource);
  5828. gl.compileShader(fragmentShader);
  5829. gl.attachShader(program, fragmentShader);
  5830. // link program
  5831. gl.linkProgram(program);
  5832. gl.validateProgram(program);
  5833. // return on success
  5834. if (gl.getProgramParameter(program, gl.LINK_STATUS)) return program;
5835. // collect the error messages
  5836. const errors = [gl.getShaderInfoLog(fragmentShader), gl.getShaderInfoLog(vertexShader), gl.getProgramInfoLog(program)];
  5837. gl.deleteProgram(program);
  5838. gl.deleteShader(fragmentShader);
  5839. gl.deleteShader(vertexShader);
5840. // format the shader source with line numbers & throw
  5841. const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
  5842. const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
  5843. const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
  5844. const formattedSource = source.split('\n').map((line, no) => col(1 + no) + line).join('\n');
  5845. throw new utils_errors/* GLError */.wB(`\n\n---------- ERROR ----------\n\n` + errors.filter(err => err).join('\n') + `\n\n---------- SOURCE CODE ----------\n\n` + formattedSource + '\n');
  5846. }
  5847. }
  5848. // ============================================================================
  5849. // HELPERS
  5850. // ============================================================================
  5851. /**
  5852. * Configure and store the VAO and the VBOs
  5853. * @param {WebGL2RenderingContext} gl
  5854. * @param {LocationOfAttributes} location
  5855. * @returns {ProgramGeometry}
  5856. *
  5857. * @typedef {Object} LocationOfAttributes
  5858. * @property {number} position
  5859. * @property {number} texCoord
  5860. *
  5861. * @typedef {Object} BufferOfAttributes
  5862. * @property {WebGLBuffer} position
  5863. * @property {WebGLBuffer} texCoord
  5864. */
  5865. function ProgramGeometry(gl, location) {
  5866. /** @type {WebGLVertexArrayObject} Vertex Array Object */
  5867. this.vao = gl.createVertexArray();
  5868. /** @type {BufferOfAttributes} Vertex Buffer Objects */
  5869. this.vbo = Object.freeze({
  5870. position: gl.createBuffer(),
  5871. texCoord: gl.createBuffer()
  5872. });
  5873. /** @type {WebGL2RenderingContext} */
  5874. this._gl = gl;
  5875. // bind the VAO
  5876. gl.bindVertexArray(this.vao);
  5877. // set the position attribute
  5878. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
  5879. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5880. // clip coordinates (CCW)
  5881. -1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
  5882. gl.enableVertexAttribArray(location.position);
  5883. gl.vertexAttribPointer(location.position,
  5884. // attribute location
  5885. 2,
  5886. // 2 components per vertex (x,y)
  5887. gl.FLOAT,
  5888. // type
  5889. false,
  5890. // don't normalize
  5891. 0,
  5892. // default stride (tightly packed)
  5893. 0); // offset
  5894. // set the texCoord attribute
  5895. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
  5896. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5897. // texture coordinates (CCW)
  5898. 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);
  5899. gl.enableVertexAttribArray(location.texCoord);
  5900. gl.vertexAttribPointer(location.texCoord,
  5901. // attribute location
  5902. 2,
  5903. // 2 components per vertex (x,y)
  5904. gl.FLOAT,
  5905. // type
  5906. false,
  5907. // don't normalize
  5908. 0,
  5909. // default stride (tightly packed)
  5910. 0); // offset
  5911. // unbind
  5912. gl.bindBuffer(gl.ARRAY_BUFFER, null);
  5913. gl.bindVertexArray(null);
  5914. // done!
  5915. return Object.freeze(this);
  5916. }
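//
// The buffers set up above describe a unit quad as two CCW triangles (6 vertices):
// positions (-1,-1) (1,-1) (-1,1) and (-1,1) (1,-1) (1,1) cover the whole clip space,
// while the texCoords map the same triangles onto (0,0)..(1,1), so each fragment of the
// output samples the corresponding point of the input texture.
//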
  5917. /**
  5918. * Releases the internal resources
  5919. * @returns {null}
  5920. */
  5921. ProgramGeometry.prototype.release = function () {
  5922. const gl = this._gl;
  5923. gl.deleteVertexArray(this.vao);
  5924. gl.deleteBuffer(this.vbo.position);
  5925. gl.deleteBuffer(this.vbo.texCoord);
  5926. return null;
  5927. };
  5928. /**
  5929. * Helper class for storing data in GLSL uniform variables
  5930. * @param {string} type
  5931. * @param {WebGLUniformLocation} location
  5932. */
  5933. function UniformVariable(type, location) {
  5934. /** @type {string} GLSL data type */
  5935. this.type = String(type);
  5936. if (!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
  5937. /** @type {WebGLUniformLocation} uniform location in a WebGL program */
  5938. this.location = location;
  5939. /** @type {string} setter function */
  5940. this.setter = UNIFORM_SETTERS[this.type];
  5941. const n = Number(this.setter.match(/^uniform(Matrix)?(\d)/)[2]) | 0;
  5942. /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
  5943. this.dim = this.type.startsWith('mat') ? 2 : this.type.indexOf('vec') >= 0 ? 1 : 0;
  5944. /** @type {number} required number of scalars */
  5945. this.length = this.dim == 2 ? n * n : n;
  5946. /** @type {SpeedyProgramUniformValue|null} cached value */
  5947. this._value = null;
  5948. }
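//
// Examples of the fields computed above: 'mat3' -> setter 'uniformMatrix3fv', dim = 2, length = 9;
// 'vec2' -> setter 'uniform2f', dim = 1, length = 2; 'float' -> setter 'uniform1f', dim = 0, length = 1.
//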
  5949. /**
  5950. * Set the value of a uniform variable
  5951. * @param {WebGL2RenderingContext} gl
  5952. * @param {SpeedyProgramUniformValue} value use column-major format for matrices
  5953. * @param {number} [texNo] current texture index
  5954. * @returns {number} new texture index
  5955. */
  5956. UniformVariable.prototype.setValue = function (gl, value, texNo = -1) {
  5957. const setValue = /** @type {Function} */gl[this.setter];
  5958. // check uniform type
  5959. if (typeof value === 'object' && this.type.endsWith('sampler2D')) {
  5960. // set texture
5961. if (texNo >= gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS) throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS}`);
else if (Array.isArray(value)) throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);
else if (value == null) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);
else if (texNo < 0) throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`);
  5962. const tex = value;
  5963. gl.activeTexture(gl.TEXTURE0 + texNo);
  5964. gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
  5965. gl.uniform1i(this.location, texNo);
  5966. texNo++;
  5967. } else if (value === this._value && typeof value !== 'object') {
  5968. // do not update the uniform if it hasn't changed
  5969. // note that value may be an array whose entries may have been updated
  5970. void 0;
  5971. } else if (typeof value === 'number' || typeof value === 'boolean') {
  5972. // set scalar value
  5973. setValue.call(gl, this.location, value);
  5974. } else if (Array.isArray(value)) {
  5975. // set vector or matrix
  5976. if (value.length === this.length) {
  5977. if (this.dim == 2) setValue.call(gl, this.location, false, value); // matrix
  5978. else setValue.call(gl, this.location, ...value); // vector
  5979. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
  5980. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
  5981. // cache the value
  5982. this._value = value;
  5983. // done
  5984. return texNo;
  5985. };
  5986. /**
  5987. * @typedef {object} UBOStuff
  5988. * @property {WebGLBuffer} buffer
  5989. * @property {number} blockBindingIndex "global" binding index
  5990. * @property {number} blockIndex UBO "location" in the program
  5991. * @property {ArrayBufferView|null} data user-data
  5992. */
  5993. /**
  5994. * A helper class for handling Uniform Buffer Objects (UBOs)
  5995. * @param {WebGL2RenderingContext} gl
  5996. * @param {WebGLProgram} program
  5997. */
  5998. function UBOHelper(gl, program) {
  5999. /** @type {WebGL2RenderingContext} */
  6000. this._gl = gl;
  6001. /** @type {WebGLProgram} */
  6002. this._program = program;
  6003. /** @type {number} auto-increment counter */
  6004. this._nextIndex = 0;
  6005. /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
  6006. this._ubo = Object.create(null);
  6007. }
  6008. /**
  6009. * Set Uniform Buffer Object data
  6010. * (the buffer will be uploaded when the program is executed)
  6011. * @param {string} name uniform block name
  6012. * @param {ArrayBufferView} data
  6013. */
  6014. UBOHelper.prototype.set = function (name, data) {
  6015. const gl = this._gl;
  6016. // create UBO entry
  6017. if (this._ubo[name] === undefined) {
  6018. this._ubo[name] = {
  6019. buffer: gl.createBuffer(),
  6020. blockBindingIndex: this._nextIndex++,
  6021. blockIndex: -1,
  6022. data: null
  6023. };
  6024. }
  6025. // get UBO entry for the given block name
  6026. const ubo = this._ubo[name];
  6027. // read block index & assign binding point
  6028. if (ubo.blockIndex < 0) {
  6029. const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
  6030. gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
  6031. ubo.blockIndex = blockIndex;
  6032. }
  6033. // store the data - we'll upload it later
  6034. ubo.data = data;
  6035. };
  6036. /**
  6037. * Update UBO data
  6038. * Called when we're using the appropriate WebGLProgram
  6039. */
  6040. UBOHelper.prototype.update = function () {
  6041. const gl = this._gl;
  6042. for (const name in this._ubo) {
  6043. const ubo = this._ubo[name];
  6044. gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
  6045. gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
  6046. gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
  6047. gl.bindBuffer(gl.UNIFORM_BUFFER, null);
  6048. }
  6049. };
  6050. /**
  6051. * Release allocated buffers
  6052. * @returns {null}
  6053. */
  6054. UBOHelper.prototype.release = function () {
  6055. const gl = this._gl;
  6056. for (const name in this._ubo) {
  6057. const ubo = this._ubo[name];
  6058. gl.deleteBuffer(ubo.buffer);
  6059. ubo.data = null;
  6060. }
  6061. return null;
  6062. };
  6063. /**
  6064. * Generates an indexed variable name, as in variable[index]
  6065. * @param {string} variable
  6066. * @param {number} index
  6067. * @returns {string} variable[index]
  6068. */
  6069. function indexedVariable(variable, index) {
  6070. //return `${variable}[${index}]`; // no caching
  6071. // is this cache lookup really faster than string concatenation?
  6072. // what about memory consumption?
  6073. const cache = indexedVariable.cache;
  6074. let nameList = cache.get(variable);
  6075. if (nameList === undefined) cache.set(variable, nameList = []);
  6076. if (nameList[index] === undefined) nameList[index] = `${variable}[${index}]`;
  6077. return nameList[index];
  6078. }
  6079. /** @type {Map<string,string[]>} cached argument names */
  6080. indexedVariable.cache = new Map(); // Object.create(null)
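//
// Example: indexedVariable('kernel', 2) === 'kernel[2]'. WebGL exposes each element of a GLSL
// uniform array under such a name (e.g. getUniformLocation(program, 'kernel[2]')), which is how
// SpeedyProgram._init() and _call() locate the uniforms that back an array argument.
//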
  6081. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
  6082. /*
  6083. * speedy-vision.js
  6084. * GPU-accelerated Computer Vision for JavaScript
  6085. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6086. *
  6087. * Licensed under the Apache License, Version 2.0 (the "License");
  6088. * you may not use this file except in compliance with the License.
  6089. * You may obtain a copy of the License at
  6090. *
  6091. * http://www.apache.org/licenses/LICENSE-2.0
  6092. *
  6093. * Unless required by applicable law or agreed to in writing, software
  6094. * distributed under the License is distributed on an "AS IS" BASIS,
  6095. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6096. * See the License for the specific language governing permissions and
  6097. * limitations under the License.
  6098. *
  6099. * speedy-program-group.js
  6100. * An abstract group of programs that run on the GPU
  6101. */
  6102. /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
  6103. /**
  6104. * @typedef {object} SpeedyProgramHelpers
  6105. * @property {function(): SpeedyProgramOptions} usesPingpongRendering
  6106. * @property {function(): SpeedyProgramOptions} rendersToCanvas
  6107. */
  6108. /** @const {SpeedyProgramHelpers} Program settings generator */
  6109. const PROGRAM_HELPERS = Object.freeze({
  6110. /**
  6111. * Pingpong Rendering: the output texture of a
  6112. * program cannot be used as an input to itself.
  6113. * This is a convenient helper in these situations
  6114. * @returns {SpeedyProgramOptions}
  6115. */
  6116. usesPingpongRendering() {
  6117. return {
  6118. pingpong: true
  6119. };
  6120. },
  6121. /**
  6122. * Render to canvas
  6123. * Use it when we're supposed to see the texture
  6124. * @returns {SpeedyProgramOptions}
  6125. */
  6126. rendersToCanvas() {
  6127. return {
  6128. renderToTexture: false
  6129. };
  6130. }
  6131. });
  6132. /**
  6133. * SpeedyProgramGroup
  6134. * A semantically correlated group
  6135. * of programs that run on the GPU
  6136. * @abstract
  6137. */
  6138. class SpeedyProgramGroup {
  6139. /**
  6140. * Class constructor
  6141. * @protected
  6142. * @param {SpeedyGPU} gpu
  6143. */
  6144. constructor(gpu) {
  6145. /** @type {SpeedyGPU} GPU-accelerated routines */
  6146. this._gpu = gpu;
  6147. /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
  6148. this._programs = [];
  6149. }
  6150. /**
  6151. * Declare a program
  6152. * @protected
  6153. * @param {string} name Program name
  6154. * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
  6155. * @param {SpeedyProgramOptions} [options] Program settings
  6156. * @returns {this}
  6157. */
  6158. declare(name, builder, options = {}) {
  6159. // lazy instantiation of kernels
  6160. Object.defineProperty(this, name, {
  6161. get: (() => {
  6162. // Why cast a symbol to symbol?
  6163. // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
  6164. const key = /** @type {symbol} */Symbol(name);
  6165. return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
  6166. })()
  6167. });
  6168. return this;
  6169. }
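//
// The getter above memoizes the program under a unique Symbol: nothing is compiled until the
// property is first read, and later reads return the cached SpeedyProgram. Usage sketch
// (hypothetical group & program names):
//
// const utilsGroup = gpu.programs.utils; // a SpeedyProgramGroup subclass that declared 'copy'
// utilsGroup.copy.outputs(w, h, tex);    // first access builds the program
// utilsGroup.copy(inputTexture);         // subsequent accesses reuse it
//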
  6170. /**
  6171. * Neat helpers to be used when declaring programs
  6172. * @returns {SpeedyProgramHelpers}
  6173. */
  6174. get program() {
  6175. return PROGRAM_HELPERS;
  6176. }
  6177. /**
  6178. * Releases all programs from this group
  6179. * @returns {null}
  6180. */
  6181. release() {
  6182. for (let i = 0; i < this._programs.length; i++) this._programs[i].release();
  6183. return null;
  6184. }
  6185. /**
  6186. * Spawn a SpeedyProgram
  6187. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6188. * @param {SpeedyProgramOptions} [options] Program settings
  6189. * @returns {SpeedyProgram}
  6190. */
  6191. _createProgram(shaderdecl, options = {}) {
  6192. const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
  6193. this._programs.push(program);
  6194. return program;
  6195. }
  6196. }
  6197. ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
  6198. /*
  6199. * speedy-vision.js
  6200. * GPU-accelerated Computer Vision for JavaScript
  6201. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6202. *
  6203. * Licensed under the Apache License, Version 2.0 (the "License");
  6204. * you may not use this file except in compliance with the License.
  6205. * You may obtain a copy of the License at
  6206. *
  6207. * http://www.apache.org/licenses/LICENSE-2.0
  6208. *
  6209. * Unless required by applicable law or agreed to in writing, software
  6210. * distributed under the License is distributed on an "AS IS" BASIS,
  6211. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6212. * See the License for the specific language governing permissions and
  6213. * limitations under the License.
  6214. *
  6215. * utils.js
  6216. * GPU utilities
  6217. */
  6218. //
  6219. // Shaders
  6220. //
  6221. // Copy image
  6222. const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
  6223. // Copy keypoints
  6224. const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6225. 'TYPE': 1
  6226. }).withArguments('image');
  6227. // Copy 2D vectors
  6228. const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6229. 'TYPE': 2
  6230. }).withArguments('image');
  6231. // Flip y-axis for output
  6232. const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
  6233. // Fill image with a constant
  6234. const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
  6235. // Fill zero or more color components of the input image with a constant value
  6236. const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
  6237. // Copy the src component of src to zero or more color components of a copy of dest
  6238. const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
  6239. // Scan the entire image and find the minimum & maximum pixel intensity
  6240. const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
  6241. // Compute the partial derivatives of an image
  6242. const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
  6243. /**
  6244. * SpeedyProgramGroupUtils
  6245. * Utility operations
  6246. */
  6247. class SpeedyProgramGroupUtils extends SpeedyProgramGroup {
  6248. /**
  6249. * Class constructor
  6250. * @param {SpeedyGPU} gpu
  6251. */
  6252. constructor(gpu) {
  6253. super(gpu);
  6254. this
  6255. // render to the canvas
  6256. .declare('renderToCanvas', flipY, Object.assign({}, this.program.rendersToCanvas()))
  6257. // copy image
  6258. .declare('copy', copy)
  6259. // copy keypoints
  6260. .declare('copyKeypoints', copyKeypoints)
  6261. // copy 2D vectors
  6262. .declare('copy2DVectors', copy2DVectors)
  6263. // Fill image with a constant
  6264. .declare('fill', fill)
  6265. // Fill zero or more color components of the input image with a constant value
  6266. .declare('fillComponents', fillComponents)
  6267. // Copy the src component of src to zero or more color components of a copy of dest
  6268. .declare('copyComponents', copyComponents)
  6269. // find minimum & maximum pixel intensity
  6270. .declare('scanMinMax2D', scanMinMax2D, Object.assign({}, this.program.usesPingpongRendering()))
  6271. // Compute the partial derivatives of an image
  6272. .declare('sobelDerivatives', sobelDerivatives);
  6273. }
  6274. }
  6275. // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
  6276. var convolution = __nested_webpack_require_314174__(1672);
  6277. ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
  6278. /*
  6279. * speedy-vision.js
  6280. * GPU-accelerated Computer Vision for JavaScript
  6281. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6282. *
  6283. * Licensed under the Apache License, Version 2.0 (the "License");
  6284. * you may not use this file except in compliance with the License.
  6285. * You may obtain a copy of the License at
  6286. *
  6287. * http://www.apache.org/licenses/LICENSE-2.0
  6288. *
  6289. * Unless required by applicable law or agreed to in writing, software
  6290. * distributed under the License is distributed on an "AS IS" BASIS,
  6291. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6292. * See the License for the specific language governing permissions and
  6293. * limitations under the License.
  6294. *
  6295. * filters.js
  6296. * Image filtering on the GPU
  6297. */
  6298. //
  6299. // Shaders
  6300. //
  6301. // Convert to greyscale
  6302. const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl').withArguments('image');
  6303. // Convolution
  6304. const filters_convolution = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl').withDefines({
  6305. 'KERNEL_SIZE_SQUARED': ksize * ksize
  6306. }).withArguments('image', 'kernel'), obj), {});
  6307. // Separable convolution
  6308. const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6309. 'KERNEL_SIZE': ksize,
  6310. 'AXIS': 0
  6311. }).withArguments('image', 'kernel'), obj), {});
  6312. const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6313. 'KERNEL_SIZE': ksize,
  6314. 'AXIS': 1
  6315. }).withArguments('image', 'kernel'), obj), {});
  6316. // Median filter
  6317. const median = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl').withDefines({
  6318. 'KERNEL_SIZE': ksize
  6319. }).withArguments('image'), obj), {});
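//
// Each reduce() above builds a plain object keyed by kernel size, e.g.
// filters_convolution = { 3: <3x3 shader>, 5: <5x5 shader>, 7: <7x7 shader> };
// convolutionX[7] is a 7-tap 1D convolution along x, and median[5] a 5x5 median filter.
// These objects are looked up by size in the declarations below.
//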
  6320. // Normalize image
  6321. const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6322. 'GREYSCALE': 1
  6323. }).withArguments('minmax2d', 'minValue', 'maxValue');
  6324. const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6325. 'GREYSCALE': 0
  6326. }).withArguments('minmax2dRGB', 'minValue', 'maxValue');
  6327. // Nightvision
  6328. const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6329. 'GREYSCALE': 0
  6330. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6331. const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6332. 'GREYSCALE': 1
  6333. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6334. //
  6335. // Utilities
  6336. //
  6337. // Handy conversion for Gaussian filters
  6338. // (symmetric kernel, approx. zero after 3*sigma)
  6339. const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
  6340. // Generate a 1D Gaussian kernel
  6341. const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
  6342. // Generate a 1D Box filter
  6343. const box = ksize => new Array(ksize).fill(1.0 / ksize);
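//
// Worked examples: ksize2sigma(9) = max(1, 9/6) = 1.5, i.e. the 9-tap kernel spans about 3 sigma
// on each side; gaussian(9) is then a 9-tap Gaussian kernel with sigma 1.5 (see Utils.gaussianKernel);
// box(3) yields [1/3, 1/3, 1/3].
//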
  6344. /**
  6345. * SpeedyProgramGroupFilters
  6346. * Image filtering
  6347. */
  6348. class SpeedyProgramGroupFilters extends SpeedyProgramGroup {
  6349. /**
  6350. * Class constructor
  6351. * @param {SpeedyGPU} gpu
  6352. */
  6353. constructor(gpu) {
  6354. super(gpu);
  6355. this
  6356. // convert to greyscale
  6357. .declare('rgb2grey', rgb2grey)
  6358. // median filters
  6359. .declare('median3', median[3]) // 3x3 window
  6360. .declare('median5', median[5]) // 5x5 window
  6361. .declare('median7', median[7]) // 7x7 window
  6362. // 2D convolution
  6363. .declare('convolution3', filters_convolution[3]) // 3x3 kernel
  6364. .declare('convolution5', filters_convolution[5]) // 5x5 kernel
  6365. .declare('convolution7', filters_convolution[7]) // 7x7 kernel
  6366. // 1D separable convolution
  6367. .declare('convolution3x', convolutionX[3]) // 1x3 kernel
  6368. .declare('convolution3y', convolutionY[3]) // 3x1 kernel
  6369. .declare('convolution5x', convolutionX[5]) // 1x5 kernel
  6370. .declare('convolution5y', convolutionY[5]) // 5x1 kernel
6371. .declare('convolution7x', convolutionX[7]) // 1x7 kernel
.declare('convolution7y', convolutionY[7]) // 7x1 kernel
.declare('convolution9x', convolutionX[9]) // 1x9 kernel
.declare('convolution9y', convolutionY[9]) // 9x1 kernel
.declare('convolution11x', convolutionX[11]) // 1x11 kernel
.declare('convolution11y', convolutionY[11]) // 11x1 kernel
.declare('convolution13x', convolutionX[13]) // 1x13 kernel
.declare('convolution13y', convolutionY[13]) // 13x1 kernel
.declare('convolution15x', convolutionX[15]) // 1x15 kernel
.declare('convolution15y', convolutionY[15]) // 15x1 kernel
6372. // normalize image
6373. .declare('normalizeGreyscale', normalizeGreyscale)
.declare('normalizeColored', normalizeColored)
6374. // nightvision
6375. .declare('nightvision', nightvision)
.declare('nightvisionGreyscale', nightvisionGreyscale)
.declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31)))
.declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31)))
.declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63)))
.declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63)))
.declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255)))
.declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
  6376. // gaussian: separable kernels
  6377. // see also: http://dev.theomader.com/gaussian-kernel-calculator/
  6378. .declare('gaussian3x', (0,convolution.convX)([0.25, 0.5, 0.25])) // sigma ~ 1.0
6379. .declare('gaussian3y', (0,convolution.convY)([0.25, 0.5, 0.25]))
.declare('gaussian5x', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])) // sigma ~ 1.0
6380. .declare('gaussian5y', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
.declare('gaussian7x', (0,convolution.convX)(gaussian(7)))
.declare('gaussian7y', (0,convolution.convY)(gaussian(7)))
.declare('gaussian9x', (0,convolution.convX)(gaussian(9)))
.declare('gaussian9y', (0,convolution.convY)(gaussian(9)))
.declare('gaussian11x', (0,convolution.convX)(gaussian(11)))
.declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
  6381. // box filter: separable kernels
6382. .declare('box3x', (0,convolution.convX)(box(3)))
.declare('box3y', (0,convolution.convY)(box(3)))
.declare('box5x', (0,convolution.convX)(box(5)))
.declare('box5y', (0,convolution.convY)(box(5)))
.declare('box7x', (0,convolution.convX)(box(7)))
.declare('box7y', (0,convolution.convY)(box(7)))
.declare('box9x', (0,convolution.convX)(box(9)))
.declare('box9y', (0,convolution.convY)(box(9)))
.declare('box11x', (0,convolution.convX)(box(11)))
.declare('box11y', (0,convolution.convY)(box(11)));
  6383. }
  6384. }
  6385. // EXTERNAL MODULE: ./src/core/speedy-namespace.js
  6386. var speedy_namespace = __nested_webpack_require_314174__(6634);
  6387. ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
  6388. /*
  6389. * speedy-vision.js
  6390. * GPU-accelerated Computer Vision for JavaScript
  6391. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6392. *
  6393. * Licensed under the Apache License, Version 2.0 (the "License");
  6394. * you may not use this file except in compliance with the License.
  6395. * You may obtain a copy of the License at
  6396. *
  6397. * http://www.apache.org/licenses/LICENSE-2.0
  6398. *
  6399. * Unless required by applicable law or agreed to in writing, software
  6400. * distributed under the License is distributed on an "AS IS" BASIS,
  6401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6402. * See the License for the specific language governing permissions and
  6403. * limitations under the License.
  6404. *
  6405. * speedy-descriptordb.js
  6406. * A database of binary descriptors in video memory
  6407. */
  6408. //
  6409. // A database of binary descriptors is a texture that stores
  6410. // a set of (descriptor: uint8_t[]) entries.
  6411. //
  6412. /** @type {number} we use RGBA8 textures to store the descriptors */
  6413. const DESCRIPTORDB_BYTESPERPIXEL = 4;
  6414. /** @type {number} texture size goes up to 16 MB */
6415. const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (the WebGL2 / OpenGL ES 3.0 spec requires MAX_TEXTURE_SIZE to be at least 2048)
  6416. /**
  6417. * Utility for generating a database of binary descriptors in video memory
  6418. */
  6419. class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q {
  6420. /**
  6421. * Create a database of binary descriptors
  6422. * @param {SpeedyTexture} texture output texture
  6423. * @param {Uint8Array[]} descriptors binary descriptors
  6424. * @param {number} descriptorSize in bytes, a multiple of 4
  6425. * @returns {SpeedyTexture} texture
  6426. */
  6427. static create(texture, descriptors, descriptorSize) {
  6428. utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
  6429. const numberOfDescriptors = descriptors.length;
  6430. const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
  6431. // find an appropriate texture size
  6432. const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
  6433. const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
  6434. // setup texture parameters
  6435. const stride = 1 << log2stride;
  6436. const width = stride,
  6437. height = stride; // we use powers-of-two
  6438. // are we within storage capacity?
  6439. const capacity = width * height / pixelsPerDescriptor;
  6440. if (numberOfDescriptors > capacity) throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
  6441. // create texture data
  6442. const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
  6443. for (let i = 0; i < numberOfDescriptors; i++) {
  6444. const byteOffset = i * descriptorSize;
  6445. const descriptor = descriptors[i];
  6446. // validate input
  6447. utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
  6448. utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
  6449. // write data
  6450. data.set(descriptor, byteOffset);
  6451. }
  6452. // log data for further study
  6453. const MEGABYTE = 1048576;
  6454. const totalSize = numberOfDescriptors * descriptorSize;
  6455. utils/* Utils */.A.log(`Creating a ${width}x${height} database of ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors ` + `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`);
  6456. // upload to the texture
  6457. texture.resize(width, height);
  6458. texture.upload(data);
  6459. return texture;
  6460. }
  6461. }
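//
// Sizing example for create(): 1000 descriptors of 32 bytes each occupy 8 RGBA8 pixels apiece,
// i.e. 8000 pixels. n = log2(8000)/2 = 6.48, so log2stride = 7 and the texture is 128x128,
// which holds up to 128*128/8 = 2048 descriptors.
//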
  6462. ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
  6463. /*
  6464. * speedy-vision.js
  6465. * GPU-accelerated Computer Vision for JavaScript
  6466. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6467. *
  6468. * Licensed under the Apache License, Version 2.0 (the "License");
  6469. * you may not use this file except in compliance with the License.
  6470. * You may obtain a copy of the License at
  6471. *
  6472. * http://www.apache.org/licenses/LICENSE-2.0
  6473. *
  6474. * Unless required by applicable law or agreed to in writing, software
  6475. * distributed under the License is distributed on an "AS IS" BASIS,
  6476. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6477. * See the License for the specific language governing permissions and
  6478. * limitations under the License.
  6479. *
  6480. * speedy-lsh.js
  6481. * GPU-based LSH tables for fast matching of binary descriptors
  6482. */
  6483. /*
  6484. * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
  6485. * ------------------------------------------------
  6486. *
  6487. * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
  6488. * Indices of keypoint descriptors are stored in several tables, each with many
  6489. * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
  6490. * size to match the keypoints.
  6491. *
  6492. * Buckets in video memory may get full. Wouldn't it be cool if we could use a
  6493. * probabilistic approach to let us work within their storage capacity?
  6494. *
  6495. * Let there be n buckets in a table, each with storage capacity c (holding
  6496. * up to c elements). Buckets are numbered from 0 to n-1.
  6497. *
  6498. * We pick uniformly a random bucket to store a new element in the table. Let
  6499. * X be the chosen bucket. The probability that we'll store the new element in
  6500. * any particular bucket k is:
  6501. *
  6502. * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
  6503. *
  6504. * On average, each new element stored in the table inserts 1/n of an element
  6505. * in each bucket. If we add m new elements to the table, each bucket receives
  6506. * m/n elements, on average(*).
  6507. *
  6508. * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
  6509. * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
6510. * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
  6511. *
  6512. * Now let Yi be the number of elements inserted in bucket i in m additions to
  6513. * the table. We model Yi as Poisson(m/n), since on average, m additions to
  6514. * the table result in m/n new elements being inserted in bucket i. Buckets
  6515. * are picked independently. Hence, for all i, the probability that we insert
  6516. * q elements in bucket i in m additions to the table is:
  6517. *
  6518. * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
  6519. *
  6520. * Given that each bucket has storage capacity c, we require Yi <= c with a
  6521. * high probability p (say, p = 0.99). This means that, in m additions, we
  6522. * don't want to exceed the capacity c with high probability. So, let us find
  6523. * a (large) value of m such that:
  6524. *
  6525. * P(Yi <= c) >= p
  6526. *
  6527. * Sounds good! We can find the largest matching m using binary search.
  6528. *
  6529. * I don't think we need to enforce a high probability that ALL buckets stay
  6530. * within their capacity - n is large, we need to use the available space, and
  6531. * we have multiple tables anyway.
  6532. *
  6533. * In practice, the assumption that buckets are picked uniformly doesn't hold:
  6534. * keypoints that are nearby tend to have similar descriptors and buckets are
  6535. * picked according to those descriptors. Still, this model works well enough
  6536. * in practice and it is simple! That's what I like about it!
  6537. *
  6538. * ... now, how I actually do the matching is the theme of the next episode!
  6539. */
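//
// A minimal sketch of the capacity search described above (illustrative only - the bundle's own
// findTableCapacity(), used below, may be implemented differently). With n = 2^hashSize buckets of
// capacity c, we look for the largest m such that P(Poisson(m/n) <= c) >= p, via binary search on m:
//
// function poissonCDF(lambda, c) {
//     let term = Math.exp(-lambda), sum = term; // P(Y = 0)
//     for (let q = 1; q <= c; q++) { term *= lambda / q; sum += term; } // accumulate P(Y = q)
//     return sum; // P(Y <= c)
// }
// function tableCapacitySketch(hashSize, bucketCapacity, p) {
//     const n = 1 << hashSize;
//     let lo = 1, hi = 64 * n * bucketCapacity; // generous upper bound
//     while (lo < hi) { // invariant: m = lo satisfies P(Y <= c) >= p
//         const m = Math.ceil((lo + hi) / 2);
//         if (poissonCDF(m / n, bucketCapacity) >= p) lo = m; else hi = m - 1;
//     }
//     return lo; // the largest m that keeps bucket overflow unlikely
// }
//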
  6540. /** @type {number} Default number of tables in a LSH data structure */
  6541. const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
  6542. /** @type {number} Default number of bits of a hash */
  6543. const LSH_DEFAULT_HASH_SIZE = 15;
  6544. /** @type {number[]} Acceptable number of tables for a LSH data structure */
  6545. const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
  6546. /** @type {number[]} Acceptable values for hashSize, in bits */
  6547. const LSH_ACCEPTABLE_HASH_SIZES = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20];
  6548. /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
  6549. const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32, 64];
  6550. /**
  6551. * @typedef {Object} LSHProfile LSH profile
  6552. * @property {string} name name of the profile
  6553. * @property {number} capacity maximum number of keypoints that can be stored in such a table
  6554. * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
  6555. * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
  6556. * @property {number} bucketCapacity maximum number of entries of a bucket of a table
  6557. */
  6558. /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
  6559. const generateLSHProfiles = (t, h, p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [{
  6560. name: 'x-small',
  6561. bucketCapacity: 1,
  6562. tableCount: t,
  6563. hashSize: h,
  6564. capacity: findTableCapacity(h, 1, p)
  6565. }, {
  6566. name: 'small',
  6567. bucketCapacity: 2,
  6568. tableCount: t,
  6569. hashSize: h,
  6570. capacity: findTableCapacity(h, 2, p)
  6571. }, {
  6572. name: 'small-plus',
  6573. bucketCapacity: 3,
  6574. tableCount: t,
  6575. hashSize: h,
  6576. capacity: findTableCapacity(h, 3, p)
  6577. }, {
  6578. name: 'medium',
  6579. bucketCapacity: 4,
  6580. tableCount: t,
  6581. hashSize: h,
  6582. capacity: findTableCapacity(h, 4, p)
  6583. }, {
  6584. name: 'medium-plus',
  6585. bucketCapacity: 5,
  6586. tableCount: t,
  6587. hashSize: h,
  6588. capacity: findTableCapacity(h, 5, p)
  6589. }, {
  6590. name: 'large',
  6591. bucketCapacity: 6,
  6592. tableCount: t,
  6593. hashSize: h,
  6594. capacity: findTableCapacity(h, 6, p)
  6595. }, {
  6596. name: 'x-large',
  6597. bucketCapacity: 8,
  6598. tableCount: t,
  6599. hashSize: h,
  6600. capacity: findTableCapacity(h, 8, p)
  6601. }];
  6602. //
  6603. // LSH hash sequences: random bits in increasing order
  6604. // We generate a few sequences (one for each table) supporting up to 16 hash bits
  6605. // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
  6606. //
  6607. /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
  6608. /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
  6609. /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
  6610. /** @type {number} maximum number of elements of a sequence */
  6611. const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
  6612. /** @type {number} number of sequences in a BitSequences object */
  6613. const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
  6614. /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
  6615. const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i + 1) * LSH_SEQUENCE_MAXLEN).sort()), seq);
  6616. /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
  6617. const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray((i + 1) * LSH_SEQUENCE_MAXLEN - p, (i + 1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)), seq);
  6618. /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
  6619. const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p, o) => (p[o] = f(o), p), {}))(h => ({
  6620. // for 256-bit descriptors
  6621. 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  6622. // for 512-bit descriptors
  6623. 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN))))
  6624. }));
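//
// Shape of LSH_SEQUENCES: indexed first by hashSize, then by descriptor size in bytes. For example,
// LSH_SEQUENCES[15][32] is a Uint32Array of LSH_SEQUENCE_COUNT (32) x LSH_SEQUENCE_MAXLEN (20)
// entries; each 20-entry subsequence holds 15 bit indices in [0, 256) in ascending order, followed
// by 5 padding entries (0xBADCAFE), which sort to the end because of their large value.
//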
  6625. //
  6626. // Misc
  6627. //
  6628. /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
  6629. const LSH_BYTESPERPIXEL = 4;
  6630. /** @type {function(number): number} next power of 2 */
  6631. const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  6632. /**
  6633. * GPU-based LSH tables for fast matching of binary descriptors
  6634. */
  6635. class SpeedyLSH {
  6636. /**
  6637. * Constructor
  6638. * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
  6639. * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
  6640. * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
  6641. * @param {number} [tableCount] number of LSH tables, preferably a power of two
  6642. * @param {number} [hashSize] number of bits of a hash of a descriptor
  6643. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6644. */
  6645. constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
  6646. const descriptorCount = descriptors.length;
  6647. const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
  6648. const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
  6649. // validate input
  6650. utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
  6651. utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
  6652. utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
  6653. utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
  6654. utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
  6655. /** @type {LSHProfile} LSH profile */
  6656. this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
  6657. /** @type {number} descriptor size, in bytes */
  6658. this._descriptorSize = descriptorSize;
  6659. /** @type {number} number of descriptors */
  6660. this._descriptorCount = descriptorCount;
  6661. /** @type {BitSequences} bit sequences */
  6662. this._sequences = this._pickSequences(this._descriptorSize);
  6663. /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
  6664. this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
  6665. /** @type {SpeedyTexture} a storage of descriptors */
  6666. this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
  6667. }
  6668. /**
  6669. * Descriptor size, in bytes
  6670. * @returns {number}
  6671. */
  6672. get descriptorSize() {
  6673. return this._descriptorSize;
  6674. }
  6675. /**
  6676. * Number of descriptors stored in this LSH data structure
  6677. * @returns {number}
  6678. */
  6679. get descriptorCount() {
  6680. return this._descriptorCount;
  6681. }
  6682. /**
  6683. * LSH bit sequences
  6684. * @returns {BitSequences}
  6685. */
  6686. get sequences() {
  6687. return this._sequences;
  6688. }
  6689. /**
  6690. * Number of bits that make a hash
  6691. * @returns {number}
  6692. */
  6693. get hashSize() {
  6694. return this._profile.hashSize;
  6695. }
  6696. /**
  6697. * Maximum number of descriptors that can be stored in a bucket of a table
  6698. * @returns {number}
  6699. */
  6700. get bucketCapacity() {
  6701. return this._profile.bucketCapacity;
  6702. }
  6703. /**
  6704. * How many buckets per table do we have?
  6705. * @returns {number}
  6706. */
  6707. get bucketsPerTable() {
  6708. return 1 << this._profile.hashSize;
  6709. }
  6710. /**
  6711. * Number of LSH tables
  6712. * @returns {number}
  6713. */
  6714. get tableCount() {
  6715. return this._profile.tableCount;
  6716. }
  6717. /**
  6718. * Size of one LSH table, in bytes
  6719. * @returns {number}
  6720. */
  6721. get tableSize() {
  6722. return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
  6723. }
  6724. /**
  6725. * Size of all LSH tables combined, in bytes
  6726. * @returns {number}
  6727. */
  6728. get totalSize() {
  6729. // actually, the total memory in VRAM may be a bit larger than
  6730. // this value, depending on the actual size of the texture
  6731. return this.tableCount * this.tableSize;
  6732. }
  6733. /**
  6734. * LSH tables texture
  6735. * @returns {SpeedyDrawableTexture}
  6736. */
  6737. get tables() {
  6738. return this._tables;
  6739. }
  6740. /**
  6741. * A collection of descriptors
  6742. * @returns {SpeedyDrawableTexture}
  6743. */
  6744. get descriptorDB() {
  6745. return this._descriptorDB;
  6746. }
  6747. /**
  6748. * Pick the appropriate LSH sequences for a particular descriptor size
  6749. * @param {number} descriptorSize in bytes
  6750. * @returns {BitSequences}
  6751. */
  6752. _pickSequences(descriptorSize) {
  6753. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
  6754. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
  6755. return LSH_SEQUENCES[this.hashSize][descriptorSize];
  6756. }
  6757. /**
  6758. * Create LSH tables
  6759. * @param {SpeedyTexture} texture output texture
  6760. * @param {BitSequences} sequences bit sequences
  6761. * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
  6762. * @param {number} descriptorSize in bytes
  6763. * @returns {SpeedyTexture} texture
  6764. */
  6765. _createStaticTables(texture, sequences, descriptors, descriptorSize) {
  6766. const END_OF_LIST = 0xFFFFFFFF;
  6767. const profileName = this._profile.name;
  6768. const tableCapacity = this._profile.capacity;
  6769. const tableCount = this.tableCount;
  6770. const bucketsPerTable = this.bucketsPerTable;
  6771. const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
  6772. const hashSize = this.hashSize;
  6773. const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
  6774. const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
  6775. const textureHeight = Math.ceil(numberOfPixels / textureWidth);
  6776. const numberOfDescriptors = descriptors.length;
  6777. // validate input
  6778. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
  6779. utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
  6780. utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
  6781. // log
  6782. const MEGABYTE = 1048576;
6783. utils/* Utils */.A.log(`Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` + `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` + `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB).`);
  6784. // warn the user if there are too many descriptors
  6785. if (numberOfDescriptors > tableCapacity) {
  6786. const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
  6787. utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
  6788. }
  6789. // create empty LSH tables
  6790. const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
  6791. const bytes = new Uint8Array(buffer).fill(0xFF);
  6792. const data = new DataView(buffer);
  6793. // shuffle the descriptors...
  6794. // it seems like a good idea to handle collisions of similar descriptors,
  6795. // which may be located next to each other in the array
  6796. const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
  6797. // for each descriptor
  6798. // do everything in little-endian format!
  6799. const numberOfDiscardedDescriptorsPerTable = new Array(tableCount).fill(0);
  6800. for (let i = 0; i < numberOfDescriptors; i++) {
  6801. const descriptorIndex = permutation[i]; //i;
  6802. const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
  6803. // for each table
  6804. for (let table = 0; table < tableCount; table++) {
  6805. // compute hash & memory addresses
  6806. const hash = hashes[table];
  6807. const tableByteOffset = table * bucketsPerTable * bucketSize;
  6808. const bucketByteOffset = tableByteOffset + hash * bucketSize;
  6809. // find the end of the list
  6810. let index = END_OF_LIST;
  6811. for (let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
  6812. const byteOffset = bucketByteOffset + entryByteOffset;
  6813. index = data.getUint32(byteOffset, true);
  6814. // add the keypoint
  6815. if (index == END_OF_LIST) {
  6816. data.setUint32(byteOffset, descriptorIndex, true);
  6817. break;
  6818. }
  6819. }
  6820. // note: if the bucket is full, we just discard the entry :\
  6821. // we give this event a probabilistic treatment (see above),
  6822. // so it happens with low probability
  6823. if (index != END_OF_LIST) numberOfDiscardedDescriptorsPerTable[table]++;
  6824. }
  6825. }
  6826. // log data for further study
  6827. const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
  6828. const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
  6829. utils/* Utils */.A.log(`When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` + `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` + `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` + `Minimum: ${Math.min(...profile).toFixed(2)}%. ` + `Table capacity: ${tableCapacity}.`);
  6830. // upload the LSH tables to the GPU
  6831. texture.resize(textureWidth, textureHeight);
  6832. texture.upload(bytes);
  6833. return texture;
  6834. }
  6835. /**
  6836. * Pick bits from a binary descriptor
  6837. * @param {Uint8Array} descriptor a single descriptor
  6838. * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
  6839. * @returns {number[]} hash code for each table
  6840. */
  6841. _hashCodes(descriptor, sequences) {
  6842. const tableCount = this.tableCount;
  6843. const hashSize = this.hashSize;
  6844. const bucketsPerTable = this.bucketsPerTable;
  6845. const hashes = new Array(tableCount);
  6846. //const descriptorSize = descriptor.length;
  6847. // just to be sure...
  6848. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN && sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount);
  6849. // for each table
  6850. for (let table = 0; table < tableCount; table++) {
  6851. const offset = LSH_SEQUENCE_MAXLEN * table;
  6852. // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
  6853. let hash = 0;
  6854. for (let i = 0; i < hashSize; i++) {
  6855. let bit = sequences[offset + i];
  6856. let b = bit >>> 3;
  6857. let m = 1 << (bit & 7);
  6858. //Utils.assert(b < descriptorSize);
  6859. hash = hash << 1 | (descriptor[b] & m) != 0;
  6860. }
  6861. // validate & store
  6862. utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
  6863. hashes[table] = hash;
  6864. }
  6865. // done!
  6866. return hashes;
  6867. }
  6868. }
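// Editorial usage sketch (not part of the bundle): building an LSH structure
// from 256-bit binary descriptors. `lshTexture`, `dbTexture` and `descriptors`
// are assumed to exist already: the two textures are SpeedyTextures managed
// elsewhere (e.g. obtained from a texture pool) and each descriptor is a
// 32-byte Uint8Array with no duplicates.
//
// const lsh = new SpeedyLSH(lshTexture, dbTexture, descriptors);
// console.log(lsh.tableCount, lsh.hashSize, lsh.bucketCapacity);
// console.log(`LSH tables occupy ~${(lsh.totalSize / 1048576).toFixed(2)} MB`);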
  6869. /**
  6870. * Compute P(X <= k), where X ~ Poisson(lambda)
  6871. * @param {number} lambda positive number
  6872. * @param {number} k non-negative integer
  6873. * @returns {number}
  6874. */
  6875. function cumulativePoisson(lambda, k) {
  6876. const exp = Math.exp(-lambda);
  6877. let sum = 1,
  6878. fat = 1,
  6879. pow = 1;
  6880. // k should be small!!!
  6881. for (let i = 1; i <= k; i++) sum += (pow *= lambda) / (fat *= i);
  6882. return sum * exp;
  6883. }
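// For reference, the function above evaluates the Poisson CDF
// P(X <= k) = exp(-lambda) * sum_{i=0..k} lambda^i / i!.
// Quick sanity check (editorial sketch): with lambda = 1 and k = 0 it returns
// exp(-1), roughly 0.3679, and it approaches 1 as k grows.
//
// console.log(cumulativePoisson(1, 0));  // ~0.3679
// console.log(cumulativePoisson(1, 10)); // ~1.0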
  6884. /**
  6885. * Find the maximum number of keypoint descriptors that a table can hold
  6886. * @param {number} hashSize positive integer
  6887. * @param {number} bucketCapacity positive integer
  6888. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6889. * @return {number} optimal table capacity
  6890. */
  6891. function findTableCapacity(hashSize, bucketCapacity, probability = 0.99) {
  6892. const n = 1 << hashSize; // number of buckets
  6893. const c = bucketCapacity;
  6894. const p = probability;
  6895. let l = 1,
  6896. r = n * c; // watch for overflow!
  6897. let m = 0,
  6898. pm = 0;
  6899. // binary search
  6900. while (l < r) {
  6901. m = Math.floor((l + r) / 2);
  6902. pm = cumulativePoisson(m / n, c);
6903. if (pm > p) //if(1-pm < 1-p)
6904. l = m + 1;
6905. else r = m;
  6906. }
  6907. return m;
  6908. }
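// Editorial usage sketch: estimate how many descriptors a table with
// 2^hashSize buckets of a given capacity can hold before discards become
// likely under the Poisson model above. The parameters below are purely
// illustrative, not the library defaults.
//
// const capacity = findTableCapacity(16, 4, 0.99);
// console.log(`a 16-bit table with 4-entry buckets holds ~${capacity} descriptors`);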
  6909. ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
  6910. /*
  6911. * speedy-vision.js
  6912. * GPU-accelerated Computer Vision for JavaScript
  6913. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6914. *
  6915. * Licensed under the Apache License, Version 2.0 (the "License");
  6916. * you may not use this file except in compliance with the License.
  6917. * You may obtain a copy of the License at
  6918. *
  6919. * http://www.apache.org/licenses/LICENSE-2.0
  6920. *
  6921. * Unless required by applicable law or agreed to in writing, software
  6922. * distributed under the License is distributed on an "AS IS" BASIS,
  6923. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6924. * See the License for the specific language governing permissions and
  6925. * limitations under the License.
  6926. *
  6927. * keypoints.js
  6928. * Facade for various keypoint detection algorithms
  6929. */
  6930. // FAST corner detector
  6931. const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl').withDefines({
  6932. 'FAST_TYPE': 916
  6933. }).withArguments('corners', 'pyramid', 'lod', 'threshold');
  6934. // Harris corner detector
  6935. const harris = [1, 3, 5, 7].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl').withDefines({
  6936. 'WINDOW_SIZE': win
  6937. }).withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian'), obj), {});
  6938. const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl').withArguments('corners', 'iterationNumber');
  6939. const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl').withArguments('corners', 'maxScore', 'quality');
  6940. // Subpixel refinement
  6941. const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6942. 'METHOD': 0
  6943. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6944. const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6945. 'METHOD': 1
  6946. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6947. const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6948. 'METHOD': 2
  6949. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6950. const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6951. 'METHOD': 3
  6952. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6953. // Scale refinement
  6954. const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6955. 'METHOD': 0
  6956. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6957. const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6958. 'METHOD': 1
  6959. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  6960. // Pixel allocation
  6961. const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6962. const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6963. const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl').withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6964. // ORB descriptors
  6965. const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl').withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
  6966. const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl').withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6967. // Non-maximum suppression
  6968. const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6969. 'MULTISCALE': 0
  6970. }).withArguments('image', 'lodStep');
  6971. const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6972. 'MULTISCALE': 1
  6973. }).withArguments('image', 'lodStep');
  6974. const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl').withArguments('corners');
  6975. const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6976. 'USE_LAPLACIAN': 1
  6977. }).withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
  6978. const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6979. 'USE_LAPLACIAN': 0
  6980. }).withArguments('corners', 'pyramid', 'lodStep');
  6981. const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl').withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
  6982. // Keypoint tracking & optical-flow
  6983. const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl').withDefines({
  6984. 'WINDOW_SIZE': win
  6985. }).withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {});
  6986. const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl').withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6987. // Brute-force matching
  6988. const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6989. 'ENCODE_FILTERS': 0
  6990. });
  6991. const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6992. 'ENCODE_FILTERS': 1
  6993. });
  6994. const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  6995. const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  6996. 'DESCRIPTOR_SIZE': 32,
  6997. 'NUMBER_OF_KEYPOINTS_PER_PASS': 16
  6998. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  6999. const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  7000. 'DESCRIPTOR_SIZE': 64,
  7001. 'NUMBER_OF_KEYPOINTS_PER_PASS': 8
  7002. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7003. // LSH-based KNN matching
  7004. const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7005. 'ENCODE_FILTERS': 0
  7006. });
  7007. const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7008. 'ENCODE_FILTERS': 1
  7009. });
  7010. const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => (obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => (obj[hashSize] = [0, 1, 2].reduce((obj, level) => (obj[level] = (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl').withDefines({
  7011. 'DESCRIPTOR_SIZE': descriptorSize,
  7012. 'HASH_SIZE': hashSize,
  7013. 'LEVEL': level,
  7014. 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
  7015. 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT
  7016. }).withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {}), obj), {}), obj), {});
  7017. const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7018. // Keypoint sorting
  7019. const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7020. 'STAGE': 1
  7021. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7022. const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7023. 'STAGE': 2
  7024. }).withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
  7025. const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7026. 'STAGE': 3
  7027. }).withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
  7028. // Keypoint mixing
  7029. const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7030. 'STAGE': 1
  7031. }).withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
  7032. const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7033. 'STAGE': 2
  7034. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7035. const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7036. 'STAGE': 3
  7037. }).withArguments('array', 'blockSize');
  7038. const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7039. 'STAGE': 5
  7040. }).withArguments('array');
  7041. const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7042. 'STAGE': 4
  7043. }).withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7044. // Keypoint encoding
  7045. const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7046. 'FS_OUTPUT_TYPE': 2,
  7047. 'STAGE': 1
  7048. }).withArguments('corners');
  7049. const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl').withDefines({
  7050. 'FS_OUTPUT_TYPE': 2,
  7051. 'FS_USE_CUSTOM_PRECISION': 1,
  7052. 'STAGE': 2
  7053. }).withArguments('lookupTable', 'blockSize', 'width', 'height');
  7054. const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7055. 'STAGE': -1
  7056. }).withArguments('lookupTable');
  7057. const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl').withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
  7058. const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl').withArguments('corners', 'imageSize');
  7059. const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl').withDefines({
  7060. 'MAX_ITERATIONS': 6
  7061. }) // dependent texture reads :(
  7062. .withArguments('offsetsImage', 'imageSize');
  7063. const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl').withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7064. const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl').withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7065. const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl').withArguments();
  7066. const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl').withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7067. const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl').withDefines({
  7068. // UBOs can hold at least 16KB of data;
  7069. // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
  7070. // according to the GL ES 3 reference.
  7071. // Each keypoint uses 16 bytes (vec4)
  7072. 'BUFFER_SIZE': 1024 //16384 / 16
  7073. }).withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
  7074. // Geometric transformations
  7075. const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl').withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7076. // Keypoint filters
  7077. const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl').withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7078. const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl').withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7079. const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7080. 'DESCRIPTOR_SIZE': 32
  7081. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7082. const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7083. 'DESCRIPTOR_SIZE': 64
  7084. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7085. // Other utilities
  7086. const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl').withDefines({
  7087. 'PERMUTATION_MAXLEN': 2048
  7088. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7089. const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl').withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7090. /**
  7091. * SpeedyProgramGroupKeypoints
  7092. * Keypoint detection
  7093. */
  7094. class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup {
  7095. /**
  7096. * Class constructor
  7097. * @param {SpeedyGPU} gpu
  7098. */
  7099. constructor(gpu) {
  7100. super(gpu);
  7101. this
  7102. //
  7103. // FAST corner detector
  7104. //
  7105. .declare('fast9_16', fast9_16, Object.assign({}, this.program.usesPingpongRendering()))
  7106. //
  7107. // Harris corner detector
  7108. //
  7109. .declare('harris1', harris[1], Object.assign({}, this.program.usesPingpongRendering())).declare('harris3', harris[3], Object.assign({}, this.program.usesPingpongRendering())).declare('harris5', harris[5], Object.assign({}, this.program.usesPingpongRendering())).declare('harris7', harris[7], Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreFindMax', harrisScoreFindMax, Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreCutoff', harrisScoreCutoff)
  7110. //
  7111. // Subpixel refinement
  7112. //
  7113. .declare('subpixelQuadratic1d', subpixelQuadratic1d).declare('subpixelTaylor2d', subpixelTaylor2d).declare('subpixelBicubic', subpixelBicubic).declare('subpixelBilinear', subpixelBilinear)
  7114. //
  7115. // Scale refinement
  7116. //
  7117. .declare('refineScaleLoG', refineScaleLoG).declare('refineScaleFAST916', refineScaleFAST916)
  7118. //
  7119. // Pixel allocation
  7120. //
  7121. .declare('allocateDescriptors', allocateDescriptors).declare('allocateExtra', allocateExtra).declare('transferToExtra', transferToExtra)
  7122. //
  7123. // ORB descriptors
  7124. //
  7125. .declare('orbDescriptor', orbDescriptor).declare('orbOrientation', orbOrientation)
  7126. //
  7127. // Non-maximum suppression
  7128. //
  7129. .declare('nonmax', nonMaxSuppression).declare('pyrnonmax', multiscaleNonMaxSuppression).declare('nonmaxSpace', nonmaxSpace).declare('nonmaxScale', nonmaxScale).declare('nonmaxScaleSimple', nonmaxScaleSimple).declare('laplacian', laplacian)
  7130. //
  7131. // LK optical-flow
  7132. //
  7133. .declare('lk21', lk[21], Object.assign({}, this.program.usesPingpongRendering())).declare('lk19', lk[19], Object.assign({}, this.program.usesPingpongRendering())).declare('lk17', lk[17], Object.assign({}, this.program.usesPingpongRendering())).declare('lk15', lk[15], Object.assign({}, this.program.usesPingpongRendering())).declare('lk13', lk[13], Object.assign({}, this.program.usesPingpongRendering())).declare('lk11', lk[11], Object.assign({}, this.program.usesPingpongRendering())).declare('lk9', lk[9], Object.assign({}, this.program.usesPingpongRendering())).declare('lk7', lk[7], Object.assign({}, this.program.usesPingpongRendering())).declare('lk5', lk[5], Object.assign({}, this.program.usesPingpongRendering())).declare('lk3', lk[3], Object.assign({}, this.program.usesPingpongRendering())).declare('transferFlow', transferFlow)
  7134. //
  7135. // Brute-force KNN matching
  7136. //
  7137. .declare('bfMatcherInitCandidates', bfMatcherInitCandidates).declare('bfMatcherInitFilters', bfMatcherInitFilters).declare('bfMatcherTransfer', bfMatcherTransfer, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher32', bfMatcher32, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher64', bfMatcher64, Object.assign({}, this.program.usesPingpongRendering()))
  7138. //
  7139. // LSH-based KNN matching
  7140. //
  7141. .declare('lshKnnInitCandidates', lshKnnInitCandidates).declare('lshKnnInitFilters', lshKnnInitFilters).declare('lshKnnTransfer', lshKnnTransfer, Object.assign({}, this.program.usesPingpongRendering()))
  7142. //
  7143. // Keypoint sorting
  7144. //
  7145. .declare('sortCreatePermutation', sortCreatePermutation).declare('sortMergePermutation', sortMergePermutation, Object.assign({}, this.program.usesPingpongRendering())).declare('sortApplyPermutation', sortApplyPermutation)
  7146. //
  7147. // Keypoint mixing
  7148. //
  7149. .declare('mixKeypointsPreInit', mixKeypointsPreInit).declare('mixKeypointsInit', mixKeypointsInit).declare('mixKeypointsSort', mixKeypointsSort, Object.assign({}, this.program.usesPingpongRendering())).declare('mixKeypointsView', mixKeypointsView).declare('mixKeypointsApply', mixKeypointsApply)
  7150. //
  7151. // Keypoint encoders
  7152. //
  7153. .declare('encodeNullKeypoints', encodeNullKeypoints).declare('encodeKeypoints', encodeKeypoints).declare('initLookupTable', initLookupTable).declare('sortLookupTable', sortLookupTable, Object.assign({}, this.program.usesPingpongRendering())).declare('viewLookupTable', viewLookupTable).declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets).declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointPositions', encodeKeypointPositions, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointProperties', encodeKeypointProperties).declare('transferOrientation', transferOrientation).declare('uploadKeypoints', uploadKeypoints, Object.assign({}, this.program.usesPingpongRendering()))
  7154. //
  7155. // Geometric transformations
  7156. //
  7157. .declare('applyHomography', applyHomography)
  7158. //
  7159. // Keypoint filters
  7160. //
  7161. .declare('clipBorder', clipBorder).declare('distanceFilter', distanceFilter).declare('hammingDistanceFilter32', hammingDistanceFilter32).declare('hammingDistanceFilter64', hammingDistanceFilter64)
  7162. //
  7163. // Other utilities
  7164. //
  7165. .declare('shuffle', shuffle).declare('clip', clip);
  7166. //
  7167. // LSH-based KNN matching
  7168. //
  7169. for (const descriptorSize of Object.keys(lshKnn)) {
  7170. for (const hashSize of Object.keys(lshKnn[descriptorSize])) {
  7171. for (const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
  7172. const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
  7173. this.declare(name, lshKnn[descriptorSize][hashSize][level], Object.assign({}, this.program.usesPingpongRendering()));
  7174. }
  7175. }
  7176. }
  7177. }
  7178. }
  7179. ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
  7180. /*
  7181. * speedy-vision.js
  7182. * GPU-accelerated Computer Vision for JavaScript
  7183. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7184. *
  7185. * Licensed under the Apache License, Version 2.0 (the "License");
  7186. * you may not use this file except in compliance with the License.
  7187. * You may obtain a copy of the License at
  7188. *
  7189. * http://www.apache.org/licenses/LICENSE-2.0
  7190. *
  7191. * Unless required by applicable law or agreed to in writing, software
  7192. * distributed under the License is distributed on an "AS IS" BASIS,
  7193. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7194. * See the License for the specific language governing permissions and
  7195. * limitations under the License.
  7196. *
  7197. * pyramids.js
  7198. * Image pyramids
  7199. */
  7200. //
  7201. // Shaders
  7202. //
  7203. const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
  7204. const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
  7205. /**
  7206. * SpeedyProgramGroupPyramids
  7207. * Image pyramids
  7208. */
  7209. class SpeedyProgramGroupPyramids extends SpeedyProgramGroup {
  7210. /**
  7211. * Class constructor
  7212. * @param {SpeedyGPU} gpu
  7213. */
  7214. constructor(gpu) {
  7215. super(gpu);
  7216. this
  7217. // upsampling & downsampling
  7218. .declare('upsample2', upsample2).declare('downsample2', downsample2)
  7219. // separable kernels for gaussian smoothing
  7220. // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
  7221. // pick a = 0.4 for gaussian approximation (sigma = 1)
  7222. .declare('smoothX', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('smoothY', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
  7223. /*
  7224. .declare('reduce', conv2D([
  7225. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
  7226. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7227. 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
  7228. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7229. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
  7230. ]))
  7231. */
  7232. // smoothing for 2x image
  7233. // same rules as above with sum(k) = 2
  7234. .declare('smoothX2', (0,convolution.convX)([0.1, 0.5, 0.8, 0.5, 0.1
  7235. // NOTE: this would saturate the image, but we apply it
  7236. // on a 2x upsampled version with lots of zero pixels
  7237. ])).declare('smoothY2', (0,convolution.convY)([0.1, 0.5, 0.8, 0.5, 0.1], 1.0 / 2.0));
  7238. }
  7239. }
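// Editorial check of the smoothing kernels declared above: with
// [c, b, a, b, c] = [0.05, 0.25, 0.4, 0.25, 0.05] the constraints hold,
// since a + 2c = 0.5 = 2b and a + 2b + 2c = 1; the 2x variant
// [0.1, 0.5, 0.8, 0.5, 0.1] sums to 2, compensating for the zero pixels
// introduced by the 2x upsampling.
//
// const k = [0.05, 0.25, 0.4, 0.25, 0.05];
// const sum = k.reduce((s, x) => s + x, 0);
// console.log(Math.abs(sum - 1) < 1e-9);                    // true
// console.log(Math.abs(k[2] + 2 * k[0] - 2 * k[1]) < 1e-9); // true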
  7240. ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
  7241. /*
  7242. * speedy-vision.js
  7243. * GPU-accelerated Computer Vision for JavaScript
  7244. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7245. *
  7246. * Licensed under the Apache License, Version 2.0 (the "License");
  7247. * you may not use this file except in compliance with the License.
  7248. * You may obtain a copy of the License at
  7249. *
  7250. * http://www.apache.org/licenses/LICENSE-2.0
  7251. *
  7252. * Unless required by applicable law or agreed to in writing, software
  7253. * distributed under the License is distributed on an "AS IS" BASIS,
  7254. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7255. * See the License for the specific language governing permissions and
  7256. * limitations under the License.
  7257. *
  7258. * transforms.js
  7259. * Geometric transformations
  7260. */
  7261. //
  7262. // Shaders
  7263. //
  7264. // Perspective warp
  7265. const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl').withArguments('image', 'inverseHomography');
  7266. // Resize image
  7267. const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7268. 'INTERPOLATION_METHOD': 0 // Nearest neighbors
  7269. }).withArguments('image');
  7270. const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7271. 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
  7272. }).withArguments('image');
  7273. // Additive mix (TODO create a new program group?)
  7274. const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl').withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
  7275. /**
  7276. * SpeedyProgramGroupTransforms
  7277. * Geometric transformations
  7278. */
  7279. class SpeedyProgramGroupTransforms extends SpeedyProgramGroup {
  7280. /**
  7281. * Class constructor
  7282. * @param {SpeedyGPU} gpu
  7283. */
  7284. constructor(gpu) {
  7285. super(gpu);
  7286. this.declare('warpPerspective', warpPerspective).declare('resizeNearest', resizeNearest).declare('resizeBilinear', resizeBilinear).declare('additiveMix', additiveMix);
  7287. }
  7288. }
  7289. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
  7290. /*
  7291. * speedy-vision.js
  7292. * GPU-accelerated Computer Vision for JavaScript
  7293. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7294. *
  7295. * Licensed under the Apache License, Version 2.0 (the "License");
  7296. * you may not use this file except in compliance with the License.
  7297. * You may obtain a copy of the License at
  7298. *
  7299. * http://www.apache.org/licenses/LICENSE-2.0
  7300. *
  7301. * Unless required by applicable law or agreed to in writing, software
  7302. * distributed under the License is distributed on an "AS IS" BASIS,
  7303. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7304. * See the License for the specific language governing permissions and
  7305. * limitations under the License.
  7306. *
  7307. * speedy-program-center.js
  7308. * An access point to all programs that run on the GPU
  7309. */
  7310. /**
7311. * An access point to all programs that run on the GPU
  7312. * All program groups can be accessed via this class
  7313. */
  7314. class SpeedyProgramCenter {
  7315. /**
  7316. * Class constructor
  7317. * @param {SpeedyGPU} gpu reference to SpeedyGPU
  7318. */
  7319. constructor(gpu) {
  7320. // Note: we instantiate the program groups lazily
  7321. /** @type {SpeedyGPU} reference to SpeedyGPU */
  7322. this._gpu = gpu;
  7323. /** @type {SpeedyProgramGroupFilters} image filters */
  7324. this._filters = null;
  7325. /** @type {SpeedyProgramGroupTransforms} geometric transformations */
  7326. this._transforms = null;
  7327. /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
  7328. this._pyramids = null;
  7329. /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
  7330. this._keypoints = null;
  7331. /** @type {SpeedyProgramGroupUtils} utility programs */
  7332. this._utils = null;
  7333. }
  7334. /**
  7335. * Image filters & convolutions
  7336. * @returns {SpeedyProgramGroupFilters}
  7337. */
  7338. get filters() {
  7339. return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
  7340. }
  7341. /**
  7342. * Geometric transformations
  7343. * @returns {SpeedyProgramGroupTransforms}
  7344. */
  7345. get transforms() {
  7346. return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
  7347. }
  7348. /**
  7349. * Image pyramids & scale-space
  7350. * @returns {SpeedyProgramGroupPyramids}
  7351. */
  7352. get pyramids() {
  7353. return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
  7354. }
  7355. /**
  7356. * Keypoint detection & description
  7357. * @returns {SpeedyProgramGroupKeypoints}
  7358. */
  7359. get keypoints() {
  7360. return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
  7361. }
  7362. /**
  7363. * Utility programs
  7364. * @returns {SpeedyProgramGroupUtils}
  7365. */
  7366. get utils() {
  7367. return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
  7368. }
  7369. /**
  7370. * Release all programs from all groups. You'll
  7371. * no longer be able to use any of them.
  7372. * @returns {null}
  7373. */
  7374. release() {
  7375. for (const key in this) {
  7376. if (Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
  7377. const group = this[key];
  7378. if (group instanceof SpeedyProgramGroup) group.release();
  7379. }
  7380. }
  7381. return null;
  7382. }
  7383. }
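// The getters above follow a lazy-initialization pattern: a program group is
// only constructed the first time it is requested. An equivalent standalone
// sketch (editorial addition, with made-up names):
//
// class LazyHolder {
//     constructor() { this._group = null; }
//     get group() {
//         return this._group || (this._group = createExpensiveGroup());
//     }
// }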
  7384. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
  7385. /*
  7386. * speedy-vision.js
  7387. * GPU-accelerated Computer Vision for JavaScript
  7388. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7389. *
  7390. * Licensed under the Apache License, Version 2.0 (the "License");
  7391. * you may not use this file except in compliance with the License.
  7392. * You may obtain a copy of the License at
  7393. *
  7394. * http://www.apache.org/licenses/LICENSE-2.0
  7395. *
  7396. * Unless required by applicable law or agreed to in writing, software
  7397. * distributed under the License is distributed on an "AS IS" BASIS,
  7398. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7399. * See the License for the specific language governing permissions and
  7400. * limitations under the License.
  7401. *
  7402. * speedy-texture-pool.js
  7403. * Texture pool
  7404. */
  7405. // Constants
  7406. const DEFAULT_CAPACITY = 1024;
  7407. const BUCKET = Symbol('Bucket');
  7408. /*
  7409. === Heuristics to figure out the capacity of a texture pool ===
  7410. 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
  7411. 2. Figure out the average texture size in your application (say, 640x360 pixels).
  7412. 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
  7413. uses 4 bytes (RGBA format).
  7414. 4. Divide the maximum amount of VRAM by the average texture size in bytes
  7415. (say, 72). That's the capacity of the pool.
  7416. Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
  7417. Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  7418. */
  7419. /**
  7420. * @typedef {number} TextureBucketIndex index of a bucket in a pool
  7421. */
  7422. /**
  7423. * A bucket
  7424. */
  7425. class TextureBucket {
  7426. /**
  7427. * Constructor
  7428. * @param {SpeedyDrawableTexture} texture managed texture
  7429. * @param {TextureBucketIndex} index index of this bucket
  7430. * @param {TextureBucketIndex} next index of the next bucket
  7431. */
  7432. constructor(texture, index, next) {
  7433. /** @type {SpeedyDrawableTexture} managed texture */
  7434. this.texture = texture;
  7435. /** @type {TextureBucketIndex} index of this bucket */
  7436. this.index = index;
  7437. /** @type {TextureBucketIndex} index of the next bucket */
  7438. this.next = next;
  7439. /** @type {boolean} whether the texture is available or not */
  7440. this.free = true;
  7441. }
  7442. }
  7443. /**
  7444. * Texture pool
  7445. */
  7446. class SpeedyTexturePool {
  7447. /**
  7448. * Constructor
  7449. * @param {SpeedyGPU} gpu
  7450. * @param {number} [capacity] number of textures in the pool
  7451. */
  7452. constructor(gpu, capacity = DEFAULT_CAPACITY) {
  7453. utils/* Utils */.A.assert(capacity > 0);
  7454. /** @type {TextureBucket[]} buckets */
  7455. this._bucket = Array.from({
  7456. length: capacity
  7457. }, (_, i) => new TextureBucket(null, i, i - 1));
  7458. /** @type {TextureBucketIndex} index of an available bucket */
  7459. this._head = capacity - 1;
  7460. /** @type {SpeedyGPU} GPU instance */
  7461. this._gpu = gpu;
  7462. }
  7463. /**
  7464. * Get a texture from the pool
  7465. * @returns {SpeedyDrawableTexture}
  7466. */
  7467. allocate() {
  7468. if (this._head < 0) throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
  7469. const bucket = this._bucket[this._head];
  7470. bucket.free = false;
  7471. this._head = bucket.next;
  7472. if (bucket.texture == null)
  7473. // lazy instantiation
  7474. bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
  7475. return bucket.texture;
  7476. }
  7477. /**
  7478. * Put a texture back in the pool
  7479. * @param {SpeedyDrawableTexture} texture
  7480. * @returns {null}
  7481. */
  7482. free(texture) {
  7483. const bucket = texture[BUCKET];
  7484. utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
  7485. bucket.next = this._head;
  7486. bucket.free = true;
  7487. this._head = bucket.index;
  7488. return null;
  7489. }
  7490. /**
  7491. * Release the texture pool
  7492. * @returns {null}
  7493. */
  7494. release() {
  7495. for (let i = 0; i < this._bucket.length; i++) {
  7496. if (this._bucket[i].texture != null) this._bucket[i].texture = this._bucket[i].texture.release();
  7497. }
  7498. return null;
  7499. }
  7500. /**
  7501. * Create a texture with a reference to a bucket
  7502. * @param {WebGL2RenderingContext} gl
  7503. * @param {TextureBucket} bucket
  7504. * @returns {SpeedyDrawableTexture}
  7505. */
  7506. static _createManagedTexture(gl, bucket) {
  7507. const texture = new SpeedyDrawableTexture(gl, 1, 1);
  7508. return Object.defineProperty(texture, BUCKET, {
  7509. configurable: false,
  7510. enumerable: false,
  7511. writable: false,
  7512. value: bucket
  7513. });
  7514. }
  7515. }
  7516. // EXTERNAL MODULE: ./src/utils/types.js
  7517. var types = __nested_webpack_require_314174__(6049);
  7518. ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
  7519. /*
  7520. * speedy-vision.js
  7521. * GPU-accelerated Computer Vision for JavaScript
  7522. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7523. *
  7524. * Licensed under the Apache License, Version 2.0 (the "License");
  7525. * you may not use this file except in compliance with the License.
  7526. * You may obtain a copy of the License at
  7527. *
  7528. * http://www.apache.org/licenses/LICENSE-2.0
  7529. *
  7530. * Unless required by applicable law or agreed to in writing, software
  7531. * distributed under the License is distributed on an "AS IS" BASIS,
  7532. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7533. * See the License for the specific language governing permissions and
  7534. * limitations under the License.
  7535. *
  7536. * speedy-media-source.js
  7537. * Wrappers around <img>, <video>, <canvas>, etc.
  7538. */
  7539. /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
  7540. /** Internal token for protected constructors */
  7541. const PRIVATE_TOKEN = Symbol();
  7542. /**
  7543. * An abstract media source: a wrapper around native
  7544. * elements such as: HTMLImageElement, HTMLVideoElement,
  7545. * and so on
  7546. * @abstract
  7547. */
  7548. class SpeedyMediaSource {
  7549. /**
  7550. * @protected Constructor
  7551. * @param {symbol} token
  7552. */
  7553. constructor(token) {
  7554. // the constructor is not public
  7555. if (token !== PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  7556. /** @type {SpeedyMediaSourceNativeElement} underlying media object */
  7557. this._data = null;
  7558. }
  7559. /**
  7560. * Load a media source
  7561. * @param {SpeedyMediaSourceNativeElement} wrappedObject
  7562. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7563. */
  7564. static load(wrappedObject) {
  7565. if (wrappedObject instanceof HTMLImageElement) return SpeedyImageMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLVideoElement) return SpeedyVideoMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLCanvasElement) return SpeedyCanvasMediaSource.load(wrappedObject);else if (typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas) return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageBitmap) return SpeedyBitmapMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageData) return SpeedyDataMediaSource.load(wrappedObject);else throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
  7566. }
  7567. /**
  7568. * The underlying wrapped object
  7569. * @returns {SpeedyMediaSourceNativeElement}
  7570. */
  7571. get data() {
  7572. return this._data;
  7573. }
  7574. /**
  7575. * Is the underlying media loaded?
  7576. * @returns {boolean}
  7577. */
  7578. isLoaded() {
  7579. return this._data !== null;
  7580. }
  7581. /**
  7582. * The type of the underlying media source
  7583. * @abstract
  7584. * @returns {MediaType}
  7585. */
  7586. get type() {
  7587. throw new utils_errors/* AbstractMethodError */.aQ();
  7588. }
  7589. /**
  7590. * Media width, in pixels
  7591. * @abstract
  7592. * @returns {number}
  7593. */
  7594. get width() {
  7595. throw new utils_errors/* AbstractMethodError */.aQ();
  7596. }
  7597. /**
  7598. * Media height, in pixels
  7599. * @abstract
  7600. * @returns {number}
  7601. */
  7602. get height() {
  7603. throw new utils_errors/* AbstractMethodError */.aQ();
  7604. }
  7605. /**
  7606. * Clone this media source
  7607. * @abstract
  7608. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7609. */
  7610. clone() {
  7611. throw new utils_errors/* AbstractMethodError */.aQ();
  7612. }
  7613. /**
  7614. * Release resources associated with this object
  7615. * @returns {null}
  7616. */
  7617. release() {
  7618. return this._data = null;
  7619. }
  7620. /**
  7621. * Load the underlying media
  7622. * @abstract
  7623. * @param {SpeedyMediaSourceNativeElement} element
  7624. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7625. */
  7626. _load(element) {
  7627. throw new utils_errors/* AbstractMethodError */.aQ();
  7628. }
  7629. /**
  7630. * Wait for an event to be triggered in an element
  7631. * @param {Element} element
  7632. * @param {string} eventName
  7633. * @param {number} [timeout] in ms
  7634. * @returns {SpeedyPromise<Element>}
  7635. */
  7636. static _waitUntil(element, eventName, timeout = 30000) {
  7637. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7638. utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
  7639. const timer = setTimeout(() => {
  7640. clear();
  7641. reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
  7642. }, timeout);
  7643. function clear() {
  7644. clearTimeout(timer);
  7645. element.removeEventListener('error', handleError, false);
  7646. element.removeEventListener(eventName, handleSuccess, false);
  7647. }
  7648. function handleError() {
  7649. const hasError = element.error !== null && typeof element.error === 'object';
  7650. const error = hasError ? element.error : {
  7651. code: -1,
  7652. message: ''
  7653. };
  7654. const info = `${error.message} (error code ${error.code})`;
  7655. clear();
  7656. reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
  7657. }
  7658. function handleSuccess() {
  7659. clear();
  7660. resolve(element);
  7661. }
  7662. element.addEventListener('error', handleError, false);
  7663. element.addEventListener(eventName, handleSuccess, false);
  7664. });
  7665. }
  7666. }
  7667. /**
  7668. * Image media source:
  7669. * a wrapper around HTMLImageElement
  7670. */
  7671. class SpeedyImageMediaSource extends SpeedyMediaSource {
  7672. /**
  7673. * @private Constructor
  7674. * @param {symbol} token
  7675. */
  7676. constructor(token) {
  7677. super(token);
  7678. /** @type {HTMLImageElement} image element */
  7679. this._data = null;
  7680. }
  7681. /**
  7682. * The underlying wrapped object
  7683. * @returns {HTMLImageElement}
  7684. */
  7685. get data() {
  7686. return this._data;
  7687. }
  7688. /**
  7689. * The type of the underlying media source
  7690. * @returns {MediaType}
  7691. */
  7692. get type() {
  7693. return types/* MediaType */.zu.Image;
  7694. }
  7695. /**
  7696. * Media width, in pixels
  7697. * @returns {number}
  7698. */
  7699. get width() {
  7700. return this._data ? this._data.naturalWidth : 0;
  7701. }
  7702. /**
  7703. * Media height, in pixels
  7704. * @returns {number}
  7705. */
  7706. get height() {
  7707. return this._data ? this._data.naturalHeight : 0;
  7708. }
  7709. /**
  7710. * Clone this media source
  7711. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7712. */
  7713. clone() {
  7714. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7715. const newNode = /** @type {HTMLImageElement} */this._data.cloneNode(true);
  7716. return SpeedyImageMediaSource.load(newNode);
  7717. }
  7718. /**
  7719. * Load the underlying media
  7720. * @param {HTMLImageElement} image
  7721. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7722. */
  7723. _load(image) {
  7724. if (this.isLoaded()) this.release();
  7725. if (image.complete && image.naturalWidth !== 0) {
  7726. // already loaded?
  7727. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7728. this._data = image;
  7729. resolve(this);
  7730. });
  7731. } else {
  7732. return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
  7733. this._data = image;
  7734. return this;
  7735. });
  7736. }
  7737. }
  7738. /**
  7739. * Load the underlying media
  7740. * @param {HTMLImageElement} image
  7741. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7742. */
  7743. static load(image) {
  7744. return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
  7745. }
  7746. }
  7747. /**
  7748. * Video media source:
  7749. * a wrapper around HTMLVideoElement
  7750. */
  7751. class SpeedyVideoMediaSource extends SpeedyMediaSource {
  7752. /**
  7753. * @private Constructor
  7754. * @param {symbol} token
  7755. */
  7756. constructor(token) {
  7757. super(token);
  7758. /** @type {HTMLVideoElement} video element */
  7759. this._data = null;
  7760. }
  7761. /**
  7762. * The underlying wrapped object
  7763. * @returns {HTMLVideoElement}
  7764. */
  7765. get data() {
  7766. return this._data;
  7767. }
  7768. /**
  7769. * The type of the underlying media source
  7770. * @returns {MediaType}
  7771. */
  7772. get type() {
  7773. return types/* MediaType */.zu.Video;
  7774. }
  7775. /**
  7776. * Media width, in pixels
  7777. * @returns {number}
  7778. */
  7779. get width() {
  7780. // Warning: videoWidth & videoHeight may change at any time !!!
  7781. // so you can't cache these dimensions
  7782. return this._data ? this._data.videoWidth : 0;
  7783. }
  7784. /**
  7785. * Media height, in pixels
  7786. * @returns {number}
  7787. */
  7788. get height() {
  7789. return this._data ? this._data.videoHeight : 0;
  7790. }
  7791. /**
  7792. * Clone this media source
  7793. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7794. */
  7795. clone() {
  7796. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7797. const newNode = /** @type {HTMLVideoElement} */this._data.cloneNode(true);
  7798. return SpeedyVideoMediaSource.load(newNode);
  7799. }
  7800. /**
  7801. * Load the underlying media
  7802. * @param {HTMLVideoElement} video
  7803. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7804. */
  7805. _load(video) {
  7806. if (this.isLoaded()) this.release();
  7807. utils/* Utils */.A.log('Loading a video...');
  7808. video.load();
  7809. return SpeedyVideoMediaSource._waitUntilPlayable(video).then(() => {
  7810. return SpeedyVideoMediaSource._handleAutoplay(video).then(() => {
  7811. this._data = video;
  7812. return this;
  7813. });
  7814. });
  7815. }
  7816. /**
  7817. * Load the underlying media
  7818. * @param {HTMLVideoElement} video
  7819. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7820. */
  7821. static load(video) {
  7822. return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
  7823. }
  7824. /**
  7825. * Handle browser quirks concerning autoplay
  7826. * @param {HTMLVideoElement} video
  7827. * @returns {SpeedyPromise<void>} gets rejected if we can't autoplay
  7828. */
  7829. static _handleAutoplay(video) {
  7830. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  7831. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  7832. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  7833. // videos marked as autoplay may not play if not visible on-screen
  7834. // videos marked as autoplay should be muted
  7835. if (video.autoplay /*&& video.muted*/) {
  7836. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7837. const promise = video.play();
  7838. // handle older browsers
  7839. if (promise === undefined) {
  7840. resolve();
  7841. return;
  7842. }
  7843. // wrap promise
  7844. promise.then(resolve, reject);
  7845. });
  7846. }
  7847. // nothing to do
  7848. return speedy_promise/* SpeedyPromise */.i.resolve();
  7849. }
  7850. /**
  7851. * Wait for the input video to be playable
  7852. * @param {HTMLVideoElement} video
  7853. * @returns {SpeedyPromise<HTMLVideoElement>} resolves to the input video when it can be played
  7854. */
  7855. static _waitUntilPlayable(video) {
  7856. const TIMEOUT = 30000,
  7857. INTERVAL = 500;
  7858. if (video.readyState >= 3) return speedy_promise/* SpeedyPromise */.i.resolve(video);
  7859. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7860. let ms = 0,
  7861. t = setInterval(() => {
  7862. //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
  7863. if (video.readyState >= 3) {
  7864. clearInterval(t);
  7865. resolve(video);
  7866. } else if ((ms += INTERVAL) >= TIMEOUT) {
  7867. clearInterval(t);
  7868. reject(new utils_errors/* TimeoutError */.MU('The video took too long to load'));
  7869. }
  7870. }, INTERVAL);
  7871. });
  7872. }
  7873. }
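/*
 * Usage sketch (illustrative only; not executed by this bundle): because of the
 * autoplay policies handled by SpeedyVideoMediaSource._handleAutoplay() above,
 * a video meant to autoplay should be muted (and usually play inline) before
 * it is loaded. The file name below is hypothetical.
 *
 *   const video = document.createElement('video');
 *   video.src = 'camera-feed.webm';   // hypothetical source
 *   video.muted = true;               // required by most autoplay policies
 *   video.playsInline = true;         // avoid fullscreen takeover on mobile
 *   video.autoplay = true;
 *   SpeedyVideoMediaSource.load(video).then(source => {
 *       console.log(`video is ${source.width} x ${source.height}`);
 *   });
 */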
  7874. /**
  7875. * Canvas media source:
  7876. * a wrapper around HTMLCanvasElement
  7877. */
  7878. class SpeedyCanvasMediaSource extends SpeedyMediaSource {
  7879. /**
  7880. * @private Constructor
  7881. * @param {symbol} token
  7882. */
  7883. constructor(token) {
  7884. super(token);
  7885. /** @type {HTMLCanvasElement} canvas element */
  7886. this._data = null;
  7887. }
  7888. /**
  7889. * The underlying wrapped object
  7890. * @returns {HTMLCanvasElement}
  7891. */
  7892. get data() {
  7893. return this._data;
  7894. }
  7895. /**
  7896. * The type of the underlying media source
  7897. * @returns {MediaType}
  7898. */
  7899. get type() {
  7900. return types/* MediaType */.zu.Canvas;
  7901. }
  7902. /**
  7903. * Media width, in pixels
  7904. * @returns {number}
  7905. */
  7906. get width() {
  7907. return this._data ? this._data.width : 0;
  7908. }
  7909. /**
  7910. * Media height, in pixels
  7911. * @returns {number}
  7912. */
  7913. get height() {
  7914. return this._data ? this._data.height : 0;
  7915. }
  7916. /**
  7917. * Clone this media source
  7918. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7919. */
  7920. clone() {
  7921. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7922. const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
  7923. const newContext = newCanvas.getContext('2d');
  7924. newContext.drawImage(this._data, 0, 0);
  7925. return SpeedyCanvasMediaSource.load(newCanvas);
  7926. }
  7927. /**
  7928. * Load the underlying media
  7929. * @param {HTMLCanvasElement} canvas
  7930. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7931. */
  7932. _load(canvas) {
  7933. if (this.isLoaded()) this.release();
  7934. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7935. this._data = canvas;
  7936. resolve(this);
  7937. });
  7938. }
  7939. /**
  7940. * Load the underlying media
  7941. * @param {HTMLCanvasElement} canvas
  7942. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7943. */
  7944. static load(canvas) {
  7945. return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
  7946. }
  7947. }
  7948. /**
  7949. * OffscreenCanvas media source:
  7950. * a wrapper around OffscreenCanvas
  7951. */
  7952. class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource {
  7953. /**
  7954. * @private Constructor
  7955. * @param {symbol} token
  7956. */
  7957. constructor(token) {
  7958. super(token);
  7959. /** @type {OffscreenCanvas} offscreen canvas element */
  7960. this._data = null;
  7961. }
  7962. /**
  7963. * The underlying wrapped object
  7964. * @returns {OffscreenCanvas}
  7965. */
  7966. get data() {
  7967. return this._data;
  7968. }
  7969. /**
  7970. * The type of the underlying media source
  7971. * @returns {MediaType}
  7972. */
  7973. get type() {
  7974. return types/* MediaType */.zu.OffscreenCanvas;
  7975. }
  7976. /**
  7977. * Media width, in pixels
  7978. * @returns {number}
  7979. */
  7980. get width() {
  7981. return this._data ? this._data.width : 0;
  7982. }
  7983. /**
  7984. * Media height, in pixels
  7985. * @returns {number}
  7986. */
  7987. get height() {
  7988. return this._data ? this._data.height : 0;
  7989. }
  7990. /**
  7991. * Clone this media source
  7992. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7993. */
  7994. clone() {
  7995. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7996. const newCanvas = new OffscreenCanvas(this.width, this.height);
  7997. const newContext = newCanvas.getContext('2d');
  7998. newContext.drawImage(this._data, 0, 0);
  7999. return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
  8000. }
  8001. /**
  8002. * Load the underlying media
  8003. * @param {OffscreenCanvas} offscreenCanvas
  8004. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8005. */
  8006. _load(offscreenCanvas) {
  8007. if (this.isLoaded()) this.release();
  8008. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8009. this._data = offscreenCanvas;
  8010. resolve(this);
  8011. });
  8012. }
  8013. /**
  8014. * Load the underlying media
  8015. * @param {OffscreenCanvas} offscreenCanvas
  8016. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8017. */
  8018. static load(offscreenCanvas) {
  8019. return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
  8020. }
  8021. }
  8022. /**
  8023. * Bitmap media source:
  8024. * a wrapper around ImageBitmap
  8025. */
  8026. class SpeedyBitmapMediaSource extends SpeedyMediaSource {
  8027. /**
  8028. * @private Constructor
  8029. * @param {symbol} token
  8030. */
  8031. constructor(token) {
  8032. super(token);
  8033. /** @type {ImageBitmap} image bitmap */
  8034. this._data = null;
  8035. }
  8036. /**
  8037. * The underlying wrapped object
  8038. * @returns {ImageBitmap}
  8039. */
  8040. get data() {
  8041. return this._data;
  8042. }
  8043. /**
  8044. * The type of the underlying media source
  8045. * @returns {MediaType}
  8046. */
  8047. get type() {
  8048. return types/* MediaType */.zu.Bitmap;
  8049. }
  8050. /**
  8051. * Media width, in pixels
  8052. * @returns {number}
  8053. */
  8054. get width() {
  8055. return this._data ? this._data.width : 0;
  8056. }
  8057. /**
  8058. * Media height, in pixels
  8059. * @returns {number}
  8060. */
  8061. get height() {
  8062. return this._data ? this._data.height : 0;
  8063. }
  8064. /**
  8065. * Clone this media source
  8066. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8067. */
  8068. clone() {
  8069. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8070. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  8071. createImageBitmap(this._data).then(newBitmap => {
  8072. const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
  8073. newSource._load(newBitmap).then(resolve, reject);
  8074. }, reject);
  8075. });
  8076. }
  8077. /**
  8078. * Release resources associated with this object
  8079. * @returns {null}
  8080. */
  8081. release() {
  8082. if (this._data != null) this._data.close();
  8083. return super.release();
  8084. }
  8085. /**
  8086. * Load the underlying media
  8087. * @param {ImageBitmap} bitmap
  8088. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8089. */
  8090. _load(bitmap) {
  8091. if (this.isLoaded()) this.release();
  8092. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8093. this._data = bitmap;
  8094. resolve(this);
  8095. });
  8096. }
  8097. /**
  8098. * Load the underlying media
  8099. * @param {ImageBitmap} bitmap
  8100. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8101. */
  8102. static load(bitmap) {
  8103. return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
  8104. }
  8105. }
  8106. /**
  8107. * Data media source:
  8108. * a wrapper around ImageData
  8109. */
  8110. class SpeedyDataMediaSource extends SpeedyMediaSource {
  8111. /**
  8112. * @private Constructor
  8113. * @param {symbol} token
  8114. */
  8115. constructor(token) {
  8116. super(token);
  8117. /** @type {ImageData} image data */
  8118. this._data = null;
  8119. }
  8120. /**
  8121. * The underlying wrapped object
  8122. * @returns {ImageData}
  8123. */
  8124. get data() {
  8125. return this._data;
  8126. }
  8127. /**
  8128. * The type of the underlying media source
  8129. * @returns {MediaType}
  8130. */
  8131. get type() {
  8132. return types/* MediaType */.zu.Data;
  8133. }
  8134. /**
  8135. * Media width, in pixels
  8136. * @returns {number}
  8137. */
  8138. get width() {
  8139. return this._data ? this._data.width : 0;
  8140. }
  8141. /**
  8142. * Media height, in pixels
  8143. * @returns {number}
  8144. */
  8145. get height() {
  8146. return this._data ? this._data.height : 0;
  8147. }
  8148. /**
  8149. * Clone this media source
  8150. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8151. */
  8152. clone() {
  8153. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8154. const imageDataCopy = new ImageData(new Uint8ClampedArray(this._data.data), this._data.width, this._data.height);
  8155. return SpeedyDataMediaSource.load(imageDataCopy);
  8156. }
  8157. /**
  8158. * Load the underlying media
  8159. * @param {ImageData} imageData
  8160. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8161. */
  8162. _load(imageData) {
  8163. if (this.isLoaded()) this.release();
  8164. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8165. this._data = imageData;
  8166. resolve(this);
  8167. });
  8168. }
  8169. /**
  8170. * Load the underlying media
  8171. * @param {ImageData} imageData
  8172. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8173. */
  8174. static load(imageData) {
  8175. return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
  8176. }
  8177. }
  8178. // EXTERNAL MODULE: ./src/utils/observable.js
  8179. var observable = __nested_webpack_require_314174__(3211);
  8180. ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
  8181. /*
  8182. * speedy-vision.js
  8183. * GPU-accelerated Computer Vision for JavaScript
  8184. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8185. *
  8186. * Licensed under the Apache License, Version 2.0 (the "License");
  8187. * you may not use this file except in compliance with the License.
  8188. * You may obtain a copy of the License at
  8189. *
  8190. * http://www.apache.org/licenses/LICENSE-2.0
  8191. *
  8192. * Unless required by applicable law or agreed to in writing, software
  8193. * distributed under the License is distributed on an "AS IS" BASIS,
  8194. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8195. * See the License for the specific language governing permissions and
  8196. * limitations under the License.
  8197. *
  8198. * speedy-gpu.js
  8199. * GPU-accelerated routines for Computer Vision
  8200. */
  8201. /**
  8202. * GPU-accelerated routines for Computer Vision
  8203. */
  8204. class SpeedyGPU extends observable/* Observable */.c {
  8205. /**
  8206. * Constructor
  8207. */
  8208. constructor() {
  8209. super();
  8210. /** @type {SpeedyGL} cached reference */
  8211. this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
  8212. /** @type {SpeedyProgramCenter} GPU-based programs */
  8213. this._programs = new SpeedyProgramCenter(this);
  8214. /** @type {SpeedyTexturePool} texture pool */
  8215. this._texturePool = new SpeedyTexturePool(this);
  8216. // recreate the state if necessary
  8217. this._speedyGL.subscribe(this._reset, this);
  8218. }
  8219. /**
  8220. * Access point to all GPU programs
  8221. * @returns {SpeedyProgramCenter}
  8222. */
  8223. get programs() {
  8224. return this._programs;
  8225. }
  8226. /**
  8227. * The WebGL Rendering Context
  8228. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  8229. * @returns {WebGL2RenderingContext}
  8230. */
  8231. get gl() {
  8232. return this._speedyGL.gl;
  8233. }
  8234. /**
  8235. * Internal canvas
  8236. * @returns {HTMLCanvasElement}
  8237. */
  8238. get canvas() {
  8239. return this._speedyGL.canvas;
  8240. }
  8241. /**
  8242. * Texture pool
  8243. * @returns {SpeedyTexturePool}
  8244. */
  8245. get texturePool() {
  8246. return this._texturePool;
  8247. }
  8248. /**
  8249. * Renders a texture to the canvas
  8250. * @param {SpeedyTexture} texture
  8251. * @returns {HTMLCanvasElement} returned for convenience
  8252. */
  8253. renderToCanvas(texture) {
  8254. const width = texture.width;
  8255. const height = texture.height;
  8256. const canvas = this.canvas;
  8257. // do we need to resize the canvas?
  8258. if (width > canvas.width || height > canvas.height) {
  8259. utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
  8260. canvas.width = width;
  8261. canvas.height = height;
  8262. }
  8263. // render
  8264. this.programs.utils.renderToCanvas.outputs(width, height, null);
  8265. this.programs.utils.renderToCanvas(texture);
  8266. // done!
  8267. return canvas;
  8268. }
  8269. /**
  8270. * Upload an image to the GPU
  8271. * @param {SpeedyMediaSource} source
  8272. * @param {SpeedyTexture} outputTexture
  8273. * @returns {SpeedyTexture} outputTexture
  8274. */
  8275. upload(source, outputTexture) {
  8276. return outputTexture.upload(source.data, source.width, source.height);
  8277. }
  8278. /**
  8279. * Releases resources
  8280. * @returns {null}
  8281. */
  8282. release() {
  8283. utils/* Utils */.A.assert(!this.isReleased());
  8284. // release internal components
  8285. this._programs = this._programs.release();
  8286. this._texturePool = this._texturePool.release();
  8287. // unsubscribe
  8288. this._speedyGL.unsubscribe(this._reset);
  8289. return null;
  8290. }
  8291. /**
  8292. * Has this SpeedyGPU been released?
  8293. * @returns {boolean}
  8294. */
  8295. isReleased() {
  8296. return this._programs == null;
  8297. }
  8298. /**
  8299. * Lose & restore the WebGL context (useful for testing purposes)
  8300. * @return {SpeedyPromise<void>} resolves as soon as the context is restored
  8301. */
  8302. loseAndRestoreWebGLContext() {
  8303. return this._speedyGL.loseAndRestoreContext().then(() => void 0);
  8304. }
  8305. /**
  8306. * Reset the internal state
  8307. * (called on context reset)
  8308. */
  8309. _reset() {
  8310. if (this.isReleased()) return;
  8311. this._programs = new SpeedyProgramCenter(this);
  8312. this._texturePool = new SpeedyTexturePool(this);
  8313. this._notify();
  8314. }
  8315. }
  8316. ;// CONCATENATED MODULE: ./src/core/speedy-size.js
  8317. /*
  8318. * speedy-vision.js
  8319. * GPU-accelerated Computer Vision for JavaScript
  8320. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8321. *
  8322. * Licensed under the Apache License, Version 2.0 (the "License");
  8323. * you may not use this file except in compliance with the License.
  8324. * You may obtain a copy of the License at
  8325. *
  8326. * http://www.apache.org/licenses/LICENSE-2.0
  8327. *
  8328. * Unless required by applicable law or agreed to in writing, software
  8329. * distributed under the License is distributed on an "AS IS" BASIS,
  8330. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8331. * See the License for the specific language governing permissions and
  8332. * limitations under the License.
  8333. *
  8334. * speedy-size.js
  8335. * Size of a rectangle
  8336. */
  8337. /**
  8338. * Size of a rectangle
  8339. */
  8340. class SpeedySize {
  8341. /**
  8342. * Constructor
  8343. * @param {number} width non-negative number
  8344. * @param {number} height non-negative number
  8345. */
  8346. constructor(width, height) {
  8347. /** @type {number} width */
  8348. this._width = Math.max(0, +width);
  8349. /** @type {number} height */
  8350. this._height = Math.max(0, +height);
  8351. }
  8352. //
  8353. // ===== METHODS =====
  8354. //
  8355. /**
  8356. * Width
  8357. * @returns {number}
  8358. */
  8359. get width() {
  8360. return this._width;
  8361. }
  8362. /**
  8363. * Width
  8364. * @param {number} value
  8365. */
  8366. set width(value) {
  8367. this._width = Math.max(0, +value);
  8368. }
  8369. /**
  8370. * Height
  8371. * @returns {number}
  8372. */
  8373. get height() {
  8374. return this._height;
  8375. }
  8376. /**
  8377. * Height
  8378. * @param {number} value
  8379. */
  8380. set height(value) {
  8381. this._height = Math.max(0, +value);
  8382. }
  8383. /**
  8384. * Convert to string
  8385. * @returns {string}
  8386. */
  8387. toString() {
  8388. return `SpeedySize(${this.width}, ${this.height})`;
  8389. }
  8390. /**
  8391. * Is this size equal to anotherSize?
  8392. * @param {SpeedySize} anotherSize
  8393. * @returns {boolean}
  8394. */
  8395. equals(anotherSize) {
  8396. return this.width === anotherSize.width && this.height === anotherSize.height;
  8397. }
  8398. /**
  8399. * The area of the rectangle
  8400. * @returns {number}
  8401. */
  8402. area() {
  8403. return this.width * this.height;
  8404. }
  8405. }
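/*
 * Usage sketch (illustrative only), using the SpeedySize class defined above:
 *
 *   const a = new SpeedySize(640, 480);
 *   const b = new SpeedySize(640, 480);
 *   a.equals(b);   // true
 *   a.area();      // 307200
 *   a.toString();  // "SpeedySize(640, 480)"
 */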
  8406. ;// CONCATENATED MODULE: ./src/core/speedy-media.js
  8407. /*
  8408. * speedy-vision.js
  8409. * GPU-accelerated Computer Vision for JavaScript
  8410. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8411. *
  8412. * Licensed under the Apache License, Version 2.0 (the "License");
  8413. * you may not use this file except in compliance with the License.
  8414. * You may obtain a copy of the License at
  8415. *
  8416. * http://www.apache.org/licenses/LICENSE-2.0
  8417. *
  8418. * Unless required by applicable law or agreed to in writing, software
  8419. * distributed under the License is distributed on an "AS IS" BASIS,
  8420. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8421. * See the License for the specific language governing permissions and
  8422. * limitations under the License.
  8423. *
  8424. * speedy-media.js
  8425. * SpeedyMedia implementation
  8426. */
  8427. /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  8428. /**
  8429. * @typedef {object} SpeedyMediaOptions
  8430. * @property {ImageFormat} [format] default is RGBA
  8431. */
  8432. /** A helper used to keep the constructor of SpeedyMedia private */
  8433. const speedy_media_PRIVATE_TOKEN = Symbol();
  8434. /**
  8435. * SpeedyMedia encapsulates a media element
  8436. * (e.g., image, video, canvas)
  8437. */
  8438. class SpeedyMedia {
  8439. /**
  8440. * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
  8441. * @param {symbol} token
  8442. * @param {SpeedyMediaSource} source
  8443. * @param {SpeedyMediaOptions} [options] options object
  8444. */
  8445. constructor(token, source, options = {}) {
  8446. // private constructor
  8447. if (token !== speedy_media_PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  8448. /** @type {SpeedyMediaSource} media source */
  8449. this._source = source;
  8450. /** @type {ImageFormat} format */
  8451. this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
  8452. /** @type {SpeedyMediaOptions} options */
  8453. this._options = Object.freeze(Object.assign(Object.assign({}, options), {}, {
  8454. format: this._format
  8455. }));
  8456. // validate
  8457. if (!source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);else if (this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
  8458. }
  8459. /**
  8460. * Load a media source
  8461. * Will wait until the HTML media source is loaded
  8462. * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
  8463. * @param {SpeedyMediaOptions} [options] options object
  8464. * @param {boolean} [log] show log message?
  8465. * @returns {SpeedyPromise<SpeedyMedia>}
  8466. */
  8467. static load(mediaSource, options = {}, log = true) {
  8468. return SpeedyMediaSource.load(mediaSource).then(source => {
  8469. utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
  8470. // FIXME user could pass an invalid format in options if ImageFormat is made public
  8471. const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
  8472. // show log message
  8473. if (log) utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
  8474. // done!
  8475. return media;
  8476. });
  8477. }
  8478. /**
  8479. * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
  8480. * @returns {SpeedyMediaSourceNativeElement} the media element
  8481. */
  8482. get source() {
  8483. return this._source ? this._source.data : null;
  8484. }
  8485. /**
  8486. * The type of the media attached to this SpeedyMedia object
  8487. * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
  8488. */
  8489. get type() {
  8490. if (this.isReleased()) return 'unknown';
  8491. switch (this._source.type) {
  8492. case types/* MediaType */.zu.Image:
  8493. return 'image';
  8494. case types/* MediaType */.zu.Video:
  8495. return 'video';
  8496. case types/* MediaType */.zu.Canvas:
  8497. return 'canvas';
  8498. case types/* MediaType */.zu.OffscreenCanvas:
  8499. return 'offscreen-canvas';
  8500. case types/* MediaType */.zu.Bitmap:
  8501. return 'bitmap';
  8502. case types/* MediaType */.zu.Data:
  8503. return 'data';
  8504. default:
  8505. // this shouldn't happen
  8506. return 'unknown';
  8507. }
  8508. }
  8509. /**
  8510. * Gets the width of the media
  8511. * @returns {number} media width
  8512. */
  8513. get width() {
  8514. return this._source ? this._source.width : 0;
  8515. }
  8516. /**
  8517. * Gets the height of the media
  8518. * @returns {number} media height
  8519. */
  8520. get height() {
  8521. return this._source ? this._source.height : 0;
  8522. }
  8523. /**
  8524. * The size of this media, in pixels
  8525. * @returns {SpeedySize}
  8526. */
  8527. get size() {
  8528. return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
  8529. }
  8530. /**
  8531. * Returns a read-only object featuring advanced options
  8532. * related to this SpeedyMedia object
  8533. * @returns {SpeedyMediaOptions}
  8534. */
  8535. get options() {
  8536. return this._options;
  8537. }
  8538. /**
  8539. * Releases resources associated with this media
  8540. * @returns {null}
  8541. */
  8542. release() {
  8543. if (!this.isReleased()) {
  8544. utils/* Utils */.A.log('Releasing SpeedyMedia object...');
  8545. this._source = this._source.release();
  8546. }
  8547. return null;
  8548. }
  8549. /**
  8550. * Has this media been released?
  8551. * @returns {boolean}
  8552. */
  8553. isReleased() {
  8554. return this._source == null;
  8555. }
  8556. /**
  8557. * Clones the SpeedyMedia object
  8558. * @returns {SpeedyPromise<SpeedyMedia>} a clone object
  8559. */
  8560. clone() {
  8561. // has the media been released?
  8562. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
  8563. // clone the object
  8564. const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
  8565. // done!
  8566. return speedy_promise/* SpeedyPromise */.i.resolve(clone);
  8567. }
  8568. /**
  8569. * Converts the media to an ImageBitmap
  8570. * @returns {SpeedyPromise<ImageBitmap>}
  8571. */
  8572. toBitmap() {
  8573. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');else if (!this._source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');else if (this._source.type == types/* MediaType */.zu.Bitmap) return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);else return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
  8574. }
  8575. }
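/*
 * Usage sketch (illustrative only): SpeedyMedia.load() accepts an image, video,
 * canvas, OffscreenCanvas, ImageBitmap or ImageData and resolves once the
 * underlying source is loaded. In user code this is normally reached through
 * the library's public entry point (an assumption here); the wrapped media
 * source classes above are internal. The selector below is hypothetical.
 *
 *   const img = document.querySelector('img#template');
 *   SpeedyMedia.load(img).then(media => {
 *       console.log(media.type, media.width, media.height); // "image", w, h
 *       return media.toBitmap();
 *   }).then(bitmap => {
 *       // use the ImageBitmap...
 *   });
 */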
  8576. ;// CONCATENATED MODULE: ./src/core/speedy-platform.js
  8577. /*
  8578. * speedy-vision.js
  8579. * GPU-accelerated Computer Vision for JavaScript
  8580. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8581. *
  8582. * Licensed under the Apache License, Version 2.0 (the "License");
  8583. * you may not use this file except in compliance with the License.
  8584. * You may obtain a copy of the License at
  8585. *
  8586. * http://www.apache.org/licenses/LICENSE-2.0
  8587. *
  8588. * Unless required by applicable law or agreed to in writing, software
  8589. * distributed under the License is distributed on an "AS IS" BASIS,
  8590. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8591. * See the License for the specific language governing permissions and
  8592. * limitations under the License.
  8593. *
  8594. * speedy-platform.js
  8595. * Utilities to query information about the graphics driver
  8596. */
  8597. /**
  8598. * Utilities to query information about the graphics driver. This information
  8599. * may or may not be available, depending on the privacy settings of the web
  8600. * browser. In addition, it may be more or less accurate in different browsers.
  8601. */
  8602. class SpeedyPlatform extends speedy_namespace/* SpeedyNamespace */.Q {
  8603. /**
  8604. * Renderer string of the graphics driver
  8605. * @returns {string}
  8606. */
  8607. static get renderer() {
  8608. return speedy_gl/* SpeedyGL */.c.instance.renderer;
  8609. }
  8610. /**
  8611. * Vendor string of the graphics driver
  8612. * @returns {string}
  8613. */
  8614. static get vendor() {
  8615. return speedy_gl/* SpeedyGL */.c.instance.vendor;
  8616. }
  8617. }
  8618. ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
  8619. /*
  8620. * speedy-vision.js
  8621. * GPU-accelerated Computer Vision for JavaScript
  8622. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8623. *
  8624. * Licensed under the Apache License, Version 2.0 (the "License");
  8625. * you may not use this file except in compliance with the License.
  8626. * You may obtain a copy of the License at
  8627. *
  8628. * http://www.apache.org/licenses/LICENSE-2.0
  8629. *
  8630. * Unless required by applicable law or agreed to in writing, software
  8631. * distributed under the License is distributed on an "AS IS" BASIS,
  8632. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8633. * See the License for the specific language governing permissions and
  8634. * limitations under the License.
  8635. *
  8636. * speedy-vector.js
  8637. * Vectors
  8638. */
  8639. /**
  8640. * 2D vector of floating-point numbers
  8641. */
  8642. class SpeedyVector2 {
  8643. /**
  8644. * Create a 2D vector
  8645. * @param {number} x
  8646. * @param {number} y
  8647. */
  8648. constructor(x, y) {
  8649. /** @type {number} x coordinate */
  8650. this._x = +x;
  8651. /** @type {number} y coordinate */
  8652. this._y = +y;
  8653. }
  8654. //
  8655. // ===== METHODS =====
  8656. //
  8657. /**
  8658. * x-coordinate
  8659. * @returns {number}
  8660. */
  8661. get x() {
  8662. return this._x;
  8663. }
  8664. /**
  8665. * x-coordinate
  8666. * @param {number} value
  8667. */
  8668. set x(value) {
  8669. this._x = +value;
  8670. }
  8671. /**
  8672. * y-coordinate
  8673. * @returns {number}
  8674. */
  8675. get y() {
  8676. return this._y;
  8677. }
  8678. /**
  8679. * y-coordinate
  8680. * @param {number} value
  8681. */
  8682. set y(value) {
  8683. this._y = +value;
  8684. }
  8685. /**
  8686. * Convert to string
  8687. * @returns {string}
  8688. */
  8689. toString() {
  8690. return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8691. }
  8692. /**
  8693. * Is this vector equal to v?
  8694. * @param {SpeedyVector2} v
  8695. * @returns {boolean}
  8696. */
  8697. equals(v) {
  8698. return this.x === v.x && this.y === v.y;
  8699. }
  8700. /**
  8701. * Dot product between this vector and another vector
  8702. * @param {SpeedyVector2} v another vector
  8703. * @returns {number}
  8704. */
  8705. dot(v) {
  8706. return this.x * v.x + this.y * v.y;
  8707. }
  8708. /**
  8709. * The distance between this vector and another vector
  8710. * @param {SpeedyVector2} v another vector
  8711. * @returns {number}
  8712. */
  8713. distanceTo(v) {
  8714. const dx = this.x - v.x;
  8715. const dy = this.y - v.y;
  8716. return Math.sqrt(dx * dx + dy * dy);
  8717. }
  8718. /**
  8719. * Euclidean norm
  8720. * @returns {number}
  8721. */
  8722. length() {
  8723. return Math.sqrt(this.x * this.x + this.y * this.y);
  8724. }
  8725. /**
  8726. * Returns a normalized version of this vector
  8727. * @returns {SpeedyVector2}
  8728. */
  8729. normalized() {
  8730. const len = this.length();
  8731. if (len > 0.0) return new SpeedyVector2(this.x / len, this.y / len);else return new SpeedyVector2(0.0, 0.0);
  8732. }
  8733. /**
  8734. * Returns a copy of this vector translated by offset
  8735. * @param {SpeedyVector2} offset
  8736. * @returns {SpeedyVector2}
  8737. */
  8738. plus(offset) {
  8739. return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
  8740. }
  8741. /**
  8742. * Returns a copy of this vector translated by -offset
  8743. * @param {SpeedyVector2} offset
  8744. * @returns {SpeedyVector2}
  8745. */
  8746. minus(offset) {
  8747. return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
  8748. }
  8749. /**
  8750. * Returns a copy of this vector scaled by a scalar
  8751. * @param {number} scalar
  8752. * @returns {SpeedyVector2}
  8753. */
  8754. times(scalar) {
  8755. return new SpeedyVector2(this.x * scalar, this.y * scalar);
  8756. }
  8757. }
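/*
 * Usage sketch (illustrative only), using the SpeedyVector2 class defined above:
 *
 *   const u = new SpeedyVector2(3, 4);
 *   const v = new SpeedyVector2(1, 0);
 *   u.length();         // 5
 *   u.dot(v);           // 3
 *   u.normalized();     // SpeedyVector2(0.6, 0.8)
 *   u.plus(v.times(2)); // SpeedyVector2(5, 4)
 */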
  8758. ;// CONCATENATED MODULE: ./src/core/speedy-point.js
  8759. /*
  8760. * speedy-vision.js
  8761. * GPU-accelerated Computer Vision for JavaScript
  8762. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8763. *
  8764. * Licensed under the Apache License, Version 2.0 (the "License");
  8765. * you may not use this file except in compliance with the License.
  8766. * You may obtain a copy of the License at
  8767. *
  8768. * http://www.apache.org/licenses/LICENSE-2.0
  8769. *
  8770. * Unless required by applicable law or agreed to in writing, software
  8771. * distributed under the License is distributed on an "AS IS" BASIS,
  8772. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8773. * See the License for the specific language governing permissions and
  8774. * limitations under the License.
  8775. *
  8776. * speedy-point.js
  8777. * Points in space
  8778. */
  8779. /**
  8780. * 2D point
  8781. */
  8782. class SpeedyPoint2 {
  8783. /**
  8784. * Create a 2D point
  8785. * @param {number} x
  8786. * @param {number} y
  8787. */
  8788. constructor(x, y) {
  8789. /** @type {number} x coordinate */
  8790. this._x = +x;
  8791. /** @type {number} y coordinate */
  8792. this._y = +y;
  8793. }
  8794. //
  8795. // ===== METHODS =====
  8796. //
  8797. /**
  8798. * x-coordinate
  8799. * @returns {number}
  8800. */
  8801. get x() {
  8802. return this._x;
  8803. }
  8804. /**
  8805. * x-coordinate
  8806. * @param {number} value
  8807. */
  8808. set x(value) {
  8809. this._x = +value;
  8810. }
  8811. /**
  8812. * y-coordinate
  8813. * @returns {number}
  8814. */
  8815. get y() {
  8816. return this._y;
  8817. }
  8818. /**
  8819. * y-coordinate
  8820. * @param {number} value
  8821. */
  8822. set y(value) {
  8823. this._y = +value;
  8824. }
  8825. /**
  8826. * Convert to string
  8827. * @returns {string}
  8828. */
  8829. toString() {
  8830. return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8831. }
  8832. /**
  8833. * Add a vector to this point
  8834. * @param {SpeedyVector2} v
  8835. * @returns {SpeedyPoint2}
  8836. */
  8837. plus(v) {
  8838. return new SpeedyPoint2(this.x + v.x, this.y + v.y);
  8839. }
  8840. /**
  8841. * Subtracts a point p from this point
  8842. * @param {SpeedyPoint2} p
  8843. * @returns {SpeedyVector2}
  8844. */
  8845. minus(p) {
  8846. return new SpeedyVector2(this.x - p.x, this.y - p.y);
  8847. }
  8848. /**
  8849. * Is this point equal to p?
  8850. * @param {SpeedyPoint2} p
  8851. * @returns {boolean}
  8852. */
  8853. equals(p) {
  8854. return this.x === p.x && this.y === p.y;
  8855. }
  8856. }
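/*
 * Usage sketch (illustrative only): points and vectors interoperate as
 * expected - subtracting two points yields a vector, and adding a vector
 * to a point yields another point.
 *
 *   const p = new SpeedyPoint2(10, 20);
 *   const q = new SpeedyPoint2(13, 24);
 *   const d = q.minus(p);    // SpeedyVector2(3, 4)
 *   d.length();              // 5
 *   p.plus(d).equals(q);     // true
 */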
  8857. // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
  8858. var speedy_matrix_expr = __nested_webpack_require_314174__(6306);
  8859. // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
  8860. var speedy_matrix_wasm = __nested_webpack_require_314174__(6465);
  8861. // EXTERNAL MODULE: ./src/core/speedy-matrix.js
  8862. var speedy_matrix = __nested_webpack_require_314174__(4188);
  8863. ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
  8864. /*
  8865. * speedy-vision.js
  8866. * GPU-accelerated Computer Vision for JavaScript
  8867. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8868. *
  8869. * Licensed under the Apache License, Version 2.0 (the "License");
  8870. * you may not use this file except in compliance with the License.
  8871. * You may obtain a copy of the License at
  8872. *
  8873. * http://www.apache.org/licenses/LICENSE-2.0
  8874. *
  8875. * Unless required by applicable law or agreed to in writing, software
  8876. * distributed under the License is distributed on an "AS IS" BASIS,
  8877. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8878. * See the License for the specific language governing permissions and
  8879. * limitations under the License.
  8880. *
  8881. * speedy-matrix-factory.js
  8882. * A factory of matrices
  8883. */
  8884. /**
  8885. * Matrix routines
  8886. */
  8887. class SpeedyMatrixFactory extends Function {
  8888. /**
  8889. * Constructor
  8890. */
  8891. constructor() {
  8892. // This factory can be invoked as a function
  8893. super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
  8894. return this.bind(this);
  8895. }
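/*
 * Usage sketch (illustrative only): since the factory is itself callable, an
 * instance of this class - exposed by the library's public API; the name
 * `matrixFactory` below is an assumption - creates matrices directly.
 * Entries are given in column-major order.
 *
 *   const M = matrixFactory(2, 3, [1, 4,  2, 5,  3, 6]); // [[1,2,3],[4,5,6]]
 *   const I = matrixFactory.Eye(3);                      // 3x3 identity
 *   const Z = matrixFactory.Zeros(3, 1);                 // 3x1 zero vector
 */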
  8896. /**
  8897. * @private
  8898. *
  8899. * Create a new matrix filled with the specified size and entries
  8900. * @param {number} rows
  8901. * @param {number} [columns]
  8902. * @param {number[]} [entries] in column-major format
  8903. * @returns {SpeedyMatrix}
  8904. */
  8905. _create(rows, columns = rows, entries = []) {
  8906. return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
  8907. }
  8908. /**
  8909. * @private
  8910. *
  8911. * Evaluate an expression synchronously and store the result in a new matrix
  8912. * @param {SpeedyMatrixExpr} expr matrix expression
  8913. * @returns {SpeedyMatrix}
  8914. */
  8915. _from(expr) {
  8916. return speedy_matrix.SpeedyMatrix.From(expr);
  8917. }
  8918. /**
  8919. * Create a new matrix filled with zeros with the specified size
  8920. * @param {number} rows
  8921. * @param {number} [columns]
  8922. * @returns {SpeedyMatrix}
  8923. */
  8924. Zeros(rows, columns = rows) {
  8925. return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
  8926. }
  8927. /**
  8928. * Create a new matrix filled with ones with the specified size
  8929. * @param {number} rows
  8930. * @param {number} [columns]
  8931. * @returns {SpeedyMatrix}
  8932. */
  8933. Ones(rows, columns = rows) {
  8934. return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
  8935. }
  8936. /**
  8937. * Create an identity matrix with the specified size
  8938. * @param {number} rows
  8939. * @param {number} [columns]
  8940. * @returns {SpeedyMatrix}
  8941. */
  8942. Eye(rows, columns = rows) {
  8943. return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
  8944. }
  8945. /**
  8946. * Returns a promise that resolves immediately if the WebAssembly routines
  8947. * are ready to be used, or as soon as they do become ready
  8948. * @returns {SpeedyPromise<void>}
  8949. */
  8950. ready() {
  8951. return speedy_matrix.SpeedyMatrix.ready();
  8952. }
  8953. /**
  8954. * QR decomposition
  8955. * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
  8956. * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
  8957. * @param {SpeedyMatrix} mat is m x n, input
  8958. * @param {object} [options]
  8959. * @param {'reduced'|'full'} [options.mode]
  8960. * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
  8961. */
  8962. qr(Q, R, mat, {
  8963. mode = 'reduced'
  8964. } = {}) {
  8965. const A = mat,
  8966. m = mat.rows,
  8967. n = mat.columns;
  8968. // validate shapes & mode
  8969. if (mode == 'reduced') {
  8970. if (Q.rows != m || Q.columns != n || R.rows != n || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
  8971. } else if (mode == 'full') {
  8972. if (Q.rows != m || Q.columns != m || R.rows != m || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
  8973. } else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
  8974. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  8975. wasm,
  8976. memory
  8977. }) => {
  8978. // allocate matrices
  8979. const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
  8980. const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
  8981. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  8982. // copy input matrices to WASM memory
  8983. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  8984. // run the WASM routine
  8985. if (mode == 'reduced') wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);else wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
  8986. // copy output matrices from WASM memory
  8987. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
  8988. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
  8989. // deallocate matrices
  8990. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  8991. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
  8992. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
  8993. // done!
  8994. return [Q, R];
  8995. });
  8996. }
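/*
 * Usage sketch (illustrative only), assuming `matrixFactory` is an instance of
 * this class: a reduced QR decomposition of a 3x2 matrix (entries column-major).
 *
 *   const A = matrixFactory(3, 2, [1, 1, 1,  0, 1, 2]);
 *   const Q = matrixFactory.Zeros(3, 2);
 *   const R = matrixFactory.Zeros(2, 2);
 *   matrixFactory.qr(Q, R, A).then(([Q, R]) => {
 *       // Q is 3x2 with orthonormal columns, R is 2x2 upper triangular
 *   });
 */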
  8997. /**
  8998. * Solve a possibly overdetermined system of linear
  8999. * equations Ax = b for x using ordinary least squares
  9000. * @param {SpeedyMatrix} solution n x 1, output
  9001. * @param {SpeedyMatrix} A m x n, m >= n, input
9002. * @param {SpeedyMatrix} b m x 1, input
9003. * @param {object} [options]
9004. * @param {'qr'} [options.method] solution method
  9005. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9006. */
  9007. ols(solution, A, b, {
  9008. method = 'qr'
  9009. } = {}) {
  9010. const m = A.rows,
  9011. n = A.columns;
  9012. const x = solution;
  9013. // validate shapes
  9014. if (m < n || n == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9015. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9016. wasm,
  9017. memory
  9018. }) => {
  9019. // allocate matrices
  9020. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  9021. const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
  9022. const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
  9023. // copy input matrices to WASM memory
  9024. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  9025. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
  9026. // run the WASM routine
  9027. switch (method) {
  9028. case 'qr':
  9029. wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
  9030. break;
  9031. default:
  9032. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9033. }
  9034. // copy output matrix from WASM memory
  9035. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
  9036. // deallocate matrices
  9037. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
  9038. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
  9039. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  9040. // done!
  9041. return solution;
  9042. });
  9043. }
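/*
 * Usage sketch (illustrative only), assuming `matrixFactory` is an instance of
 * this class: fit y = a*x + b to the points (0,1), (1,3), (2,5) by ordinary
 * least squares. The design matrix A is 3x2 (first column: x, second: ones)
 * and y holds the observed values; entries are column-major.
 *
 *   const A = matrixFactory(3, 2, [0, 1, 2,  1, 1, 1]);
 *   const y = matrixFactory(3, 1, [1, 3, 5]);
 *   const x = matrixFactory.Zeros(2, 1);
 *   matrixFactory.ols(x, A, y).then(x => {
 *       // x is approximately [2, 1]: slope 2, intercept 1
 *   });
 */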
  9044. /**
  9045. * Solve a system of linear equations Ax = b for x
  9046. * @param {SpeedyMatrix} solution m x 1, output
  9047. * @param {SpeedyMatrix} A m x m, input
9048. * @param {SpeedyMatrix} b m x 1, input
9049. * @param {object} [options]
9050. * @param {'qr'} [options.method] solution method
  9051. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9052. */
  9053. solve(solution, A, b, {
  9054. method = 'qr'
  9055. } = {}) {
  9056. const m = A.rows,
  9057. n = A.columns;
  9058. const x = solution;
  9059. // validate shapes
  9060. if (m != n) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over or underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9061. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9062. wasm,
  9063. memory
  9064. }) => {
  9065. // select method
  9066. switch (method) {
  9067. case 'qr':
  9068. return this.ols(x, A, b, {
  9069. method
  9070. });
  9071. /*case 'lu':
  9072. break;*/
  9073. default:
  9074. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9075. }
  9076. });
  9077. }
  9078. /**
  9079. * Compute a perspective transformation using 4 correspondences of points
  9080. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9081. * @param {SpeedyMatrix} src 2x4 input points - source coordinates
  9082. * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
  9083. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9084. */
  9085. perspective(homography, src, dest) {
  9086. // validate shapes
  9087. if (src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
  9088. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9089. wasm,
  9090. memory
  9091. }) => {
  9092. // allocate matrices
  9093. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9094. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9095. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9096. // copy input matrices to WASM memory
  9097. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9098. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9099. // run the WASM routine
  9100. wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
  9101. // copy output matrix from WASM memory
  9102. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9103. // deallocate matrices
  9104. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9105. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9106. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9107. // done!
  9108. return homography;
  9109. });
  9110. }
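/*
 * Usage sketch (illustrative only), assuming `matrixFactory` is an instance of
 * this class: map the unit square to a quadrilateral using exactly 4
 * correspondences. Points are stored column by column (column-major entries).
 *
 *   const src = matrixFactory(2, 4, [0,0,  1,0,  1,1,  0,1]);
 *   const dst = matrixFactory(2, 4, [10,10,  60,15,  55,70,  5,60]);
 *   const H = matrixFactory.Zeros(3, 3);
 *   matrixFactory.perspective(H, src, dst).then(H => {
 *       // H maps the src points to the dst points (up to scale)
 *   });
 */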
  9111. /**
  9112. * Compute a perspective transformation using n >= 4 correspondences of points
  9113. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9114. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9115. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9116. * @param {object} [options]
  9117. * @param {'default'|'pransac'} [options.method] method of computation
  9118. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9119. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9120. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9121. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9122. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9123. */
  9124. findHomography(homography, src, dest, {
  9125. method = 'default',
  9126. mask = null,
  9127. reprojectionError = 3,
  9128. numberOfHypotheses = 512,
  9129. bundleSize = 128
  9130. } = {}) {
  9131. // validate shapes
  9132. if (src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9133. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9134. wasm,
  9135. memory
  9136. }) => {
  9137. // allocate matrices
  9138. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9139. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9140. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9141. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9142. // copy input matrices to WASM memory
  9143. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9144. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9145. // run the WASM routine
  9146. switch (method) {
  9147. case 'pransac':
  9148. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9149. wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9150. break;
  9151. case 'default':
  9152. case 'dlt':
  9153. // obsolete
  9154. wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
  9155. break;
  9156. default:
  9157. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
  9158. }
  9159. // copy output matrices from WASM memory
  9160. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9161. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9162. // deallocate matrices
  9163. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9164. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9165. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9166. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9167. // done!
  9168. return homography;
  9169. });
  9170. }
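/*
 * Usage sketch (illustrative only), assuming `matrixFactory` is an instance of
 * this class: robust estimation with PRANSAC from n >= 4 noisy correspondences,
 * collecting an inliers mask. `srcPoints` and `dstPoints` are hypothetical
 * 2 x n matrices of matched keypoint coordinates.
 *
 *   const n = srcPoints.columns;
 *   const H = matrixFactory.Zeros(3, 3);
 *   const mask = matrixFactory.Zeros(1, n);
 *   matrixFactory.findHomography(H, srcPoints, dstPoints, {
 *       method: 'pransac',
 *       mask: mask,
 *       reprojectionError: 3,    // pixels
 *       numberOfHypotheses: 512,
 *       bundleSize: 128
 *   }).then(H => {
 *       // entries of mask are 1 for inliers, 0 for outliers
 *   });
 */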
  9171. /**
  9172. * Apply a perspective transformation to a set of 2D points
  9173. * @param {SpeedyMatrix} dest 2 x n output matrix
  9174. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9175. * @param {SpeedyMatrix} transform 3x3 homography matrix
  9176. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9177. */
  9178. applyPerspectiveTransform(dest, src, transform) {
  9179. // validate shapes
  9180. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 3 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
  9181. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9182. wasm,
  9183. memory
  9184. }) => {
  9185. // allocate matrices
  9186. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9187. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9188. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9189. // copy input matrices to WASM memory
  9190. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9191. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9192. // run the WASM routine
  9193. wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
  9194. // copy output matrix from WASM memory
  9195. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9196. // deallocate matrices
  9197. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9198. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9199. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9200. // done!
  9201. return dest;
  9202. });
  9203. }
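/*
 * Usage sketch (illustrative only), assuming `matrixFactory` is an instance of
 * this class: warp a set of 2D points with a 3x3 homography H (e.g., one
 * computed by perspective() or findHomography() above).
 *
 *   const pts = matrixFactory(2, 3, [0,0,  1,0,  0,1]);  // 3 points
 *   const out = matrixFactory.Zeros(2, 3);
 *   matrixFactory.applyPerspectiveTransform(out, pts, H).then(out => {
 *       // out holds the transformed points, column by column
 *   });
 */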
  9204. /**
  9205. * Compute an affine transform using 3 correspondences of points
  9206. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9207. * @param {SpeedyMatrix} src 2x3 input points - source coordinates
  9208. * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
9209. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to the affine transform
  9210. */
  9211. affine(transform, src, dest) {
  9212. // validate shapes
  9213. if (src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
  9214. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9215. wasm,
  9216. memory
  9217. }) => {
  9218. // allocate matrices
  9219. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9220. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9221. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9222. // copy input matrices to WASM memory
  9223. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9224. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9225. // run the WASM routine
  9226. wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
  9227. // copy output matrix from WASM memory
  9228. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9229. // deallocate matrices
  9230. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9231. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9232. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9233. // done!
  9234. return transform;
  9235. });
  9236. }
  9237. /**
  9238. * Compute an affine transformation using n >= 3 correspondences of points
  9239. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9240. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9241. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9242. * @param {object} [options]
  9243. * @param {'default'|'pransac'} [options.method] method of computation
  9244. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9245. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9246. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9247. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9248. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
  9249. */
  9250. findAffineTransform(transform, src, dest, {
  9251. method = 'default',
  9252. mask = null,
  9253. reprojectionError = 3,
  9254. numberOfHypotheses = 512,
  9255. bundleSize = 128
  9256. } = {}) {
  9257. // validate shapes
  9258. if (src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9259. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9260. wasm,
  9261. memory
  9262. }) => {
  9263. // allocate matrices
  9264. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9265. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9266. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9267. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9268. // copy input matrices to WASM memory
  9269. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9270. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9271. // run the WASM routine
  9272. switch (method) {
  9273. case 'pransac':
  9274. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9275. wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9276. break;
  9277. case 'default':
  9278. wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
  9279. break;
  9280. default:
  9281. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
  9282. }
  9283. // copy output matrices from WASM memory
  9284. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9285. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9286. // deallocate matrices
  9287. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9288. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9289. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9290. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9291. // done!
  9292. return transform;
  9293. });
  9294. }
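/*
 * Usage sketch for findAffineTransform() above (illustrative comment only; not
 * executed by this bundle). It assumes a SpeedyMatrix factory such as
 * Speedy.Matrix(rows, columns, data) / Speedy.Matrix.Zeros(rows, columns) and a
 * binding `matrixOps` for this routine — hypothetical names here.
 *
 *   const src = Speedy.Matrix(2, 4, [0,0, 100,0, 100,100, 0,100]);      // 2 x n source points
 *   const dest = Speedy.Matrix(2, 4, [10,20, 110,20, 110,120, 10,120]); // 2 x n destination points
 *   const transform = Speedy.Matrix.Zeros(2, 3);                        // 2 x 3 output
 *   const mask = Speedy.Matrix.Zeros(1, 4);                             // 1 x n inlier mask (pransac only)
 *
 *   matrixOps.findAffineTransform(transform, src, dest, {
 *       method: 'pransac',
 *       mask,
 *       reprojectionError: 3 // in pixels
 *   }).then(affine => console.log('affine:', affine, 'inliers:', mask));
 */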
  9295. /**
  9296. * Apply an affine transformation to a set of 2D points
  9297. * @param {SpeedyMatrix} dest 2 x n output matrix
  9298. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9299. * @param {SpeedyMatrix} transform 2x3 affine transform
  9300. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9301. */
  9302. applyAffineTransform(dest, src, transform) {
  9303. // validate shapes
  9304. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
  9305. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9306. wasm,
  9307. memory
  9308. }) => {
  9309. // allocate matrices
  9310. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9311. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9312. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9313. // copy input matrices to WASM memory
  9314. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9315. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9316. // run the WASM routine
  9317. wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
  9318. // copy output matrix from WASM memory
  9319. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9320. // deallocate matrices
  9321. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9322. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9323. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9324. // done!
  9325. return dest;
  9326. });
  9327. }
  9328. }
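/*
 * Companion sketch for applyAffineTransform() above (comment only; the matrix
 * factory and `matrixOps` binding are the same hypothetical names as in the
 * previous sketch). Once a 2x3 transform has been estimated, it can be applied
 * to a 2 x n set of points, writing the result into a preallocated 2 x n matrix.
 *
 *   const points = Speedy.Matrix(2, 2, [0,0, 50,50]); // two input points
 *   const mapped = Speedy.Matrix.Zeros(2, 2);         // output buffer
 *   matrixOps.applyAffineTransform(mapped, points, transform)
 *            .then(out => console.log(out));          // out === mapped
 */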
  9329. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
  9330. /*
  9331. * speedy-vision.js
  9332. * GPU-accelerated Computer Vision for JavaScript
  9333. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9334. *
  9335. * Licensed under the Apache License, Version 2.0 (the "License");
  9336. * you may not use this file except in compliance with the License.
  9337. * You may obtain a copy of the License at
  9338. *
  9339. * http://www.apache.org/licenses/LICENSE-2.0
  9340. *
  9341. * Unless required by applicable law or agreed to in writing, software
  9342. * distributed under the License is distributed on an "AS IS" BASIS,
  9343. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9344. * See the License for the specific language governing permissions and
  9345. * limitations under the License.
  9346. *
  9347. * pipeline-message.js
  9348. * A message that is shared between nodes of a pipeline
  9349. */
  9350. /**
  9351. * Types of messages
  9352. * @enum {Symbol}
  9353. */
  9354. const SpeedyPipelineMessageType = Object.freeze({
  9355. Nothing: Symbol('Nothing'),
  9356. Image: Symbol('Image'),
  9357. Keypoints: Symbol('Keypoints'),
  9358. Vector2: Symbol('Vector2'),
  9359. LSHTables: Symbol('LSHTables'),
  9360. KeypointMatches: Symbol('KeypointMatches')
  9361. });
  9362. /**
  9363. * Diagnostic data
  9364. * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
  9365. */
  9366. /**
  9367. * A message that is shared between nodes of a pipeline
  9368. * @abstract
  9369. */
  9370. class SpeedyPipelineMessage {
  9371. /**
  9372. * Constructor
  9373. * @param {SpeedyPipelineMessageType} type message type
  9374. */
  9375. constructor(type) {
  9376. /** @type {SpeedyPipelineMessageType} message type */
  9377. this._type = type;
  9378. }
  9379. /**
  9380. * Message type
  9381. * @returns {SpeedyPipelineMessageType}
  9382. */
  9383. get type() {
  9384. return this._type;
  9385. }
  9386. /**
  9387. * Checks if the type of this message is equal to parameter type
  9388. * @param {SpeedyPipelineMessageType} type
  9389. * @returns {boolean}
  9390. */
  9391. hasType(type) {
  9392. return this._type === type;
  9393. }
  9394. /**
  9395. * Is this an empty message?
  9396. * @returns {boolean}
  9397. */
  9398. isEmpty() {
  9399. return this.hasType(SpeedyPipelineMessageType.Nothing);
  9400. }
  9401. /**
  9402. * Convert to string
  9403. * @returns {string}
  9404. */
  9405. toString() {
  9406. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this.type);
  9407. return `message of type ${type}`;
  9408. }
  9409. /**
  9410. * Inspect this message for debugging purposes
  9411. * @param {SpeedyGPU} gpu
  9412. * @returns {SpeedyPipelineMessageDiagnosticData}
  9413. */
  9414. inspect(gpu) {
  9415. throw new utils_errors/* AbstractMethodError */.aQ();
  9416. }
  9417. /**
  9418. * Set parameters
  9419. * @abstract
  9420. * @param {...any} args
  9421. * @returns {SpeedyPipelineMessage} this message
  9422. */
  9423. set(...args) {
  9424. throw new utils_errors/* AbstractMethodError */.aQ();
  9425. }
  9426. /**
  9427. * Create a message of the specified type
  9428. * @param {SpeedyPipelineMessageType} type
  9429. * @returns {SpeedyPipelineMessage}
  9430. */
  9431. static create(type) {
  9432. return createMessage(type);
  9433. }
  9434. }
  9435. /**
  9436. * An empty message carrying nothing
  9437. */
  9438. class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage {
  9439. /**
  9440. * Constructor
  9441. */
  9442. constructor() {
  9443. super(SpeedyPipelineMessageType.Nothing);
  9444. }
  9445. /**
  9446. * Set parameters
  9447. * @returns {SpeedyPipelineMessage} this message
  9448. */
  9449. set() {
  9450. return this;
  9451. }
  9452. /**
  9453. * Inspect this message for debugging purposes
  9454. * @param {SpeedyGPU} gpu
  9455. * @returns {SpeedyPipelineMessageDiagnosticData}
  9456. */
  9457. inspect(gpu) {
  9458. return {
  9459. type: this.constructor.name
  9460. };
  9461. }
  9462. }
  9463. /**
  9464. * A message transporting an image
  9465. */
  9466. class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage {
  9467. /**
  9468. * Constructor
  9469. */
  9470. constructor() {
  9471. super(SpeedyPipelineMessageType.Image);
  9472. /** @type {SpeedyDrawableTexture} the image we carry */
  9473. this._image = null;
  9474. /** @type {ImageFormat} image format */
  9475. this._format = types/* ImageFormat */.f5.RGBA;
  9476. }
  9477. /**
  9478. * Set parameters
  9479. * @param {SpeedyDrawableTexture} image the image we carry
  9480. * @param {ImageFormat} [format] image format
  9481. * @returns {SpeedyPipelineMessage} this message
  9482. */
  9483. set(image, format = types/* ImageFormat */.f5.RGBA) {
  9484. // set parameters
  9485. this._image = image;
  9486. this._format = format;
  9487. // done!
  9488. return this;
  9489. }
  9490. /**
  9491. * Inspect this message for debugging purposes
  9492. * @param {SpeedyGPU} gpu
  9493. * @returns {SpeedyPipelineMessageDiagnosticData}
  9494. */
  9495. inspect(gpu) {
  9496. const formatName = Object.keys(types/* ImageFormat */.f5).find(format => types/* ImageFormat */.f5[format] === this.format);
  9497. return {
  9498. type: this.constructor.name,
  9499. format: String(formatName),
  9500. imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
  9501. image: this.image ? '<image data>' /* possibly MBs of data */ : '',
  9502. hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
  9503. };
  9504. }
  9505. /**
  9506. * The image we carry
  9507. * @returns {SpeedyDrawableTexture}
  9508. */
  9509. get image() {
  9510. return this._image;
  9511. }
  9512. /**
  9513. * Image format
  9514. * @returns {ImageFormat}
  9515. */
  9516. get format() {
  9517. return this._format;
  9518. }
  9519. }
  9520. /**
  9521. * A message transporting keypoints
  9522. */
  9523. class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage {
  9524. /**
  9525. * Constructor
  9526. */
  9527. constructor() {
  9528. super(SpeedyPipelineMessageType.Keypoints);
  9529. /** @type {SpeedyDrawableTexture} encoded keypoints */
  9530. this._encodedKeypoints = null;
  9531. /** @type {number} descriptor size in bytes */
  9532. this._descriptorSize = 0;
  9533. /** @type {number} extra size in bytes */
  9534. this._extraSize = 0;
  9535. /** @type {number} encoder length */
  9536. this._encoderLength = 1;
  9537. }
  9538. /**
  9539. * Set parameters
  9540. * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
  9541. * @param {number} descriptorSize in bytes
  9542. * @param {number} extraSize in bytes
  9543. * @param {number} encoderLength positive integer
  9544. * @returns {SpeedyPipelineMessage} this message
  9545. */
  9546. set(encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  9547. // set parameters
  9548. this._encodedKeypoints = encodedKeypoints;
  9549. this._descriptorSize = descriptorSize | 0;
  9550. this._extraSize = extraSize | 0;
  9551. this._encoderLength = encoderLength | 0;
  9552. // validate
  9553. utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
  9554. utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
  9555. utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
  9556. // done!
  9557. return this;
  9558. }
  9559. /**
  9560. * Inspect this message for debugging purposes
  9561. * @param {SpeedyGPU} gpu
  9562. * @returns {SpeedyPipelineMessageDiagnosticData}
  9563. */
  9564. inspect(gpu) {
  9565. return {
  9566. type: this.constructor.name,
  9567. descriptorSize: this.descriptorSize,
  9568. extraSize: this.extraSize,
  9569. encoderLength: this.encoderLength,
  9570. encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
  9571. encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : ''
  9572. };
  9573. }
  9574. /**
  9575. * Encoded keypoints
  9576. * @returns {SpeedyDrawableTexture}
  9577. */
  9578. get encodedKeypoints() {
  9579. return this._encodedKeypoints;
  9580. }
  9581. /**
  9582. * Descriptor size, in bytes
  9583. * @returns {number}
  9584. */
  9585. get descriptorSize() {
  9586. return this._descriptorSize;
  9587. }
  9588. /**
  9589. * Extra size, in bytes
  9590. * @returns {number}
  9591. */
  9592. get extraSize() {
  9593. return this._extraSize;
  9594. }
  9595. /**
  9596. * Encoder length
  9597. * @returns {number}
  9598. */
  9599. get encoderLength() {
  9600. return this._encoderLength;
  9601. }
  9602. }
9603. /**
  9604. * A message transporting a set of 2D vectors
  9605. */
  9606. class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage {
  9607. /**
  9608. * Constructor
  9609. */
  9610. constructor() {
  9611. super(SpeedyPipelineMessageType.Vector2);
  9612. /** @type {SpeedyDrawableTexture} the set of vectors */
  9613. this._vectors = null;
  9614. }
  9615. /**
  9616. * Set parameters
  9617. * @param {SpeedyDrawableTexture} vectors the set of vectors
  9618. * @returns {SpeedyPipelineMessage} this message
  9619. */
  9620. set(vectors) {
  9621. // set parameters
  9622. this._vectors = vectors;
  9623. // done!
  9624. return this;
  9625. }
  9626. /**
  9627. * Inspect this message for debugging purposes
  9628. * @param {SpeedyGPU} gpu
  9629. * @returns {SpeedyPipelineMessageDiagnosticData}
  9630. */
  9631. inspect(gpu) {
  9632. return {
  9633. type: this.constructor.name,
  9634. vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
  9635. vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
  9636. };
  9637. }
  9638. /**
  9639. * The set of vectors
  9640. * @returns {SpeedyDrawableTexture}
  9641. */
  9642. get vectors() {
  9643. return this._vectors;
  9644. }
  9645. }
  9646. /**
  9647. * A message transporting LSH tables
  9648. */
  9649. class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage {
  9650. /**
  9651. * Constructor
  9652. */
  9653. constructor() {
  9654. super(SpeedyPipelineMessageType.LSHTables);
  9655. /** @type {SpeedyLSH} LSH data structure */
  9656. this._lsh = null;
  9657. }
  9658. /**
  9659. * Set parameters
  9660. * @param {SpeedyLSH} lsh
  9661. * @returns {SpeedyPipelineMessage} this message
  9662. */
  9663. set(lsh) {
  9664. // set parameters
  9665. this._lsh = lsh;
  9666. // done!
  9667. return this;
  9668. }
  9669. /**
  9670. * Inspect this message for debugging purposes
  9671. * @param {SpeedyGPU} gpu
  9672. * @returns {SpeedyPipelineMessageDiagnosticData}
  9673. */
  9674. inspect(gpu) {
  9675. return {
  9676. type: this.constructor.name,
  9677. lsh: '<LSH tables>'
  9678. };
  9679. }
  9680. /**
  9681. * LSH data structure
  9682. * @returns {SpeedyLSH}
  9683. */
  9684. get lsh() {
  9685. return this._lsh;
  9686. }
  9687. }
9688. /**
  9689. * A message transporting a set of keypoint matches
  9690. */
  9691. class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage {
  9692. /**
  9693. * Constructor
  9694. */
  9695. constructor() {
  9696. super(SpeedyPipelineMessageType.KeypointMatches);
  9697. /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
  9698. this._encodedMatches = null;
  9699. /** @type {number} number of matches per keypoint */
  9700. this._matchesPerKeypoint = 1;
  9701. }
  9702. /**
  9703. * Set parameters
  9704. * @param {SpeedyDrawableTexture} encodedMatches
  9705. * @param {number} matchesPerKeypoint
  9706. * @returns {SpeedyPipelineMessage} this message
  9707. */
  9708. set(encodedMatches, matchesPerKeypoint) {
  9709. // set parameters
  9710. this._encodedMatches = encodedMatches;
  9711. this._matchesPerKeypoint = matchesPerKeypoint | 0;
  9712. // validate
  9713. utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
  9714. // done!
  9715. return this;
  9716. }
  9717. /**
  9718. * Inspect this message for debugging purposes
  9719. * @param {SpeedyGPU} gpu
  9720. * @returns {SpeedyPipelineMessageDiagnosticData}
  9721. */
  9722. inspect(gpu) {
  9723. return {
  9724. type: this.constructor.name,
  9725. matchesPerKeypoint: this.matchesPerKeypoint,
  9726. encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
  9727. encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
  9728. };
  9729. }
  9730. /**
  9731. * The matches
  9732. * @returns {SpeedyDrawableTexture}
  9733. */
  9734. get encodedMatches() {
  9735. return this._encodedMatches;
  9736. }
  9737. /**
  9738. * Number of matches per keypoint
  9739. * @returns {number}
  9740. */
  9741. get matchesPerKeypoint() {
  9742. return this._matchesPerKeypoint;
  9743. }
  9744. }
  9745. //
  9746. // Utilities
  9747. //
  9748. /** Map message type to message class */
  9749. const MESSAGE_CLASS = Object.freeze({
  9750. [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
  9751. [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
  9752. [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
  9753. [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
  9754. [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
  9755. [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches
  9756. });
  9757. /**
  9758. * Create a message of the specified type
  9759. * @param {SpeedyPipelineMessageType} type
  9760. * @returns {SpeedyPipelineMessage}
  9761. */
  9762. function createMessage(type) {
  9763. //return Reflect.construct(MESSAGE_CLASS[type], []);
  9764. return new MESSAGE_CLASS[
  9765. // error TS2538: Type 'Symbol' cannot be used as an index type.
  9766. // heck, what the hack...
  9767. /** @type {any} */
  9768. type]();
  9769. }
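/*
 * Illustrative sketch (comment only): creating and filling a message via the
 * factory above. SpeedyPipelineMessage.create() looks the concrete class up in
 * MESSAGE_CLASS by its type Symbol; set() then stores the payload. The texture
 * variable below is hypothetical.
 *
 *   const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image);
 *   msg.set(someDrawableTexture);                  // a SpeedyPipelineMessageWithImage
 *   msg.hasType(SpeedyPipelineMessageType.Image);  // true
 *   msg.isEmpty();                                 // false
 */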
  9770. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
  9771. /*
  9772. * speedy-vision.js
  9773. * GPU-accelerated Computer Vision for JavaScript
  9774. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9775. *
  9776. * Licensed under the Apache License, Version 2.0 (the "License");
  9777. * you may not use this file except in compliance with the License.
  9778. * You may obtain a copy of the License at
  9779. *
  9780. * http://www.apache.org/licenses/LICENSE-2.0
  9781. *
  9782. * Unless required by applicable law or agreed to in writing, software
  9783. * distributed under the License is distributed on an "AS IS" BASIS,
  9784. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9785. * See the License for the specific language governing permissions and
  9786. * limitations under the License.
  9787. *
  9788. * pipeline-portspec.js
  9789. * Specification (requirements) of a port of a node of a pipeline
  9790. */
  9791. /**
  9792. * A message constraint is a message validation predicate
  9793. * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
  9794. */
  9795. /**
  9796. * A validation predicate that validates all messages
  9797. * @type {SpeedyPipelineMessageConstraint}
  9798. */
  9799. const always = message => true;
  9800. /**
  9801. * Specification (requirements) of a port of a node of a pipeline
  9802. */
  9803. class SpeedyPipelinePortSpec {
  9804. /**
  9805. * Constructor
  9806. * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
  9807. * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
  9808. */
  9809. constructor(expectedMessageType, messageConstraint = always) {
  9810. /** @type {SpeedyPipelineMessageType} expected message type */
  9811. this._expectedMessageType = expectedMessageType;
  9812. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  9813. this._isValidMessage = typeof messageConstraint === 'function' ? messageConstraint : always;
  9814. // expect a valid type
  9815. utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
  9816. }
  9817. /**
  9818. * Checks if two specs have the same expected type
  9819. * @param {SpeedyPipelinePortSpec} spec
  9820. * @returns {boolean}
  9821. */
  9822. isCompatibleWith(spec) {
  9823. return this._expectedMessageType == spec._expectedMessageType;
  9824. }
  9825. /**
  9826. * Is the given message accepted by a port that abides by this specification?
  9827. * @param {SpeedyPipelineMessage} message
  9828. * @returns {boolean}
  9829. */
  9830. accepts(message) {
  9831. return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
  9832. }
  9833. /**
  9834. * Convert to string
  9835. * @returns {string}
  9836. */
  9837. toString() {
  9838. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this._expectedMessageType);
  9839. return `Port expects ${type} satisfying ${this._isValidMessage}`;
  9840. }
  9841. /**
  9842. * Expected message type
  9843. * @returns {SpeedyPipelineMessageType}
  9844. */
  9845. get expectedMessageType() {
  9846. return this._expectedMessageType;
  9847. }
  9848. }
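/*
 * Example sketch (comment only): a port spec that only accepts image messages
 * carrying a square texture. accepts() checks the message type first and then
 * the constraint; `imageMessage` is a hypothetical SpeedyPipelineMessageWithImage.
 *
 *   const spec = new SpeedyPipelinePortSpec(
 *       SpeedyPipelineMessageType.Image,
 *       msg => msg.image != null && msg.image.width === msg.image.height
 *   );
 *   spec.accepts(imageMessage); // true only for Image messages with square textures
 */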
  9849. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
  9850. /*
  9851. * speedy-vision.js
  9852. * GPU-accelerated Computer Vision for JavaScript
  9853. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9854. *
  9855. * Licensed under the Apache License, Version 2.0 (the "License");
  9856. * you may not use this file except in compliance with the License.
  9857. * You may obtain a copy of the License at
  9858. *
  9859. * http://www.apache.org/licenses/LICENSE-2.0
  9860. *
  9861. * Unless required by applicable law or agreed to in writing, software
  9862. * distributed under the License is distributed on an "AS IS" BASIS,
  9863. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9864. * See the License for the specific language governing permissions and
  9865. * limitations under the License.
  9866. *
  9867. * pipeline-port.js
  9868. * Port of a node of a pipeline
  9869. */
  9870. // Constants
  9871. const DEFAULT_INPUT_PORT_NAME = 'in';
  9872. const DEFAULT_OUTPUT_PORT_NAME = 'out';
  9873. const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
  9874. const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
  9875. /**
  9876. * Diagnostic data
  9877. * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
  9878. */
  9879. /**
  9880. * Port of a node of a pipeline
  9881. * @abstract
  9882. */
  9883. class SpeedyPipelinePort {
  9884. /**
  9885. * Constructor
  9886. * @param {string} name the name of this port
  9887. * @param {SpeedyPipelinePortSpec} spec port specification
  9888. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9889. */
  9890. constructor(name, spec, node) {
  9891. /** @type {string} the name of this port */
  9892. this._name = String(name);
  9893. /** @type {SpeedyPipelinePortSpec} the specification of this port */
  9894. this._spec = spec;
  9895. /** @type {SpeedyPipelineNode} the node to which this port belongs */
  9896. this._node = node;
  9897. /** @type {SpeedyPipelineMessage} the message located in this port */
  9898. this._message = EMPTY_MESSAGE;
  9899. // check if we've got an acceptable port name
  9900. utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
  9901. }
  9902. /**
  9903. * The name of this port
  9904. * @returns {string}
  9905. */
  9906. get name() {
  9907. return this._name;
  9908. }
  9909. /**
  9910. * The node to which this port belongs
  9911. * @returns {SpeedyPipelineNode}
  9912. */
  9913. get node() {
  9914. return this._node;
  9915. }
  9916. /**
  9917. * Connect this port to another
  9918. * @abstract
  9919. * @param {SpeedyPipelinePort} port
  9920. */
  9921. connectTo(port) {
  9922. throw new utils_errors/* AbstractMethodError */.aQ();
  9923. }
  9924. /**
  9925. * Is this an input port?
  9926. * @abstract
  9927. * @returns {boolean}
  9928. */
  9929. isInputPort() {
  9930. throw new utils_errors/* AbstractMethodError */.aQ();
  9931. }
  9932. /**
  9933. * Is this an output port?
  9934. * @returns {boolean}
  9935. */
  9936. isOutputPort() {
  9937. return !this.isInputPort();
  9938. }
  9939. /**
  9940. * Clear the message stored in this port
  9941. */
  9942. clearMessage() {
  9943. this._message = EMPTY_MESSAGE;
  9944. }
  9945. /**
  9946. * Is there a valid message located in this port?
  9947. * @returns {boolean}
  9948. */
  9949. hasMessage() {
  9950. return !this._message.isEmpty();
  9951. }
  9952. /**
  9953. * Read the message that is in this port
  9954. * @returns {SpeedyPipelineMessage}
  9955. */
  9956. read() {
  9957. if (this._message.isEmpty()) throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
  9958. return this._message;
  9959. }
  9960. /**
  9961. * Write a message to this port
  9962. * @param {SpeedyPipelineMessage} message
  9963. */
  9964. write(message) {
  9965. throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
  9966. }
  9967. /**
  9968. * Inspect this port for debugging purposes
  9969. * @param {SpeedyGPU} gpu
  9970. * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
  9971. */
  9972. inspect(gpu) {
  9973. return this._message.inspect(gpu);
  9974. }
  9975. /**
  9976. * Default port name
  9977. * @abstract
  9978. * @returns {string}
  9979. */
  9980. static get DEFAULT_NAME() {
  9981. throw new utils_errors/* AbstractMethodError */.aQ();
  9982. }
  9983. }
  9984. /**
  9985. * Output port
  9986. */
  9987. class SpeedyPipelineOutputPort extends SpeedyPipelinePort {
  9988. /**
  9989. * Constructor
  9990. * @param {string} name the name of this port
  9991. * @param {SpeedyPipelinePortSpec} spec port specification
  9992. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9993. */
  9994. constructor(name, spec, node) {
  9995. super(name, spec, node);
  9996. /** @type {SpeedyPipelineMessage} cached message */
  9997. this._cachedMessage = null;
  9998. }
  9999. /**
  10000. * Connect this port to another
  10001. * @param {SpeedyPipelineInputPort} port
  10002. */
  10003. connectTo(port) {
  10004. if (!port.isInputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
  10005. port.connectTo(this);
  10006. }
  10007. /**
  10008. * Is this an input port?
  10009. * @returns {boolean}
  10010. */
  10011. isInputPort() {
  10012. return false;
  10013. }
  10014. /**
  10015. * Write a message to this port
  10016. * @param {SpeedyPipelineMessage} message
  10017. */
  10018. write(message) {
  10019. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
  10020. this._message = message;
  10021. }
  10022. /**
  10023. * Write a message to this port using a cached message object
  10024. * @param {...any} args to be passed to SpeedyPipelineMessage.set()
  10025. */
  10026. swrite(...args) {
  10027. if (this._cachedMessage == null) this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
  10028. this.write(this._cachedMessage.set(...args));
  10029. }
  10030. /**
  10031. * Default port name
  10032. * @returns {string}
  10033. */
  10034. static get DEFAULT_NAME() {
  10035. return DEFAULT_OUTPUT_PORT_NAME;
  10036. }
  10037. }
  10038. /**
  10039. * Input port
  10040. */
  10041. class SpeedyPipelineInputPort extends SpeedyPipelinePort {
  10042. /**
  10043. * Constructor
  10044. * @param {string} name the name of this port
  10045. * @param {SpeedyPipelinePortSpec} spec port specification
  10046. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10047. */
  10048. constructor(name, spec, node) {
  10049. super(name, spec, node);
  10050. /** @type {SpeedyPipelineOutputPort|null} incoming link */
  10051. this._incomingLink = null;
  10052. }
  10053. /**
  10054. * Incoming link
  10055. * @returns {SpeedyPipelineOutputPort|null}
  10056. */
  10057. get incomingLink() {
  10058. return this._incomingLink;
  10059. }
  10060. /**
  10061. * Connect this port to another
  10062. * @param {SpeedyPipelineOutputPort} port
  10063. */
  10064. connectTo(port) {
  10065. if (!port.isOutputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);else if (!this._spec.isCompatibleWith(port._spec)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
  10066. this._incomingLink = port;
  10067. }
  10068. /**
  10069. * Unlink this port
  10070. */
  10071. disconnect() {
  10072. this._incomingLink = null;
  10073. }
  10074. /**
  10075. * Is this an input port?
  10076. * @returns {boolean}
  10077. */
  10078. isInputPort() {
  10079. return true;
  10080. }
  10081. /**
  10082. * Receive a message using the incoming link
  10083. * @param {string} [nodeName]
  10084. * @returns {SpeedyPipelineMessage}
  10085. */
  10086. pullMessage(nodeName = '') {
  10087. const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
  10088. if (this._incomingLink == null) throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
  10089. const message = this._incomingLink.read();
  10090. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
  10091. return this._message = message;
  10092. }
  10093. /**
  10094. * Default port name
  10095. * @returns {string}
  10096. */
  10097. static get DEFAULT_NAME() {
  10098. return DEFAULT_INPUT_PORT_NAME;
  10099. }
  10100. }
  10101. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
  10102. /*
  10103. * speedy-vision.js
  10104. * GPU-accelerated Computer Vision for JavaScript
  10105. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10106. *
  10107. * Licensed under the Apache License, Version 2.0 (the "License");
  10108. * you may not use this file except in compliance with the License.
  10109. * You may obtain a copy of the License at
  10110. *
  10111. * http://www.apache.org/licenses/LICENSE-2.0
  10112. *
  10113. * Unless required by applicable law or agreed to in writing, software
  10114. * distributed under the License is distributed on an "AS IS" BASIS,
  10115. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10116. * See the License for the specific language governing permissions and
  10117. * limitations under the License.
  10118. *
  10119. * pipeline-portbuilder.js
  10120. * Builder of a port of a node of a pipeline
  10121. */
  10122. /**
  10123. * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
  10124. */
  10125. /**
  10126. * Builder of a port of a node of a pipeline
  10127. */
  10128. class SpeedyPipelinePortBuilder {
  10129. /**
  10130. * Constructor
  10131. * @param {typeof SpeedyPipelinePort} portClass input or output?
  10132. * @param {string} portName
  10133. */
  10134. constructor(portClass, portName) {
  10135. /** @type {typeof SpeedyPipelinePort} input or output? */
  10136. this._class = portClass;
  10137. /** @type {string} port name */
  10138. this._name = String(portName);
  10139. /** @type {SpeedyPipelineMessageType} accepted message type */
  10140. this._type = SpeedyPipelineMessageType.Nothing;
  10141. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10142. this._messageConstraint = undefined;
  10143. }
  10144. /**
  10145. * Declare that the new port expects a certain type of message
  10146. * @param {SpeedyPipelineMessageType} type expected type
  10147. * @returns {SpeedyPipelinePortBuilder} this builder
  10148. */
  10149. expects(type) {
  10150. utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
  10151. utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
  10152. this._type = type;
  10153. return this;
  10154. }
  10155. /**
  10156. * Declare that the new port expects messages satisfying a constraint
  10157. * @param {SpeedyPipelineMessageConstraint} constraint
  10158. * @returns {SpeedyPipelinePortBuilder} this builder
  10159. */
  10160. satisfying(constraint) {
  10161. utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
  10162. utils/* Utils */.A.assert(this._messageConstraint === undefined);
  10163. utils/* Utils */.A.assert(typeof constraint === 'function');
  10164. this._messageConstraint = constraint;
  10165. return this;
  10166. }
  10167. /**
  10168. * Build a port
  10169. * @param {SpeedyPipelineNode} node the node to which the new port will belong
  10170. * @returns {SpeedyPipelinePort}
  10171. */
  10172. build(node) {
  10173. const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
  10174. return Reflect.construct(this._class, [this._name, spec, node]);
  10175. }
  10176. }
  10177. /**
  10178. * Creates a builder for an input port
  10179. * @param {string} [portName]
  10180. * @returns {SpeedyPipelinePortBuilder}
  10181. */
  10182. function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10183. return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
  10184. }
  10185. /**
  10186. * Creates a builder for an output port
  10187. * @param {string} [portName]
  10188. * @returns {SpeedyPipelinePortBuilder}
  10189. */
  10190. function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10191. return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
  10192. }
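/*
 * Typical usage sketch (comment only): declaring the ports of a node with the
 * builders above. An array like this is what a SpeedyPipelineNode subclass
 * passes to its constructor (see pipeline-node.js below); the port name and
 * constraint are illustrative.
 *
 *   const portBuilders = [
 *       InputPort().expects(SpeedyPipelineMessageType.Image),
 *       OutputPort('corners').expects(SpeedyPipelineMessageType.Image)
 *                            .satisfying(msg => msg.image != null)
 *   ];
 */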
  10193. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
  10194. /*
  10195. * speedy-vision.js
  10196. * GPU-accelerated Computer Vision for JavaScript
  10197. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10198. *
  10199. * Licensed under the Apache License, Version 2.0 (the "License");
  10200. * you may not use this file except in compliance with the License.
  10201. * You may obtain a copy of the License at
  10202. *
  10203. * http://www.apache.org/licenses/LICENSE-2.0
  10204. *
  10205. * Unless required by applicable law or agreed to in writing, software
  10206. * distributed under the License is distributed on an "AS IS" BASIS,
  10207. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10208. * See the License for the specific language governing permissions and
  10209. * limitations under the License.
  10210. *
  10211. * pipeline-node.js
  10212. * Node of a pipeline
  10213. */
  10214. /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
  10215. /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
  10216. /** Generate a random name for a node */
  10217. const generateRandomName = () => Math.random().toString(16).substr(2);
  10218. /** Create an empty input port dictionary */
  10219. const createInputPortDictionary = () => ( /** @type {InputPortDictionary} */Object.create(null));
  10220. /** Create an empty output port dictionary */
  10221. const createOutputPortDictionary = () => ( /** @type {OutputPortDictionary} */Object.create(null));
  10222. /**
  10223. * Map an array of input ports to an InputPortDictionary whose keys are their names
  10224. * @param {SpeedyPipelineInputPort[]} ports
  10225. * @returns {InputPortDictionary}
  10226. */
  10227. function InputPortDictionary(ports) {
  10228. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createInputPortDictionary());
  10229. }
  10230. /**
  10231. * Map an array of output ports to an OutputPortDictionary whose keys are their names
  10232. * @param {SpeedyPipelineOutputPort[]} ports
  10233. * @returns {OutputPortDictionary}
  10234. */
  10235. function OutputPortDictionary(ports) {
  10236. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createOutputPortDictionary());
  10237. }
  10238. /** A flag used for debugging purposes */
  10239. let _texView = false;
  10240. /**
  10241. * Node of a pipeline
  10242. * @abstract
  10243. */
  10244. class SpeedyPipelineNode {
  10245. /**
  10246. * Constructor
  10247. * @param {string} [name] the name of this node
  10248. * @param {number} [texCount] number of work textures
  10249. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10250. */
  10251. constructor(name = generateRandomName(), texCount = 0, portBuilders = []) {
  10252. /** @type {string} the name of this node */
  10253. this._name = String(name);
  10254. /** @type {SpeedyDrawableTexture[]} work texture(s) */
  10255. this._tex = new Array(texCount).fill(null);
  10256. // build the ports
  10257. const ports = portBuilders.map(builder => builder.build(this));
  10258. const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ports.filter(port => port.isInputPort());
  10259. const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ports.filter(port => port.isOutputPort());
  10260. /** @type {InputPortDictionary} input ports */
  10261. this._inputPorts = InputPortDictionary(inputPorts);
  10262. /** @type {OutputPortDictionary} output ports */
  10263. this._outputPorts = OutputPortDictionary(outputPorts);
  10264. // validate
  10265. if (this._name.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);else if (portBuilders.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
  10266. }
  10267. /**
  10268. * The name of this node
  10269. * @returns {string}
  10270. */
  10271. get name() {
  10272. return this._name;
  10273. }
  10274. /**
  10275. * Name and type of this node
  10276. * @returns {string}
  10277. */
  10278. get fullName() {
  10279. return `${this.constructor.name}[${this.name}]`;
  10280. }
  10281. /**
  10282. * Find input port by name
  10283. * @param {string} [portName]
  10284. * @returns {SpeedyPipelineInputPort}
  10285. */
  10286. input(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10287. if (portName in this._inputPorts) return this._inputPorts[portName];
  10288. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
  10289. }
  10290. /**
  10291. * Find output port by name
  10292. * @param {string} [portName]
  10293. * @returns {SpeedyPipelineOutputPort}
  10294. */
  10295. output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10296. if (portName in this._outputPorts) return this._outputPorts[portName];
  10297. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
  10298. }
  10299. /**
  10300. * Get data from the input ports and execute
10301. * the task that this node is supposed to perform
  10302. * @param {SpeedyGPU} gpu
  10303. * @returns {void|SpeedyPromise<void>}
  10304. */
  10305. execute(gpu) {
  10306. let portName;
  10307. // clear output ports
  10308. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10309. // let the input ports receive what is due
  10310. for (portName in this._inputPorts) this._inputPorts[portName].pullMessage(this.fullName);
  10311. // run the task
  10312. const runTask = this._run(gpu);
  10313. if (typeof runTask === 'undefined') return void this._finishExecution(gpu);else return runTask.then(() => this._finishExecution(gpu));
  10314. }
  10315. /**
  10316. * Finish the execution of this node;
  10317. * to be called after execute()
  10318. * @param {SpeedyGPU} gpu
  10319. */
  10320. _finishExecution(gpu) {
  10321. // ensure that no output ports are empty
  10322. for (const portName in this._outputPorts) {
  10323. utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
  10324. }
10325. // log diagnostic data for the node / pipeline
  10326. if (settings/* Settings */.w.logging === 'diagnostic') {
  10327. utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
  10328. // Inspecting the data has performance implications.
  10329. // It is for diagnostic purposes only, not meant to be done in production!
  10330. for (const portName in this._inputPorts) utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
  10331. for (const portName in this._outputPorts) utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
  10332. }
  10333. }
  10334. /**
  10335. * Run the specific task of this node
  10336. * @abstract
  10337. * @param {SpeedyGPU} gpu
  10338. * @returns {void|SpeedyPromise<void>}
  10339. */
  10340. _run(gpu) {
  10341. throw new utils_errors/* AbstractMethodError */.aQ();
  10342. }
  10343. /**
  10344. * Initializes this node
  10345. * @param {SpeedyGPU} gpu
  10346. */
  10347. init(gpu) {
  10348. gpu.subscribe(this._allocateWorkTextures, this, gpu);
  10349. this._allocateWorkTextures(gpu);
  10350. }
  10351. /**
  10352. * Releases this node
  10353. * @param {SpeedyGPU} gpu
  10354. */
  10355. release(gpu) {
  10356. this._deallocateWorkTextures(gpu);
  10357. gpu.unsubscribe(this._allocateWorkTextures, this);
  10358. }
  10359. /**
  10360. * Clear all ports
  10361. */
  10362. clearPorts() {
  10363. let portName;
  10364. for (portName in this._inputPorts) this._inputPorts[portName].clearMessage();
  10365. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10366. }
  10367. /**
  10368. * Find all nodes that feed input to this node
  10369. * @returns {SpeedyPipelineNode[]}
  10370. */
  10371. inputNodes() {
  10372. const nodes = [];
  10373. for (const portName in this._inputPorts) {
  10374. const port = this._inputPorts[portName];
  10375. if (port.incomingLink != null) nodes.push(port.incomingLink.node);
  10376. }
  10377. return nodes;
  10378. }
  10379. /**
  10380. * Is this a source of the pipeline?
  10381. * @returns {boolean}
  10382. */
  10383. isSource() {
  10384. return false;
  10385. }
  10386. /**
  10387. * Is this a sink of the pipeline?
  10388. * @returns {boolean}
  10389. */
  10390. isSink() {
  10391. return false;
  10392. // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
  10393. //return Object.keys(this._outputPorts).length == 0;
  10394. }
  10395. /**
  10396. * Allocate work texture(s)
  10397. * @param {SpeedyGPU} gpu
  10398. */
  10399. _allocateWorkTextures(gpu) {
  10400. for (let j = 0; j < this._tex.length; j++) this._tex[j] = gpu.texturePool.allocate();
  10401. }
  10402. /**
  10403. * Deallocate work texture(s)
  10404. * @param {SpeedyGPU} gpu
  10405. */
  10406. _deallocateWorkTextures(gpu) {
  10407. for (let j = this._tex.length - 1; j >= 0; j--) this._tex[j] = gpu.texturePool.free(this._tex[j]);
  10408. }
  10409. /**
  10410. * Visually inspect a texture for debugging purposes
  10411. * @param {SpeedyGPU} gpu
  10412. * @param {SpeedyDrawableTexture} texture
  10413. */
  10414. _visualize(gpu, texture) {
  10415. const canvas = gpu.renderToCanvas(texture);
  10416. if (!_texView) {
  10417. document.body.appendChild(canvas);
  10418. _texView = true;
  10419. }
  10420. }
  10421. }
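/*
 * Minimal subclass sketch (comment only): a hypothetical pass-through node with
 * one input and one output image port. execute() pulls the input messages and
 * then calls _run(); here _run() reads the incoming message and forwards it via
 * swrite(), which reuses a cached message object on the output port.
 *
 *   class PassthroughNode extends SpeedyPipelineNode {
 *       constructor(name = 'passthrough') {
 *           super(name, 0, [
 *               InputPort().expects(SpeedyPipelineMessageType.Image),
 *               OutputPort().expects(SpeedyPipelineMessageType.Image)
 *           ]);
 *       }
 *       _run(gpu) {
 *           const { image, format } = this.input().read();
 *           this.output().swrite(image, format);
 *       }
 *   }
 */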
  10422. /**
  10423. * Source node (a node with no input ports)
  10424. * @abstract
  10425. */
  10426. class SpeedyPipelineSourceNode extends SpeedyPipelineNode {
  10427. /**
  10428. * Constructor
  10429. * @param {string} [name] the name of this node
  10430. * @param {number} [texCount] number of work textures
  10431. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10432. */
  10433. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10434. super(name, texCount, portBuilders);
  10435. utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
  10436. }
  10437. /**
  10438. * Is this a source of the pipeline?
  10439. * @returns {boolean}
  10440. */
  10441. isSource() {
  10442. return true;
  10443. }
  10444. }
  10445. /**
  10446. * Sink node (a node with no output ports)
  10447. * @abstract
  10448. */
  10449. class SpeedyPipelineSinkNode extends SpeedyPipelineNode {
  10450. /**
  10451. * Constructor
  10452. * @param {string} [name] the name of this node
  10453. * @param {number} [texCount] number of work textures
  10454. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10455. */
  10456. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10457. super(name, texCount, portBuilders);
  10458. utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
  10459. }
  10460. /**
  10461. * Export data from this node to the user
  10462. * @abstract
  10463. * @returns {SpeedyPromise<any>}
  10464. */
  10465. export() {
  10466. throw new utils_errors/* AbstractMethodError */.aQ();
  10467. }
  10468. /**
  10469. * Is this a sink of the pipeline?
  10470. * @returns {boolean}
  10471. */
  10472. isSink() {
  10473. return true;
  10474. }
  10475. }
  10476. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
  10477. /*
  10478. * speedy-vision.js
  10479. * GPU-accelerated Computer Vision for JavaScript
  10480. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10481. *
  10482. * Licensed under the Apache License, Version 2.0 (the "License");
  10483. * you may not use this file except in compliance with the License.
  10484. * You may obtain a copy of the License at
  10485. *
  10486. * http://www.apache.org/licenses/LICENSE-2.0
  10487. *
  10488. * Unless required by applicable law or agreed to in writing, software
  10489. * distributed under the License is distributed on an "AS IS" BASIS,
  10490. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10491. * See the License for the specific language governing permissions and
  10492. * limitations under the License.
  10493. *
10494. * speedy-keypoint-match.js
  10495. * A match between two keypoint descriptors
  10496. */
  10497. // Constants
  10498. const MATCH_NOT_FOUND = -1;
  10499. /**
  10500. * A match between two keypoint descriptors
  10501. */
  10502. class SpeedyKeypointMatch {
  10503. /**
  10504. * Constructor
  10505. * @param {number} index index of the stored keypoint, a non-negative integer
  10506. * @param {number} distance a measure of the quality of the match, a non-negative number
  10507. */
  10508. constructor(index, distance) {
  10509. const isValid = distance < globals.MATCH_MAX_DISTANCE;
  10510. /** @type {number} index of the stored keypoint */
  10511. this._index = isValid ? index | 0 : MATCH_NOT_FOUND;
  10512. /** @type {number} a measure of the quality of the match */
  10513. this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
  10514. // done!
  10515. return Object.freeze(this);
  10516. }
  10517. /**
  10518. * The index of the stored keypoint
  10519. * @returns {number}
  10520. */
  10521. get index() {
  10522. return this._index;
  10523. }
  10524. /**
  10525. * A measure of the quality of the match (lower values indicate better matches)
  10526. * @returns {number}
  10527. */
  10528. get distance() {
  10529. return this._distance;
  10530. }
  10531. /**
  10532. * A string representation of the keypoint match
  10533. * @returns {string}
  10534. */
  10535. toString() {
  10536. return `SpeedyKeypointMatch(${this.index},${this.distance})`;
  10537. }
  10538. }
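/*
 * Behavior sketch (comment only): a match whose distance reaches the library's
 * MATCH_MAX_DISTANCE constant is treated as "not found" — its index becomes
 * MATCH_NOT_FOUND (-1) and its distance +Infinity.
 *
 *   const good = new SpeedyKeypointMatch(7, 12); // assuming 12 < MATCH_MAX_DISTANCE
 *   good.index;    // 7
 *
 *   const none = new SpeedyKeypointMatch(7, Number.POSITIVE_INFINITY);
 *   none.index;    // -1 (MATCH_NOT_FOUND)
 *   none.distance; // Infinity
 */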
  10539. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
  10540. /*
  10541. * speedy-vision.js
  10542. * GPU-accelerated Computer Vision for JavaScript
  10543. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10544. *
  10545. * Licensed under the Apache License, Version 2.0 (the "License");
  10546. * you may not use this file except in compliance with the License.
  10547. * You may obtain a copy of the License at
  10548. *
  10549. * http://www.apache.org/licenses/LICENSE-2.0
  10550. *
  10551. * Unless required by applicable law or agreed to in writing, software
  10552. * distributed under the License is distributed on an "AS IS" BASIS,
  10553. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10554. * See the License for the specific language governing permissions and
  10555. * limitations under the License.
  10556. *
  10557. * speedy-keypoint.js
  10558. * Keypoint class
  10559. */
  10560. /**
  10561. * Represents a keypoint
  10562. */
  10563. class SpeedyKeypoint {
  10564. /**
  10565. * Constructor
  10566. * @param {number} x X position
  10567. * @param {number} y Y position
  10568. * @param {number} [lod] Level-of-detail
  10569. * @param {number} [rotation] Rotation in radians
  10570. * @param {number} [score] Cornerness measure
  10571. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10572. */
  10573. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null) {
  10574. /** @type {SpeedyPoint2} keypoint position */
  10575. this._position = new SpeedyPoint2(+x, +y);
  10576. /** @type {number} level of detail */
  10577. this._lod = +lod;
  10578. /** @type {number} rotation in radians */
  10579. this._rotation = +rotation;
  10580. /** @type {number} a cornerness measure */
  10581. this._score = +score;
  10582. /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
  10583. this._descriptor = descriptor;
  10584. }
  10585. /**
  10586. * Converts this keypoint to a descriptive string
  10587. * @returns {string}
  10588. */
  10589. toString() {
  10590. return `SpeedyKeypoint(${this.x},${this.y})`;
  10591. }
  10592. /**
  10593. * The position of this keypoint
  10594. * @returns {SpeedyPoint2}
  10595. */
  10596. get position() {
  10597. return this._position;
  10598. }
  10599. /**
  10600. * The x-position of this keypoint
  10601. * @returns {number}
  10602. */
  10603. get x() {
  10604. return this._position.x;
  10605. }
  10606. /**
  10607. * The x-position of this keypoint
  10608. * @param {number} value
  10609. */
  10610. set x(value) {
  10611. this._position.x = +value;
  10612. }
  10613. /**
  10614. * The y-position of this keypoint
  10615. * @returns {number}
  10616. */
  10617. get y() {
  10618. return this._position.y;
  10619. }
  10620. /**
  10621. * The y-position of this keypoint
  10622. * @param {number} value
  10623. */
  10624. set y(value) {
  10625. this._position.y = +value;
  10626. }
  10627. /**
  10628. * The pyramid level-of-detail from which this keypoint was extracted
  10629. * @returns {number}
  10630. */
  10631. get lod() {
  10632. return this._lod;
  10633. }
  10634. /**
  10635. * Scale: 2^lod
  10636. * @returns {number}
  10637. */
  10638. get scale() {
  10639. return Math.pow(2, this._lod);
  10640. }
  10641. /**
  10642. * The orientation of the keypoint, in radians
  10643. * @returns {number} Angle in radians
  10644. */
  10645. get rotation() {
  10646. return this._rotation;
  10647. }
  10648. /**
  10649. * Score: a cornerness measure
  10650. * @returns {number} Score
  10651. */
  10652. get score() {
  10653. return this._score;
  10654. }
  10655. /**
  10656. * Keypoint descriptor
  10657. * @return {SpeedyKeypointDescriptor|null}
  10658. */
  10659. get descriptor() {
  10660. return this._descriptor;
  10661. }
  10662. }
  10663. /**
  10664. * Represents a tracked keypoint
  10665. */
  10666. class SpeedyTrackedKeypoint extends SpeedyKeypoint {
  10667. /**
  10668. * Constructor
  10669. * @param {number} x X position
  10670. * @param {number} y Y position
  10671. * @param {number} [lod] Level-of-detail
  10672. * @param {number} [rotation] Rotation in radians
  10673. * @param {number} [score] Cornerness measure
  10674. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10675. * @param {SpeedyVector2} [flow] flow vector
  10676. */
  10677. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0, 0)) {
  10678. super(x, y, lod, rotation, score, descriptor);
  10679. /** @type {SpeedyVector2} flow vector */
  10680. this._flow = flow;
  10681. }
  10682. /**
  10683. * Flow vector
  10684. * @returns {SpeedyVector2}
  10685. */
  10686. get flow() {
  10687. return this._flow;
  10688. }
  10689. }
  10690. /**
  10691. * Represents a matched keypoint
  10692. */
  10693. class SpeedyMatchedKeypoint extends SpeedyKeypoint {
  10694. /**
  10695. * Constructor
  10696. * @param {number} x X position
  10697. * @param {number} y Y position
  10698. * @param {number} [lod] Level-of-detail
  10699. * @param {number} [rotation] Rotation in radians
  10700. * @param {number} [score] Cornerness measure
  10701. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10702. * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
  10703. */
  10704. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = []) {
  10705. super(x, y, lod, rotation, score, descriptor);
  10706. /** @type {SpeedyKeypointMatch[]} keypoint matches */
  10707. this._matches = matches;
  10708. }
  10709. /**
  10710. * Keypoint matches
  10711. * @returns {SpeedyKeypointMatch[]}
  10712. */
  10713. get matches() {
  10714. return this._matches;
  10715. }
  10716. }
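/*
 * Quick sketch (comment only): a keypoint's scale is derived from its pyramid
 * level-of-detail as 2^lod.
 *
 *   const kp = new SpeedyKeypoint(320, 240, 1.0, 0.0, 50.0);
 *   kp.scale;    // 2 (since lod = 1)
 *   kp.position; // SpeedyPoint2(320, 240)
 */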
  10717. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
  10718. /*
  10719. * speedy-vision.js
  10720. * GPU-accelerated Computer Vision for JavaScript
  10721. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10722. *
  10723. * Licensed under the Apache License, Version 2.0 (the "License");
  10724. * you may not use this file except in compliance with the License.
  10725. * You may obtain a copy of the License at
  10726. *
  10727. * http://www.apache.org/licenses/LICENSE-2.0
  10728. *
  10729. * Unless required by applicable law or agreed to in writing, software
  10730. * distributed under the License is distributed on an "AS IS" BASIS,
  10731. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10732. * See the License for the specific language governing permissions and
  10733. * limitations under the License.
  10734. *
  10735. * pipeline.js
  10736. * A pipeline is a network of nodes in which data flows to a sink
  10737. */
  10738. /**
  10739. * A dictionary indexed by the names of the sink nodes
  10740. * @typedef {Object<string,any>} SpeedyPipelineOutput
  10741. */
  10742. /** @type {SpeedyGPU} shared GPU programs & textures */
  10743. let gpu = null;
  10744. /** @type {number} gpu reference count */
  10745. let referenceCount = 0;
  10746. /**
  10747. * A pipeline is a network of nodes in which data flows to a sink
  10748. */
  10749. class SpeedyPipeline {
  10750. /**
  10751. * Constructor
  10752. */
  10753. constructor() {
  10754. /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
  10755. this._nodes = [];
  10756. /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
  10757. this._sequence = [];
  10758. /** @type {boolean} are we running the pipeline at this moment? */
  10759. this._busy = false;
  10760. }
  10761. /**
  10762. * Find a node by its name
  10763. * @template T extends SpeedyPipelineNode
  10764. * @param {string} name
  10765. * @returns {T|null}
  10766. */
  10767. node(name) {
  10768. for (let i = 0, n = this._nodes.length; i < n; i++) {
  10769. if (this._nodes[i].name === name) return this._nodes[i];
  10770. }
  10771. return null;
  10772. }
  10773. /**
  10774. * Initialize the pipeline
  10775. * @param {...SpeedyPipelineNode} nodes
  10776. * @returns {SpeedyPipeline} this pipeline
  10777. */
  10778. init(...nodes) {
  10779. // validate
  10780. if (this._nodes.length > 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);else if (nodes.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
  10781. // create a GPU instance and increase the reference count
  10782. if (0 == referenceCount++) {
  10783. utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
  10784. gpu = new SpeedyGPU();
  10785. }
  10786. // add nodes to the network
  10787. for (let i = 0; i < nodes.length; i++) {
  10788. const node = nodes[i];
  10789. if (!this._nodes.includes(node)) this._nodes.push(node);
  10790. }
  10791. // generate the sequence of nodes
  10792. this._sequence = SpeedyPipeline._tsort(this._nodes);
  10793. SpeedyPipeline._validateSequence(this._sequence);
  10794. // initialize nodes
  10795. for (let i = 0; i < this._sequence.length; i++) this._sequence[i].init(gpu);
  10796. // done!
  10797. return this;
  10798. }
  10799. /**
  10800. * Release the resources associated with this pipeline
  10801. * @returns {null}
  10802. */
  10803. release() {
  10804. if (this._nodes.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
  10805. // release nodes
  10806. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].release(gpu);
  10807. this._sequence.length = 0;
  10808. this._nodes.length = 0;
  10809. // decrease reference count and release GPU if necessary
  10810. if (0 == --referenceCount) gpu = gpu.release();
  10811. // done!
  10812. return null;
  10813. }
  10814. /**
  10815. * Run the pipeline
  10816. * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
  10817. */
  10818. run() {
  10819. utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
  10820. // is the pipeline busy?
  10821. if (this._busy) {
  10822. // if so, we need to wait 'til it finishes
  10823. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  10824. setTimeout(() => this.run().then(resolve, reject), 0);
  10825. });
  10826. } else {
  10827. // the pipeline is now busy and won't accept concurrent tasks
  10828. // (we allocate textures using a single pool)
  10829. this._busy = true;
  10830. }
  10831. // find the sinks
  10832. const sinks = /** @type {SpeedyPipelineSinkNode[]} */this._sequence.filter(node => node.isSink());
  10833. // create output template
  10834. const template = SpeedyPipeline._createOutputTemplate(sinks);
  10835. // diagnostic log
  10836. if (settings/* Settings */.w.logging === 'diagnostic') utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
  10837. // run the pipeline
  10838. return SpeedyPipeline._runSequence(this._sequence).then(() =>
  10839. // export results
  10840. speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
  10841. // aggregate results by the names of the sinks
  10842. results.reduce((obj, val, idx) => (obj[sinks[idx].name] = val, obj), template))).finally(() => {
  10843. // clear all ports
  10844. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].clearPorts();
  10845. // the pipeline is no longer busy
  10846. this._busy = false;
  10847. // diagnostic log
  10848. if (settings/* Settings */.w.logging === 'diagnostic') {
  10849. utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
  10850. Object.keys(template).forEach(entry => {
  10851. utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
  10852. });
  10853. }
  10854. }).turbocharge();
  10855. }
  10856. /**
  10857. * @internal
  10858. *
  10859. * GPU instance
  10860. * @returns {SpeedyGPU}
  10861. */
  10862. get _gpu() {
  10863. return gpu;
  10864. }
  10865. /**
  10866. * Execute the tasks of a sequence of nodes
  10867. * @param {SpeedyPipelineNode[]} sequence sequence of nodes
  10868. * @param {number} [i] in [0,n)
  10869. * @param {number} [n] number of nodes
  10870. * @returns {SpeedyPromise<void>}
  10871. */
  10872. static _runSequence(sequence, i = 0, n = sequence.length) {
  10873. for (; i < n; i++) {
  10874. const runTask = sequence[i].execute(gpu);
  10875. // this call greatly improves performance when downloading pixel data using PBOs
  10876. gpu.gl.flush();
  10877. if (typeof runTask !== 'undefined') return runTask.then(() => SpeedyPipeline._runSequence(sequence, i + 1, n));
  10878. }
  10879. return speedy_promise/* SpeedyPromise */.i.resolve();
  10880. }
  10881. /**
  10882. * Topological sorting
  10883. * @param {SpeedyPipelineNode[]} nodes
  10884. * @returns {SpeedyPipelineNode[]}
  10885. */
  10886. static _tsort(nodes) {
  10887. /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
  10888. const outlinks = SpeedyPipeline._outlinks(nodes);
  10889. const stack = nodes.map(node => ( /** @type {StackNode} */[node, false]));
  10890. const trash = new Set();
  10891. const sorted = new Array(nodes.length);
  10892. let j = sorted.length;
  10893. while (stack.length > 0) {
  10894. const [node, done] = stack.pop();
  10895. if (!done) {
  10896. if (!trash.has(node)) {
  10897. const outnodes = outlinks.get(node);
  10898. trash.add(node);
  10899. stack.push([node, true]);
  10900. stack.push(...outnodes.map(node => ( /** @type {StackNode} */[node, false])));
  10901. if (outnodes.some(node => trash.has(node) && !sorted.includes(node))) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
  10902. }
  10903. } else sorted[--j] = node;
  10904. }
  10905. return sorted;
  10906. }
  10907. /**
  10908. * Figure out the outgoing links of all nodes
  10909. * @param {SpeedyPipelineNode[]} nodes
  10910. * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
  10911. */
  10912. static _outlinks(nodes) {
  10913. const outlinks = new Map();
  10914. for (let k = 0; k < nodes.length; k++) outlinks.set(nodes[k], []);
  10915. for (let i = 0; i < nodes.length; i++) {
  10916. const to = nodes[i];
  10917. const inputs = to.inputNodes();
  10918. for (let j = 0; j < inputs.length; j++) {
  10919. const from = inputs[j];
  10920. const links = outlinks.get(from);
  10921. if (!links) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
  10922. if (!links.includes(to)) links.push(to);
  10923. }
  10924. }
  10925. return outlinks;
  10926. }
  10927. /**
  10928. * Generate the output template by aggregating the names of the sinks
  10929. * @param {SpeedyPipelineNode[]} [sinks]
  10930. * @returns {SpeedyPipelineOutput}
  10931. */
  10932. static _createOutputTemplate(sinks = []) {
  10933. const template = Object.create(null);
  10934. for (let i = sinks.length - 1; i >= 0; i--) template[sinks[i].name] = null;
  10935. return template;
  10936. }
  10937. /**
  10938. * Validate a sequence of nodes
  10939. * @param {SpeedyPipelineNode[]} sequence
  10940. */
  10941. static _validateSequence(sequence) {
if (sequence.length == 0)
throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);
else if (!sequence[0].isSource())
throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);
else if (!sequence.find(node => node.isSink()))
throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
  10943. }
  10944. }
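/*
* Usage sketch (illustrative; not part of the bundle). It assumes the public
* Speedy facade exported by this file (Speedy.load, Speedy.Pipeline,
* Speedy.Image.Source/Sink, Speedy.Filter.Greyscale) and the port-linking API
* output().connectTo(input()); `video` is a placeholder element.
*
*   const media = await Speedy.load(video);          // SpeedyMedia
*   const source = Speedy.Image.Source();
*   const greyscale = Speedy.Filter.Greyscale();
*   const sink = Speedy.Image.Sink('output');        // the sink name keys the result
*
*   source.media = media;
*   source.output().connectTo(greyscale.input());
*   greyscale.output().connectTo(sink.input());
*
*   const pipeline = Speedy.Pipeline();
*   pipeline.init(source, greyscale, sink);          // builds & validates the network
*   const { output } = await pipeline.run();         // results indexed by sink names
*   pipeline.release();                              // frees the GPU resources
*/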
  10945. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
  10946. /*
  10947. * speedy-vision.js
  10948. * GPU-accelerated Computer Vision for JavaScript
  10949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10950. *
  10951. * Licensed under the Apache License, Version 2.0 (the "License");
  10952. * you may not use this file except in compliance with the License.
  10953. * You may obtain a copy of the License at
  10954. *
  10955. * http://www.apache.org/licenses/LICENSE-2.0
  10956. *
  10957. * Unless required by applicable law or agreed to in writing, software
  10958. * distributed under the License is distributed on an "AS IS" BASIS,
  10959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10960. * See the License for the specific language governing permissions and
  10961. * limitations under the License.
  10962. *
  10963. * image-input.js
  10964. * Gets an image into a pipeline
  10965. */
  10966. // Constants
  10967. const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
  10968. /**
  10969. * Gets an image into a pipeline
  10970. */
  10971. class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode {
  10972. /**
  10973. * Constructor
  10974. * @param {string} [name] name of the node
  10975. */
  10976. constructor(name = undefined) {
  10977. super(name, UPLOAD_BUFFER_SIZE, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  10978. /** @type {SpeedyMedia|null} source media */
  10979. this._media = null;
  10980. /** @type {number} texture index */
  10981. this._textureIndex = 0;
  10982. }
  10983. /**
  10984. * Source media
  10985. * @returns {SpeedyMedia|null}
  10986. */
  10987. get media() {
  10988. return this._media;
  10989. }
  10990. /**
  10991. * Source media
  10992. * @param {SpeedyMedia|null} media
  10993. */
  10994. set media(media) {
  10995. if (media !== null && !(media instanceof SpeedyMedia)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
  10996. this._media = media;
  10997. }
  10998. /**
  10999. * Run the specific task of this node
  11000. * @param {SpeedyGPU} gpu
  11001. * @returns {void|SpeedyPromise<void>}
  11002. */
  11003. _run(gpu) {
  11004. if (this._media == null) throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
  11005. // use round-robin to mitigate WebGL's implicit synchronization
  11006. // and maybe minimize texture upload times
  11007. this._textureIndex = (this._textureIndex + 1) % this._tex.length;
  11008. // upload texture
  11009. const outputTexture = this._tex[this._textureIndex];
  11010. gpu.upload(this._media._source, outputTexture);
  11011. this.output().swrite(outputTexture, this._media._format);
  11012. }
  11013. }
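/*
* Usage sketch (illustrative): feeding a SpeedyMedia into an image source.
* Assumes the Speedy facade (Speedy.load, Speedy.Image.Source); `camera` is a
* placeholder for an HTMLVideoElement.
*
*   const source = Speedy.Image.Source();
*   source.media = await Speedy.load(camera);   // must be set before pipeline.run()
*
* The node cycles through UPLOAD_BUFFER_SIZE internal textures (round-robin),
* mitigating WebGL's implicit synchronization between consecutive uploads.
*/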
  11014. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
  11015. /*
  11016. * speedy-vision.js
  11017. * GPU-accelerated Computer Vision for JavaScript
  11018. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11019. *
  11020. * Licensed under the Apache License, Version 2.0 (the "License");
  11021. * you may not use this file except in compliance with the License.
  11022. * You may obtain a copy of the License at
  11023. *
  11024. * http://www.apache.org/licenses/LICENSE-2.0
  11025. *
  11026. * Unless required by applicable law or agreed to in writing, software
  11027. * distributed under the License is distributed on an "AS IS" BASIS,
  11028. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11029. * See the License for the specific language governing permissions and
  11030. * limitations under the License.
  11031. *
  11032. * image-output.js
  11033. * Gets an image out of a pipeline
  11034. */
  11035. /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
  11036. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
  11037. const DEFAULT_MEDIA_TYPE = "bitmap";
  11038. /**
  11039. * Gets an image out of a pipeline
  11040. */
  11041. class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode {
  11042. /**
  11043. * Constructor
  11044. * @param {string} [name] name of the node
  11045. */
  11046. constructor(name = 'image') {
  11047. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11048. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
  11049. this._mediaType = DEFAULT_MEDIA_TYPE;
  11050. /** @type {ImageBitmap} output bitmap */
  11051. this._bitmap = null;
  11052. /** @type {ImageData} output pixel data */
  11053. this._data = null;
  11054. /** @type {ImageFormat} output format */
  11055. this._format = types/* ImageFormat */.f5.RGBA;
  11056. /** @type {SpeedyTextureReader} texture reader */
  11057. this._textureReader = new SpeedyTextureReader(1);
  11058. }
  11059. /**
  11060. * The media type that is exported from this node
  11061. * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
  11062. */
  11063. get mediaType() {
  11064. return this._mediaType;
  11065. }
  11066. /**
  11067. * The media type that is exported from this node
  11068. * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
  11069. */
  11070. set mediaType(value) {
  11071. if (value != 'bitmap' && value != 'data') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
  11072. this._mediaType = value;
  11073. }
  11074. /**
  11075. * Initializes this node
  11076. * @param {SpeedyGPU} gpu
  11077. */
  11078. init(gpu) {
  11079. super.init(gpu);
  11080. this._textureReader.init(gpu);
  11081. }
  11082. /**
  11083. * Releases this node
  11084. * @param {SpeedyGPU} gpu
  11085. */
  11086. release(gpu) {
  11087. this._textureReader.release(gpu);
  11088. super.release(gpu);
  11089. }
  11090. /**
  11091. * Export data from this node to the user
  11092. * @returns {SpeedyPromise<SpeedyMedia>}
  11093. */
  11094. export() {
  11095. const bitmapOrData = this._mediaType != 'data' ? this._bitmap : this._data;
  11096. utils/* Utils */.A.assert(bitmapOrData != null);
  11097. return SpeedyMedia.load(bitmapOrData, {
  11098. format: this._format
  11099. }, false);
  11100. }
  11101. /**
  11102. * Run the specific task of this node
  11103. * @param {SpeedyGPU} gpu
  11104. * @returns {void|SpeedyPromise<void>}
  11105. */
  11106. _run(gpu) {
  11107. const {
  11108. image,
  11109. format
  11110. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11111. if (this._mediaType != 'data') {
  11112. /* Create an ImageBitmap (default) */
  11113. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  11114. const canvas = gpu.renderToCanvas(image);
  11115. createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
  11116. this._bitmap = bitmap;
  11117. this._format = format;
  11118. this._data = null;
  11119. resolve();
  11120. });
  11121. });
  11122. } else {
  11123. /* Create an ImageData */
  11124. return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
  11125. const dataArray = new Uint8ClampedArray(pixels.buffer);
  11126. this._data = new ImageData(dataArray, image.width, image.height);
  11127. this._format = format;
  11128. this._bitmap = null;
  11129. });
  11130. }
  11131. }
  11132. }
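/*
* Usage sketch (illustrative): choosing how the sink exports its image.
* Assumes the Speedy.Image.Sink factory defined further down in this bundle.
*
*   const sink = Speedy.Image.Sink('result');
*   sink.mediaType = 'bitmap';    // default: export an ImageBitmap-backed SpeedyMedia
*   //sink.mediaType = 'data';    // alternative: export raw pixels via ImageData
*
*   const { result } = await pipeline.run();   // `result` is a SpeedyMedia
*/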
  11133. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
  11134. /*
  11135. * speedy-vision.js
  11136. * GPU-accelerated Computer Vision for JavaScript
  11137. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11138. *
  11139. * Licensed under the Apache License, Version 2.0 (the "License");
  11140. * you may not use this file except in compliance with the License.
  11141. * You may obtain a copy of the License at
  11142. *
  11143. * http://www.apache.org/licenses/LICENSE-2.0
  11144. *
  11145. * Unless required by applicable law or agreed to in writing, software
  11146. * distributed under the License is distributed on an "AS IS" BASIS,
  11147. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11148. * See the License for the specific language governing permissions and
  11149. * limitations under the License.
  11150. *
  11151. * multiplexer.js
  11152. * Image multiplexer
  11153. */
  11154. /** @type {string[]} the names of the input ports indexed by their number */
  11155. const INPUT_PORT = ['in0', 'in1'];
  11156. /**
  11157. * Image multiplexer
  11158. */
  11159. class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode {
  11160. /**
  11161. * Constructor
  11162. * @param {string} [name] name of the node
  11163. */
  11164. constructor(name = undefined) {
  11165. super(name, 0, [...INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image)), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11166. /** @type {number} which port should be linked to the output? */
  11167. this._port = 0;
  11168. }
  11169. /**
  11170. * The number of the port that should be linked to the output
  11171. * @returns {number}
  11172. */
  11173. get port() {
  11174. return this._port;
  11175. }
  11176. /**
  11177. * The number of the port that should be linked to the output
  11178. * @param {number} port
  11179. */
  11180. set port(port) {
  11181. if (port < 0 || port >= INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  11182. this._port = port | 0;
  11183. }
  11184. /**
  11185. * Run the specific task of this node
  11186. * @param {SpeedyGPU} gpu
  11187. * @returns {void|SpeedyPromise<void>}
  11188. */
  11189. _run(gpu) {
  11190. const message = this.input(INPUT_PORT[this._port]).read();
  11191. this.output().write(message);
  11192. }
  11193. }
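/*
* Usage sketch (illustrative): routing one of two inputs to the output.
* Assumes the port-linking API used in the SpeedyPipeline sketch above.
*
*   const mux = Speedy.Image.Multiplexer();
*   nodeA.output().connectTo(mux.input('in0'));
*   nodeB.output().connectTo(mux.input('in1'));
*   mux.port = 1;   // forward 'in1'; valid ports are 0 and 1
*/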
  11194. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
  11195. /*
  11196. * speedy-vision.js
  11197. * GPU-accelerated Computer Vision for JavaScript
  11198. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11199. *
  11200. * Licensed under the Apache License, Version 2.0 (the "License");
  11201. * you may not use this file except in compliance with the License.
  11202. * You may obtain a copy of the License at
  11203. *
  11204. * http://www.apache.org/licenses/LICENSE-2.0
  11205. *
  11206. * Unless required by applicable law or agreed to in writing, software
  11207. * distributed under the License is distributed on an "AS IS" BASIS,
  11208. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11209. * See the License for the specific language governing permissions and
  11210. * limitations under the License.
  11211. *
  11212. * buffer.js
  11213. * Image Buffer
  11214. */
  11215. /**
  11216. * Image Buffer: a node with memory.
  11217. * At time t, it outputs the image received at time t-1
  11218. */
  11219. class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode {
  11220. /**
  11221. * Constructor
  11222. * @param {string} [name] name of the node
  11223. */
  11224. constructor(name = undefined) {
  11225. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11226. /** @type {number} current page: 0 or 1 */
  11227. this._pageIndex = 0;
  11228. /** @type {boolean} first run? */
  11229. this._initialized = false;
  11230. /** @type {ImageFormat} previous image format */
  11231. this._previousFormat = types/* ImageFormat */.f5.RGBA;
  11232. /** @type {boolean} frozen buffer? */
  11233. this._frozen = false;
  11234. }
  11235. /**
  11236. * A frozen buffer discards the input, effectively increasing the buffering time
  11237. * @returns {boolean}
  11238. */
  11239. get frozen() {
  11240. return this._frozen;
  11241. }
  11242. /**
  11243. * A frozen buffer discards the input, effectively increasing the buffering time
  11244. * @param {boolean} value
  11245. */
  11246. set frozen(value) {
  11247. this._frozen = Boolean(value);
  11248. }
  11249. /**
  11250. * Releases this node
  11251. * @param {SpeedyGPU} gpu
  11252. */
  11253. release(gpu) {
  11254. this._initialized = false;
  11255. super.release(gpu);
  11256. }
  11257. /**
  11258. * Run the specific task of this node
  11259. * @param {SpeedyGPU} gpu
  11260. * @returns {void|SpeedyPromise<void>}
  11261. */
  11262. _run(gpu) {
  11263. const {
  11264. image,
  11265. format
  11266. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11267. const previousFormat = this._previousFormat;
  11268. const page = this._tex;
  11269. const previousInputTexture = page[1 - this._pageIndex];
  11270. const outputTexture = page[this._pageIndex];
  11271. // can't store pyramids
  11272. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
  11273. // bufferize
  11274. if (!this._frozen || !this._initialized) {
  11275. // store input
  11276. this._previousFormat = format;
  11277. previousInputTexture.resize(image.width, image.height);
  11278. image.copyTo(previousInputTexture);
  11279. // page flipping
  11280. this._pageIndex = 1 - this._pageIndex;
  11281. }
  11282. // first run?
  11283. if (!this._initialized) {
  11284. this._initialized = true;
  11285. this.output().swrite(previousInputTexture, format);
  11286. return;
  11287. }
  11288. // done!
  11289. this.output().swrite(outputTexture, previousFormat);
  11290. }
  11291. }
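/*
* Usage sketch (illustrative): a one-frame delay. Feeding both the live image
* and the buffered image into a downstream node (e.g. an image mixer) lets a
* pipeline compare the current frame with the previous one.
*
*   const buffer = Speedy.Image.Buffer();
*   source.output().connectTo(buffer.input());   // buffer outputs frame t-1
*   buffer.frozen = true;                        // optionally hold the stored frame
*/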
  11292. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
  11293. /*
  11294. * speedy-vision.js
  11295. * GPU-accelerated Computer Vision for JavaScript
  11296. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11297. *
  11298. * Licensed under the Apache License, Version 2.0 (the "License");
  11299. * you may not use this file except in compliance with the License.
  11300. * You may obtain a copy of the License at
  11301. *
  11302. * http://www.apache.org/licenses/LICENSE-2.0
  11303. *
  11304. * Unless required by applicable law or agreed to in writing, software
  11305. * distributed under the License is distributed on an "AS IS" BASIS,
  11306. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11307. * See the License for the specific language governing permissions and
  11308. * limitations under the License.
  11309. *
  11310. * pyramid.js
  11311. * Generate pyramid
  11312. */
  11313. // Constants
const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; // supposing image size <= 8K = 2^13 (downto 1)
const MAX_TEXTURES = 2 * MAX_LEVELS;
  11316. /**
  11317. * Generate pyramid
  11318. */
  11319. class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode {
  11320. /**
  11321. * Constructor
  11322. * @param {string} [name] name of the node
  11323. */
  11324. constructor(name = undefined) {
  11325. super(name, MAX_TEXTURES + 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11326. }
  11327. /**
  11328. * Run the specific task of this node
  11329. * @param {SpeedyGPU} gpu
  11330. * @returns {void|SpeedyPromise<void>}
  11331. */
  11332. _run(gpu) {
  11333. const {
  11334. image,
  11335. format
  11336. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11337. const outputTexture = this._tex[0];
  11338. const pyramids = gpu.programs.pyramids;
  11339. let width = image.width,
  11340. height = image.height;
  11341. // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  11342. const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
  11343. // get work textures
  11344. const mip = new Array(MAX_TEXTURES + 1);
  11345. for (let i = MAX_TEXTURES; i >= 1; i--) mip[i - 1] = this._tex[i];
  11346. // get a copy of the input image
  11347. mip[0].resize(width, height);
  11348. image.copyTo(mip[0]);
  11349. // generate gaussian pyramid
  11350. const numLevels = Math.min(mipLevels, MAX_LEVELS);
  11351. for (let level = 1; level < numLevels; level++) {
  11352. // use max(1, floor(size / 2^lod)), in accordance to
  11353. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  11354. const halfWidth = Math.max(1, width >>> 1);
  11355. const halfHeight = Math.max(1, height >>> 1);
  11356. // reduce operation
  11357. const tmp = level - 1 + MAX_LEVELS;
  11358. pyramids.smoothX.outputs(width, height, mip[tmp])(mip[level - 1]);
  11359. pyramids.smoothY.outputs(width, height, mip[level - 1])(mip[tmp]);
  11360. pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level])(mip[level - 1]);
  11361. /*
  11362. (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
  11363. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
  11364. */
  11365. // flush
  11366. gpu.gl.flush();
  11367. // next level
  11368. width = halfWidth;
  11369. height = halfHeight;
  11370. /*
  11371. // debug: view pyramid
  11372. const view = mip[level-1];
  11373. const canvas = gpu.renderToCanvas(view);
  11374. if(!window._ww) document.body.appendChild(canvas);
  11375. window._ww = 1;
  11376. */
  11377. }
  11378. // copy to output & set mipmap
  11379. outputTexture.resize(image.width, image.height);
  11380. outputTexture.clear();
  11381. image.copyTo(outputTexture);
  11382. outputTexture.generateMipmaps(mip.slice(0, numLevels));
  11383. // done!
  11384. this.output().swrite(outputTexture, format);
  11385. }
  11386. }
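/*
* Usage sketch (illustrative): attaching a Gaussian pyramid to an image. The
* output texture carries custom mipmaps, which multi-scale consumers elsewhere
* in this bundle (e.g. keypoint detectors) sample via texture LOD.
*
*   const greyscale = Speedy.Filter.Greyscale();
*   const pyramid = Speedy.Image.Pyramid();
*   greyscale.output().connectTo(pyramid.input());
*/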
  11387. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
  11388. /*
  11389. * speedy-vision.js
  11390. * GPU-accelerated Computer Vision for JavaScript
  11391. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11392. *
  11393. * Licensed under the Apache License, Version 2.0 (the "License");
  11394. * you may not use this file except in compliance with the License.
  11395. * You may obtain a copy of the License at
  11396. *
  11397. * http://www.apache.org/licenses/LICENSE-2.0
  11398. *
  11399. * Unless required by applicable law or agreed to in writing, software
  11400. * distributed under the License is distributed on an "AS IS" BASIS,
  11401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11402. * See the License for the specific language governing permissions and
  11403. * limitations under the License.
  11404. *
  11405. * mixer.js
  11406. * Image Mixer
  11407. */
  11408. /**
  11409. * Image Mixer
  11410. */
  11411. class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode {
  11412. /**
  11413. * Constructor
  11414. * @param {string} [name] name of the node
  11415. */
  11416. constructor(name = undefined) {
  11417. super(name, 1, [InputPort('in0').expects(SpeedyPipelineMessageType.Image), InputPort('in1').expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11418. /** @type {number} alpha coefficient (applied to image0) */
  11419. this._alpha = 0.5;
  11420. /** @type {number} beta coefficient (applied to image1) */
  11421. this._beta = 0.5;
  11422. /** @type {number} gamma coefficient (brightness control) */
  11423. this._gamma = 0.0;
  11424. }
  11425. /**
  11426. * Alpha coefficient (applied to image0)
  11427. * @returns {number}
  11428. */
  11429. get alpha() {
  11430. return this._alpha;
  11431. }
  11432. /**
  11433. * Alpha coefficient (applied to image0)
  11434. * @param {number} value
  11435. */
  11436. set alpha(value) {
  11437. this._alpha = +value;
  11438. }
  11439. /**
  11440. * Beta coefficient (applied to image1)
  11441. * @returns {number}
  11442. */
  11443. get beta() {
  11444. return this._beta;
  11445. }
  11446. /**
  11447. * Beta coefficient (applied to image1)
  11448. * @param {number} value
  11449. */
  11450. set beta(value) {
  11451. this._beta = +value;
  11452. }
  11453. /**
  11454. * Gamma coefficient (brightness control)
  11455. * @returns {number}
  11456. */
  11457. get gamma() {
  11458. return this._gamma;
  11459. }
  11460. /**
  11461. * Gamma coefficient (brightness control)
  11462. * @param {number} value
  11463. */
  11464. set gamma(value) {
  11465. this._gamma = +value;
  11466. }
  11467. /**
  11468. * Run the specific task of this node
  11469. * @param {SpeedyGPU} gpu
  11470. * @returns {void|SpeedyPromise<void>}
  11471. */
  11472. _run(gpu) {
  11473. const in0 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in0').read();
  11474. const in1 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in1').read();
  11475. const image0 = in0.image,
  11476. image1 = in1.image;
  11477. const format0 = in0.format,
  11478. format1 = in1.format;
  11479. const width = Math.max(image0.width, image1.width);
  11480. const height = Math.max(image0.height, image1.height);
  11481. const alpha = this._alpha,
  11482. beta = this._beta,
  11483. gamma = this._gamma;
  11484. const outputTexture = this._tex[0];
  11485. if (format0 != format1) throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
  11486. gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
  11487. gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
  11488. this.output().swrite(outputTexture, format0);
  11489. }
  11490. }
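/*
* Usage sketch (illustrative): the mixer computes alpha * image0 + beta * image1
* + gamma, pixel-wise. A 50/50 crossfade:
*
*   const mixer = Speedy.Image.Mixer();
*   imageA.output().connectTo(mixer.input('in0'));
*   imageB.output().connectTo(mixer.input('in1'));
*   mixer.alpha = 0.5;
*   mixer.beta = 0.5;
*   mixer.gamma = 0.0;
*/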
  11491. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
  11492. /*
  11493. * speedy-vision.js
  11494. * GPU-accelerated Computer Vision for JavaScript
  11495. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11496. *
  11497. * Licensed under the Apache License, Version 2.0 (the "License");
  11498. * you may not use this file except in compliance with the License.
  11499. * You may obtain a copy of the License at
  11500. *
  11501. * http://www.apache.org/licenses/LICENSE-2.0
  11502. *
  11503. * Unless required by applicable law or agreed to in writing, software
  11504. * distributed under the License is distributed on an "AS IS" BASIS,
  11505. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11506. * See the License for the specific language governing permissions and
  11507. * limitations under the License.
  11508. *
  11509. * portal.js
  11510. * Image Portals
  11511. */
  11512. /**
  11513. * A sink of an Image Portal
  11514. * This is not a pipeline sink - it doesn't export any data!
  11515. */
  11516. class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode {
  11517. /**
  11518. * Constructor
  11519. * @param {string} [name] name of the node
  11520. */
  11521. constructor(name = undefined) {
  11522. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11523. /** @type {ImageFormat} stored image format */
  11524. this._format = types/* ImageFormat */.f5.RGBA;
  11525. /** @type {boolean} is this node initialized? */
  11526. this._initialized = false;
  11527. }
  11528. /**
  11529. * Stored image
  11530. * @returns {SpeedyTexture}
  11531. */
  11532. get image() {
  11533. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11534. return this._tex[0];
  11535. }
  11536. /**
  11537. * Stored image format
  11538. * @returns {ImageFormat}
  11539. */
  11540. get format() {
  11541. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11542. return this._format;
  11543. }
  11544. /**
  11545. * Initializes this node
  11546. * @param {SpeedyGPU} gpu
  11547. */
  11548. init(gpu) {
  11549. super.init(gpu);
  11550. this._tex[0].resize(1, 1).clear(); // initial texture
  11551. this._format = types/* ImageFormat */.f5.RGBA;
  11552. this._initialized = true;
  11553. }
  11554. /**
  11555. * Releases this node
  11556. * @param {SpeedyGPU} gpu
  11557. */
  11558. release(gpu) {
  11559. this._initialized = false;
  11560. super.release(gpu);
  11561. }
  11562. /**
  11563. * Run the specific task of this node
  11564. * @param {SpeedyGPU} gpu
  11565. * @returns {void|SpeedyPromise<void>}
  11566. */
  11567. _run(gpu) {
  11568. const {
  11569. image,
  11570. format
  11571. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11572. const tex = this._tex[0];
  11573. // can't store pyramids
  11574. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
  11575. // copy input
  11576. this._format = format;
  11577. tex.resize(image.width, image.height);
  11578. image.copyTo(tex);
  11579. }
  11580. }
  11581. /**
  11582. * A source of an Image Portal
  11583. */
  11584. class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode {
  11585. /**
  11586. * Constructor
  11587. * @param {string} [name] name of the node
  11588. */
  11589. constructor(name = undefined) {
  11590. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11591. /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
  11592. this._source = null;
  11593. }
  11594. /**
  11595. * Data source
  11596. * @returns {SpeedyPipelineNodeImagePortalSink|null}
  11597. */
  11598. get source() {
  11599. return this._source;
  11600. }
  11601. /**
  11602. * Data source
  11603. * @param {SpeedyPipelineNodeImagePortalSink|null} node
  11604. */
  11605. set source(node) {
  11606. if (node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  11607. this._source = node;
  11608. }
  11609. /**
  11610. * Run the specific task of this node
  11611. * @param {SpeedyGPU} gpu
  11612. * @returns {void|SpeedyPromise<void>}
  11613. */
  11614. _run(gpu) {
  11615. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  11616. this.output().swrite(this._source.image, this._source.format);
  11617. }
  11618. }
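/*
* Usage sketch (illustrative): portals carry an image across pipelines (or
* across runs of the same pipeline) without exporting it to the CPU.
*
*   // pipeline A stores a frame
*   const portalSink = Speedy.Image.Portal.Sink('portal');
*   someNode.output().connectTo(portalSink.input());
*
*   // pipeline B reads it back
*   const portalSource = Speedy.Image.Portal.Source();
*   portalSource.source = portalSink;   // link the source to the sink node
*/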
  11619. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
  11620. /*
  11621. * speedy-vision.js
  11622. * GPU-accelerated Computer Vision for JavaScript
  11623. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11624. *
  11625. * Licensed under the Apache License, Version 2.0 (the "License");
  11626. * you may not use this file except in compliance with the License.
  11627. * You may obtain a copy of the License at
  11628. *
  11629. * http://www.apache.org/licenses/LICENSE-2.0
  11630. *
  11631. * Unless required by applicable law or agreed to in writing, software
  11632. * distributed under the License is distributed on an "AS IS" BASIS,
  11633. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11634. * See the License for the specific language governing permissions and
  11635. * limitations under the License.
  11636. *
  11637. * image-factory.js
  11638. * Image-related nodes
  11639. */
  11640. /**
  11641. * Portal nodes
  11642. */
  11643. class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11644. /**
  11645. * Create an image portal source
  11646. * @param {string} [name] name of the node
  11647. * @returns {SpeedyPipelineNodeImagePortalSource}
  11648. */
  11649. static Source(name = undefined) {
  11650. return new SpeedyPipelineNodeImagePortalSource(name);
  11651. }
  11652. /**
  11653. * Create an image portal sink
  11654. * @param {string} [name] name of the node
  11655. * @returns {SpeedyPipelineNodeImagePortalSink}
  11656. */
  11657. static Sink(name = undefined) {
  11658. return new SpeedyPipelineNodeImagePortalSink(name);
  11659. }
  11660. }
  11661. /**
  11662. * Image nodes
  11663. */
  11664. class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11665. /**
  11666. * Create an image source
  11667. * @param {string} [name] name of the node
  11668. * @returns {SpeedyPipelineNodeImageSource}
  11669. */
  11670. static Source(name = undefined) {
  11671. return new SpeedyPipelineNodeImageSource(name);
  11672. }
  11673. /**
  11674. * Create an image sink
  11675. * @param {string} [name] name of the node
  11676. * @returns {SpeedyPipelineNodeImageSink}
  11677. */
  11678. static Sink(name = undefined) {
  11679. return new SpeedyPipelineNodeImageSink(name);
  11680. }
  11681. /**
  11682. * Create an image multiplexer
  11683. * @param {string} [name] name of the node
  11684. * @returns {SpeedyPipelineNodeImageMultiplexer}
  11685. */
  11686. static Multiplexer(name = undefined) {
  11687. return new SpeedyPipelineNodeImageMultiplexer(name);
  11688. }
  11689. /**
  11690. * Create an image buffer
  11691. * @param {string} [name] name of the node
  11692. * @returns {SpeedyPipelineNodeImageBuffer}
  11693. */
  11694. static Buffer(name = undefined) {
  11695. return new SpeedyPipelineNodeImageBuffer(name);
  11696. }
  11697. /**
  11698. * Image Pyramid
  11699. * @param {string} [name] name of the node
  11700. * @returns {SpeedyPipelineNodeImagePyramid}
  11701. */
  11702. static Pyramid(name = undefined) {
  11703. return new SpeedyPipelineNodeImagePyramid(name);
  11704. }
  11705. /**
  11706. * Image Mixer (blending)
  11707. * @param {string} [name] name of the node
  11708. * @returns {SpeedyPipelineNodeImageMixer}
  11709. */
  11710. static Mixer(name = undefined) {
  11711. return new SpeedyPipelineNodeImageMixer(name);
  11712. }
  11713. /**
  11714. * Image Portals
  11715. * @returns {typeof SpeedyPipelineImagePortalFactory}
  11716. */
  11717. static get Portal() {
  11718. return SpeedyPipelineImagePortalFactory;
  11719. }
  11720. }
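/*
* Note: the public facade of this bundle typically exposes these factories as
* Speedy.Image.* — e.g. Speedy.Image.Source(), Speedy.Image.Pyramid() in the
* sketches above resolve to the static methods of SpeedyPipelineImageFactory.
*/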
  11721. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
  11722. /*
  11723. * speedy-vision.js
  11724. * GPU-accelerated Computer Vision for JavaScript
  11725. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11726. *
  11727. * Licensed under the Apache License, Version 2.0 (the "License");
  11728. * you may not use this file except in compliance with the License.
  11729. * You may obtain a copy of the License at
  11730. *
  11731. * http://www.apache.org/licenses/LICENSE-2.0
  11732. *
  11733. * Unless required by applicable law or agreed to in writing, software
  11734. * distributed under the License is distributed on an "AS IS" BASIS,
  11735. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11736. * See the License for the specific language governing permissions and
  11737. * limitations under the License.
  11738. *
  11739. * greyscale.js
  11740. * Convert an image to greyscale
  11741. */
  11742. /**
  11743. * Convert an image to greyscale
  11744. */
  11745. class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode {
  11746. /**
  11747. * Constructor
  11748. * @param {string} [name] name of the node
  11749. */
  11750. constructor(name = undefined) {
  11751. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11752. }
  11753. /**
  11754. * Run the specific task of this node
  11755. * @param {SpeedyGPU} gpu
  11756. * @returns {void|SpeedyPromise<void>}
  11757. */
  11758. _run(gpu) {
  11759. const {
  11760. image,
  11761. format
  11762. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11763. const width = image.width,
  11764. height = image.height;
  11765. const outputTexture = this._tex[0];
  11766. const filters = gpu.programs.filters;
  11767. filters.rgb2grey.outputs(width, height, outputTexture);
  11768. filters.rgb2grey(image);
  11769. this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
  11770. }
  11771. }
  11772. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
  11773. /*
  11774. * speedy-vision.js
  11775. * GPU-accelerated Computer Vision for JavaScript
  11776. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11777. *
  11778. * Licensed under the Apache License, Version 2.0 (the "License");
  11779. * you may not use this file except in compliance with the License.
  11780. * You may obtain a copy of the License at
  11781. *
  11782. * http://www.apache.org/licenses/LICENSE-2.0
  11783. *
  11784. * Unless required by applicable law or agreed to in writing, software
  11785. * distributed under the License is distributed on an "AS IS" BASIS,
  11786. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11787. * See the License for the specific language governing permissions and
  11788. * limitations under the License.
  11789. *
  11790. * gaussian-blur.js
  11791. * Gaussian Blur
  11792. */
  11793. /**
  11794. * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
  11795. * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
  11796. * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
  11797. * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
  11798. */
  11799. const DEFAULT_KERNEL = Object.freeze({
  11800. 3: [0.27901008925473514, 0.44197982149052983, 0.27901008925473514],
  11801. // 1D convolution (sigma = 1)
  11802. 5: [0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021],
  11803. // 1D convolution (separable kernel)
  11804. 7: [0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274],
  11805. 9: [0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988],
  11806. 11: [0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346],
  11807. 13: [0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363],
  11808. 15: [0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383]
  11809. //3: [ 0.25, 0.5, 0.25 ],
  11810. //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
  11811. });
  11812. /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
  11813. const DEFAULT_SIGMA = new SpeedyVector2(0, 0);
  11814. /** convolution programs (x-axis) */
  11815. const CONVOLUTION_X = Object.freeze({
  11816. 3: 'convolution3x',
  11817. 5: 'convolution5x',
  11818. 7: 'convolution7x',
  11819. 9: 'convolution9x',
  11820. 11: 'convolution11x',
  11821. 13: 'convolution13x',
  11822. 15: 'convolution15x'
  11823. });
  11824. /** convolution programs (y-axis) */
  11825. const CONVOLUTION_Y = Object.freeze({
  11826. 3: 'convolution3y',
  11827. 5: 'convolution5y',
  11828. 7: 'convolution7y',
  11829. 9: 'convolution9y',
  11830. 11: 'convolution11y',
  11831. 13: 'convolution13y',
  11832. 15: 'convolution15y'
  11833. });
  11834. /**
  11835. * @typedef {object} SeparableConvolutionKernel
  11836. * @property {number[]} x
  11837. * @property {number[]} y
  11838. */
  11839. /**
  11840. * Gaussian Blur
  11841. */
  11842. class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode {
  11843. /**
  11844. * Constructor
  11845. * @param {string} [name] name of the node
  11846. */
  11847. constructor(name = undefined) {
  11848. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11849. /** @type {SpeedySize} size of the kernel */
  11850. this._kernelSize = new SpeedySize(5, 5);
  11851. /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
  11852. this._sigma = DEFAULT_SIGMA;
  11853. /** @type {SeparableConvolutionKernel} convolution kernel */
  11854. this._kernel = {
  11855. x: DEFAULT_KERNEL[this._kernelSize.width],
  11856. y: DEFAULT_KERNEL[this._kernelSize.height]
  11857. };
  11858. }
  11859. /**
  11860. * Size of the kernel
  11861. * @returns {SpeedySize}
  11862. */
  11863. get kernelSize() {
  11864. return this._kernelSize;
  11865. }
  11866. /**
  11867. * Size of the kernel
  11868. * @param {SpeedySize} kernelSize
  11869. */
  11870. set kernelSize(kernelSize) {
  11871. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  11872. const kw = kernelSize.width,
  11873. kh = kernelSize.height;
  11874. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  11875. this._kernelSize = kernelSize;
  11876. this._updateKernel();
  11877. }
  11878. /**
  11879. * Sigma of the Gaussian kernel
  11880. * @returns {SpeedyVector2}
  11881. */
  11882. get sigma() {
  11883. return this._sigma;
  11884. }
  11885. /**
  11886. * Sigma of the Gaussian kernel
  11887. * @param {SpeedyVector2} sigma
  11888. */
  11889. set sigma(sigma) {
  11890. utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
  11891. utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
  11892. this._sigma = sigma;
  11893. this._updateKernel();
  11894. }
  11895. /**
  11896. * Run the specific task of this node
  11897. * @param {SpeedyGPU} gpu
  11898. * @returns {void|SpeedyPromise<void>}
  11899. */
  11900. _run(gpu) {
  11901. const {
  11902. image,
  11903. format
  11904. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11905. const width = image.width,
  11906. height = image.height;
  11907. const kernX = this._kernel.x;
  11908. const kernY = this._kernel.y;
  11909. const convX = CONVOLUTION_X[this._kernelSize.width];
  11910. const convY = CONVOLUTION_Y[this._kernelSize.height];
  11911. const tex = this._tex[0];
  11912. const outputTexture = this._tex[1];
  11913. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  11914. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  11915. this.output().swrite(outputTexture, format);
  11916. }
  11917. /**
  11918. * Update the internal kernel to match
  11919. * sigma and kernelSize
  11920. */
  11921. _updateKernel() {
if (this._sigma.x == DEFAULT_SIGMA.x) this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];
else this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
if (this._sigma.y == DEFAULT_SIGMA.y) this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];
else this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
  11924. }
  11925. }
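/*
* Usage sketch (illustrative): configuring the blur. With sigma left at the
* default (0,0), the kernel follows sigma = max(1, (ksize - 1) / 4); e.g. a
* 9x9 kernel implies sigma = 2. Assumes the Speedy.Size / Speedy.Vector2
* factories of the public facade.
*
*   const blur = Speedy.Filter.GaussianBlur();
*   blur.kernelSize = Speedy.Size(9, 9);      // odd sizes from 3x3 to 15x15
*   blur.sigma = Speedy.Vector2(2.0, 2.0);    // optional: override the default rule
*/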
  11926. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
  11927. /*
  11928. * speedy-vision.js
  11929. * GPU-accelerated Computer Vision for JavaScript
  11930. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11931. *
  11932. * Licensed under the Apache License, Version 2.0 (the "License");
  11933. * you may not use this file except in compliance with the License.
  11934. * You may obtain a copy of the License at
  11935. *
  11936. * http://www.apache.org/licenses/LICENSE-2.0
  11937. *
  11938. * Unless required by applicable law or agreed to in writing, software
  11939. * distributed under the License is distributed on an "AS IS" BASIS,
  11940. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11941. * See the License for the specific language governing permissions and
  11942. * limitations under the License.
  11943. *
  11944. * simple-blur.js
  11945. * Simple Blur (Box Filter)
  11946. */
  11947. /** 1D convolution filters */
  11948. const BOX_FILTER = Object.freeze({
  11949. 3: new Array(3).fill(1 / 3),
  11950. 5: new Array(5).fill(1 / 5),
  11951. 7: new Array(7).fill(1 / 7),
  11952. 9: new Array(9).fill(1 / 9),
  11953. 11: new Array(11).fill(1 / 11),
  11954. 13: new Array(13).fill(1 / 13),
  11955. 15: new Array(15).fill(1 / 15)
  11956. });
  11957. /** convolution programs (x-axis) */
  11958. const simple_blur_CONVOLUTION_X = Object.freeze({
  11959. 3: 'convolution3x',
  11960. 5: 'convolution5x',
  11961. 7: 'convolution7x',
  11962. 9: 'convolution9x',
  11963. 11: 'convolution11x',
  11964. 13: 'convolution13x',
  11965. 15: 'convolution15x'
  11966. });
  11967. /** convolution programs (y-axis) */
  11968. const simple_blur_CONVOLUTION_Y = Object.freeze({
  11969. 3: 'convolution3y',
  11970. 5: 'convolution5y',
  11971. 7: 'convolution7y',
  11972. 9: 'convolution9y',
  11973. 11: 'convolution11y',
  11974. 13: 'convolution13y',
  11975. 15: 'convolution15y'
  11976. });
  11977. /**
  11978. * @typedef {object} SeparableConvolutionKernel
  11979. * @property {number[]} x
  11980. * @property {number[]} y
  11981. */
  11982. /**
  11983. * Simple Blur (Box Filter)
  11984. */
  11985. class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode {
  11986. /**
  11987. * Constructor
  11988. * @param {string} [name] name of the node
  11989. */
  11990. constructor(name = undefined) {
  11991. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11992. /** @type {SpeedySize} size of the kernel */
  11993. this._kernelSize = new SpeedySize(5, 5);
  11994. /** @type {SeparableConvolutionKernel} convolution kernel */
  11995. this._kernel = {
  11996. x: BOX_FILTER[this._kernelSize.width],
  11997. y: BOX_FILTER[this._kernelSize.height]
  11998. };
  11999. }
  12000. /**
  12001. * Size of the kernel
  12002. * @returns {SpeedySize}
  12003. */
  12004. get kernelSize() {
  12005. return this._kernelSize;
  12006. }
  12007. /**
  12008. * Size of the kernel
  12009. * @param {SpeedySize} kernelSize
  12010. */
  12011. set kernelSize(kernelSize) {
  12012. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  12013. const kw = kernelSize.width,
  12014. kh = kernelSize.height;
  12015. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  12016. this._kernelSize = kernelSize;
  12017. this._kernel.x = BOX_FILTER[this._kernelSize.width];
  12018. this._kernel.y = BOX_FILTER[this._kernelSize.height];
  12019. }
  12020. /**
  12021. * Run the specific task of this node
  12022. * @param {SpeedyGPU} gpu
  12023. * @returns {void|SpeedyPromise<void>}
  12024. */
  12025. _run(gpu) {
  12026. const {
  12027. image,
  12028. format
  12029. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12030. const width = image.width,
  12031. height = image.height;
  12032. const kernX = this._kernel.x;
  12033. const kernY = this._kernel.y;
  12034. const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
  12035. const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
  12036. const tex = this._tex[0];
  12037. const outputTexture = this._tex[1];
  12038. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  12039. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  12040. this.output().swrite(outputTexture, format);
  12041. }
  12042. }
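/*
* Usage sketch (illustrative): the box filter is a separable average, so a
* k x k blur costs two 1D passes instead of a single k*k pass.
*
*   const blur = Speedy.Filter.SimpleBlur();
*   blur.kernelSize = Speedy.Size(7, 7);   // odd sizes from 3x3 to 15x15
*/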
  12043. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
  12044. /*
  12045. * speedy-vision.js
  12046. * GPU-accelerated Computer Vision for JavaScript
  12047. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12048. *
  12049. * Licensed under the Apache License, Version 2.0 (the "License");
  12050. * you may not use this file except in compliance with the License.
  12051. * You may obtain a copy of the License at
  12052. *
  12053. * http://www.apache.org/licenses/LICENSE-2.0
  12054. *
  12055. * Unless required by applicable law or agreed to in writing, software
  12056. * distributed under the License is distributed on an "AS IS" BASIS,
  12057. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12058. * See the License for the specific language governing permissions and
  12059. * limitations under the License.
  12060. *
  12061. * median-blur.js
  12062. * Median Blur
  12063. */
  12064. // Median programs
  12065. const MEDIAN = {
  12066. 3: 'median3',
  12067. 5: 'median5',
  12068. 7: 'median7'
  12069. };
  12070. /**
  12071. * Median Blur
  12072. */
  12073. class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode {
  12074. /**
  12075. * Constructor
  12076. * @param {string} [name] name of the node
  12077. */
  12078. constructor(name = undefined) {
  12079. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12080. /** @type {SpeedySize} size of the kernel (assumed to be square) */
  12081. this._kernelSize = new SpeedySize(5, 5);
  12082. }
  12083. /**
  12084. * Size of the kernel
  12085. * @returns {SpeedySize}
  12086. */
  12087. get kernelSize() {
  12088. return this._kernelSize;
  12089. }
  12090. /**
  12091. * Size of the kernel
  12092. * @param {SpeedySize} kernelSize
  12093. */
  12094. set kernelSize(kernelSize) {
  12095. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  12096. const ksize = kernelSize.width;
if (!(ksize == 3 || ksize == 5 || ksize == 7))
throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);
else if (kernelSize.width != kernelSize.height)
throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
  12098. this._kernelSize = kernelSize;
  12099. }
  12100. /**
  12101. * Run the specific task of this node
  12102. * @param {SpeedyGPU} gpu
  12103. * @returns {void|SpeedyPromise<void>}
  12104. */
  12105. _run(gpu) {
  12106. const {
  12107. image,
  12108. format
  12109. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12110. const width = image.width,
  12111. height = image.height;
  12112. const ksize = this._kernelSize.width;
  12113. const med = MEDIAN[ksize];
  12114. const outputTexture = this._tex[0];
  12115. gpu.programs.filters[med].outputs(width, height, outputTexture)(image);
  12116. this.output().swrite(outputTexture, format);
  12117. }
  12118. }
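/*
* Usage sketch (illustrative): median filtering expects a greyscale input (see
* the input port constraint above) and a square kernel of size 3, 5 or 7.
*
*   const median = Speedy.Filter.MedianBlur();
*   median.kernelSize = Speedy.Size(5, 5);
*   greyscale.output().connectTo(median.input());
*/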
  12119. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
  12120. /*
  12121. * speedy-vision.js
  12122. * GPU-accelerated Computer Vision for JavaScript
  12123. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12124. *
  12125. * Licensed under the Apache License, Version 2.0 (the "License");
  12126. * you may not use this file except in compliance with the License.
  12127. * You may obtain a copy of the License at
  12128. *
  12129. * http://www.apache.org/licenses/LICENSE-2.0
  12130. *
  12131. * Unless required by applicable law or agreed to in writing, software
  12132. * distributed under the License is distributed on an "AS IS" BASIS,
  12133. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12134. * See the License for the specific language governing permissions and
  12135. * limitations under the License.
  12136. *
  12137. * convolution.js
  12138. * Image convolution
  12139. */
  12140. // 2D convolution programs
  12141. const CONVOLUTION = {
  12142. 3: 'convolution3',
  12143. 5: 'convolution5',
  12144. 7: 'convolution7'
  12145. };
  12146. /**
  12147. * Image convolution
  12148. */
  12149. class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode {
  12150. /**
  12151. * Constructor
  12152. * @param {string} [name] name of the node
  12153. */
  12154. constructor(name = undefined) {
  12155. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12156. /** @type {SpeedyMatrix} convolution kernel (square matrix) */
  12157. this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
  12158. }
  12159. /**
  12160. * Convolution kernel
  12161. * @returns {SpeedyMatrix}
  12162. */
  12163. get kernel() {
  12164. return this._kernel;
  12165. }
  12166. /**
  12167. * Convolution kernel
  12168. * @param {SpeedyMatrix} kernel
  12169. */
  12170. set kernel(kernel) {
  12171. if (kernel.rows != kernel.columns) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);else if (!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7)) throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
  12172. this._kernel = kernel;
  12173. }
  12174. /**
  12175. * Run the specific task of this node
  12176. * @param {SpeedyGPU} gpu
  12177. * @returns {void|SpeedyPromise<void>}
  12178. */
  12179. _run(gpu) {
  12180. const {
  12181. image,
  12182. format
  12183. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12184. const width = image.width,
  12185. height = image.height;
  12186. const outputTexture = this._tex[0];
  12187. const ksize = this._kernel.rows;
  12188. const conv = CONVOLUTION[ksize];
  12189. const kernel = this._kernel.read();
  12190. gpu.programs.filters[conv].outputs(width, height, outputTexture)(image, kernel);
  12191. this.output().swrite(outputTexture, format);
  12192. }
  12193. }
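/*
 * Illustrative usage sketch (not part of the bundle): the convolution node accepts
 * a square 3x3, 5x5 or 7x7 kernel given as a SpeedyMatrix; the default kernel set
 * in the constructor above is the identity (only the center entry is 1). Assuming
 * the public Speedy.Matrix and Speedy.Filter.Convolution factories, a 3x3 sharpen
 * kernel could be set like this (the kernel is symmetric, so its storage order
 * does not matter):
 *
 *   const conv = Speedy.Filter.Convolution();
 *   conv.kernel = Speedy.Matrix(3, 3, [
 *        0, -1,  0,
 *       -1,  5, -1,
 *        0, -1,  0
 *   ]);
 */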
  12194. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
  12195. /*
  12196. * speedy-vision.js
  12197. * GPU-accelerated Computer Vision for JavaScript
  12198. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12199. *
  12200. * Licensed under the Apache License, Version 2.0 (the "License");
  12201. * you may not use this file except in compliance with the License.
  12202. * You may obtain a copy of the License at
  12203. *
  12204. * http://www.apache.org/licenses/LICENSE-2.0
  12205. *
  12206. * Unless required by applicable law or agreed to in writing, software
  12207. * distributed under the License is distributed on an "AS IS" BASIS,
  12208. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12209. * See the License for the specific language governing permissions and
  12210. * limitations under the License.
  12211. *
  12212. * nightvision.js
  12213. * Nightvision filter
  12214. */
  12215. /**
  12216. * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
  12217. */
  12218. /**
  12219. * Nightvision filter: "see in the dark"
  12220. */
  12221. class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode {
  12222. /**
  12223. * Constructor
  12224. * @param {string} [name] name of the node
  12225. */
  12226. constructor(name = undefined) {
  12227. super(name, 3, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.RGBA || msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12228. /** @type {number} a value typically in [0,1]: larger number => higher contrast */
  12229. this._gain = 0.5;
  12230. /** @type {number} a value typically in [0,1]: controls brightness */
  12231. this._offset = 0.5;
  12232. /** @type {number} gain decay, a value in [0,1] */
  12233. this._decay = 0.0;
  12234. /** @type {NightvisionQualityLevel} quality level */
  12235. this._quality = 'medium';
  12236. }
  12237. /**
  12238. * Gain, a value typically in [0,1]: larger number => higher contrast
  12239. * @returns {number}
  12240. */
  12241. get gain() {
  12242. return this._gain;
  12243. }
  12244. /**
  12245. * Gain, a value typically in [0,1]: larger number => higher contrast
  12246. * @param {number} gain
  12247. */
  12248. set gain(gain) {
  12249. this._gain = +gain;
  12250. }
  12251. /**
  12252. * Offset, a value typically in [0,1] that controls the brightness
  12253. * @returns {number}
  12254. */
  12255. get offset() {
  12256. return this._offset;
  12257. }
  12258. /**
  12259. * Offset, a value typically in [0,1] that controls the brightness
  12260. * @param {number} offset
  12261. */
  12262. set offset(offset) {
  12263. this._offset = +offset;
  12264. }
  12265. /**
  12266. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12267. * @returns {number}
  12268. */
  12269. get decay() {
  12270. return this._decay;
  12271. }
  12272. /**
  12273. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12274. * @param {number} decay
  12275. */
  12276. set decay(decay) {
  12277. this._decay = Math.max(0.0, Math.min(+decay, 1.0));
  12278. }
  12279. /**
  12280. * Quality level of the filter
  12281. * @returns {NightvisionQualityLevel}
  12282. */
  12283. get quality() {
  12284. return this._quality;
  12285. }
  12286. /**
  12287. * Quality level of the filter
  12288. * @param {NightvisionQualityLevel} quality
  12289. */
  12290. set quality(quality) {
  12291. if (quality === 'high' || quality === 'medium' || quality === 'low') this._quality = quality;else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
  12292. }
  12293. /**
  12294. * Run the specific task of this node
  12295. * @param {SpeedyGPU} gpu
  12296. * @returns {void|SpeedyPromise<void>}
  12297. */
  12298. _run(gpu) {
  12299. const {
  12300. image,
  12301. format
  12302. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12303. const width = image.width,
  12304. height = image.height;
  12305. const gain = this._gain;
  12306. const offset = this._offset;
  12307. const decay = this._decay;
  12308. const quality = this._quality;
  12309. const filters = gpu.programs.filters;
  12310. const tmp = this._tex[0];
  12311. const illuminationMap = this._tex[1];
  12312. const outputTexture = this._tex[2];
  12313. // compute illumination map
  12314. if (quality == 'medium') {
  12315. filters.illuminationMapX.outputs(width, height, tmp);
  12316. filters.illuminationMapY.outputs(width, height, illuminationMap);
  12317. filters.illuminationMapX(image);
  12318. filters.illuminationMapY(tmp);
  12319. } else if (quality == 'high') {
  12320. filters.illuminationMapHiX.outputs(width, height, tmp);
  12321. filters.illuminationMapHiY.outputs(width, height, illuminationMap);
  12322. filters.illuminationMapHiX(image);
  12323. filters.illuminationMapHiY(tmp);
  12324. } else if (quality == 'low') {
  12325. filters.illuminationMapLoX.outputs(width, height, tmp);
  12326. filters.illuminationMapLoY.outputs(width, height, illuminationMap);
  12327. filters.illuminationMapLoX(image);
  12328. filters.illuminationMapLoY(tmp);
  12329. }
  12330. // run nightvision
  12331. if (format === types/* ImageFormat */.f5.GREY) {
  12332. filters.nightvisionGreyscale.outputs(width, height, outputTexture);
  12333. filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
  12334. } else if (format === types/* ImageFormat */.f5.RGBA) {
  12335. filters.nightvision.outputs(width, height, outputTexture);
  12336. filters.nightvision(image, illuminationMap, gain, offset, decay);
  12337. }
  12338. // done!
  12339. this.output().swrite(outputTexture, format);
  12340. }
  12341. }
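/*
 * Illustrative usage sketch (not part of the bundle): the nightvision node first
 * computes an illumination map (at 'low', 'medium' or 'high' quality) and then
 * remaps the input using gain, offset and decay, as implemented in _run() above.
 * Assuming the public Speedy.Filter.Nightvision factory:
 *
 *   const nightvision = Speedy.Filter.Nightvision();
 *   nightvision.gain = 0.8;       // more contrast than the default of 0.5
 *   nightvision.offset = 0.5;     // brightness
 *   nightvision.decay = 0.1;      // gain decays slightly away from the center
 *   nightvision.quality = 'high'; // 'low' | 'medium' | 'high'
 */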
  12342. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
  12343. /*
  12344. * speedy-vision.js
  12345. * GPU-accelerated Computer Vision for JavaScript
  12346. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12347. *
  12348. * Licensed under the Apache License, Version 2.0 (the "License");
  12349. * you may not use this file except in compliance with the License.
  12350. * You may obtain a copy of the License at
  12351. *
  12352. * http://www.apache.org/licenses/LICENSE-2.0
  12353. *
  12354. * Unless required by applicable law or agreed to in writing, software
  12355. * distributed under the License is distributed on an "AS IS" BASIS,
  12356. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12357. * See the License for the specific language governing permissions and
  12358. * limitations under the License.
  12359. *
  12360. * normalize.js
  12361. * Normalize image to a range
  12362. */
  12363. /**
  12364. * Normalize image to a range
  12365. */
  12366. class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode {
  12367. /**
  12368. * Constructor
  12369. * @param {string} [name] name of the node
  12370. */
  12371. constructor(name = undefined) {
  12372. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12373. /** @type {number} a value in [0,255] */
  12374. this._minValue = 0;
  12375. /** @type {number} a value in [0,255] */
  12376. this._maxValue = 255;
  12377. }
  12378. /**
  12379. * Minimum intensity in the output image, a value in [0,255]
  12380. * @returns {number}
  12381. */
  12382. get minValue() {
  12383. return this._minValue;
  12384. }
  12385. /**
  12386. * Minimum intensity in the output image, a value in [0,255]
  12387. * @param {number} minValue
  12388. */
  12389. set minValue(minValue) {
  12390. this._minValue = Math.max(0, Math.min(+minValue, 255));
  12391. }
  12392. /**
  12393. * Maximum intensity in the output image, a value in [0,255]
  12394. * @returns {number}
  12395. */
  12396. get maxValue() {
  12397. return this._maxValue;
  12398. }
  12399. /**
  12400. * Maximum intensity in the output image, a value in [0,255]
  12401. * @param {number} maxValue
  12402. */
  12403. set maxValue(maxValue) {
  12404. this._maxValue = Math.max(0, Math.min(+maxValue, 255));
  12405. }
  12406. /**
  12407. * Run the specific task of this node
  12408. * @param {SpeedyGPU} gpu
  12409. * @returns {void|SpeedyPromise<void>}
  12410. */
  12411. _run(gpu) {
  12412. const {
  12413. image,
  12414. format
  12415. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12416. const width = image.width,
  12417. height = image.height;
  12418. const outputTexture = this._tex[3];
  12419. let minValue = this._minValue;
  12420. let maxValue = this._maxValue;
  12421. if (minValue > maxValue) minValue = maxValue = (minValue + maxValue) / 2;
  12422. const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
  12423. gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
  12424. gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
  12425. this.output().swrite(outputTexture, format);
  12426. }
  12427. /**
  12428. * Scan a single component in all pixels of the image and find the min & max intensities
  12429. * @param {SpeedyGPU} gpu
  12430. * @param {SpeedyTexture} image input image
  12431. * @param {PixelComponent} pixelComponent a single PixelComponent flag
  12432. * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
  12433. */
  12434. _scanMinMax(gpu, image, pixelComponent) {
  12435. const tex = this._tex;
  12436. const program = gpu.programs.utils;
  12437. const width = image.width,
  12438. height = image.height;
  12439. const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
  12440. utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
  12441. program.copyComponents.outputs(width, height, tex[2]);
  12442. program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
  12443. let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
  12444. for (let i = 0; i < numIterations; i++) texture = program.scanMinMax2D(texture, i);
  12445. return texture;
  12446. }
  12447. }
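/*
 * Illustrative usage sketch (not part of the bundle): the normalize node stretches
 * the intensities of a GREY image to the [minValue, maxValue] range; if minValue
 * exceeds maxValue, _run() above collapses both to their average. Assuming the
 * public Speedy.Filter.Normalize factory:
 *
 *   const normalize = Speedy.Filter.Normalize();
 *   normalize.minValue = 0;
 *   normalize.maxValue = 255;   // full-range contrast stretch
 */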
  12448. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
  12449. /*
  12450. * speedy-vision.js
  12451. * GPU-accelerated Computer Vision for JavaScript
  12452. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12453. *
  12454. * Licensed under the Apache License, Version 2.0 (the "License");
  12455. * you may not use this file except in compliance with the License.
  12456. * You may obtain a copy of the License at
  12457. *
  12458. * http://www.apache.org/licenses/LICENSE-2.0
  12459. *
  12460. * Unless required by applicable law or agreed to in writing, software
  12461. * distributed under the License is distributed on an "AS IS" BASIS,
  12462. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12463. * See the License for the specific language governing permissions and
  12464. * limitations under the License.
  12465. *
  12466. * filter-factory.js
  12467. * Image filters
  12468. */
  12469. /**
  12470. * Image filters
  12471. */
  12472. class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12473. /**
  12474. * Convert image to greyscale
  12475. * @param {string} [name]
  12476. * @returns {SpeedyPipelineNodeGreyscale}
  12477. */
  12478. static Greyscale(name = undefined) {
  12479. return new SpeedyPipelineNodeGreyscale(name);
  12480. }
  12481. /**
  12482. * Gaussian Blur
  12483. * @param {string} [name]
  12484. * @returns {SpeedyPipelineNodeGaussianBlur}
  12485. */
  12486. static GaussianBlur(name = undefined) {
  12487. return new SpeedyPipelineNodeGaussianBlur(name);
  12488. }
  12489. /**
  12490. * Simple Blur (Box Filter)
  12491. * @param {string} [name]
  12492. * @returns {SpeedyPipelineNodeSimpleBlur}
  12493. */
  12494. static SimpleBlur(name = undefined) {
  12495. return new SpeedyPipelineNodeSimpleBlur(name);
  12496. }
  12497. /**
  12498. * Median Blur
  12499. * @param {string} [name]
  12500. * @returns {SpeedyPipelineNodeMedianBlur}
  12501. */
  12502. static MedianBlur(name = undefined) {
  12503. return new SpeedyPipelineNodeMedianBlur(name);
  12504. }
  12505. /**
  12506. * Image Convolution
  12507. * @param {string} [name]
  12508. * @returns {SpeedyPipelineNodeConvolution}
  12509. */
  12510. static Convolution(name = undefined) {
  12511. return new SpeedyPipelineNodeConvolution(name);
  12512. }
  12513. /**
  12514. * Nightvision
  12515. * @param {string} [name]
  12516. * @returns {SpeedyPipelineNodeNightvision}
  12517. */
  12518. static Nightvision(name = undefined) {
  12519. return new SpeedyPipelineNodeNightvision(name);
  12520. }
  12521. /**
  12522. * Normalize image
  12523. * @param {string} [name]
  12524. * @returns {SpeedyPipelineNodeNormalize}
  12525. */
  12526. static Normalize(name = undefined) {
  12527. return new SpeedyPipelineNodeNormalize(name);
  12528. }
  12529. }
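/*
 * Illustrative pipeline sketch (not part of the bundle): the factory above backs
 * the public Speedy.Filter namespace. Assuming the Speedy.Pipeline and
 * Speedy.Image source/sink factories defined elsewhere in this bundle, and a
 * SpeedyMedia obtained e.g. from Speedy.load(), the filter nodes can be chained
 * like this:
 *
 *   const pipeline = Speedy.Pipeline();
 *   const source = Speedy.Image.Source();
 *   const greyscale = Speedy.Filter.Greyscale();
 *   const blur = Speedy.Filter.GaussianBlur();
 *   const sink = Speedy.Image.Sink('image');
 *
 *   source.media = media; // a SpeedyMedia
 *   source.output().connectTo(greyscale.input());
 *   greyscale.output().connectTo(blur.input());
 *   blur.output().connectTo(sink.input());
 *
 *   pipeline.init(source, greyscale, blur, sink);
 *   pipeline.run().then(({ image }) => console.log(image.width, image.height));
 */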
  12530. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
  12531. /*
  12532. * speedy-vision.js
  12533. * GPU-accelerated Computer Vision for JavaScript
  12534. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12535. *
  12536. * Licensed under the Apache License, Version 2.0 (the "License");
  12537. * you may not use this file except in compliance with the License.
  12538. * You may obtain a copy of the License at
  12539. *
  12540. * http://www.apache.org/licenses/LICENSE-2.0
  12541. *
  12542. * Unless required by applicable law or agreed to in writing, software
  12543. * distributed under the License is distributed on an "AS IS" BASIS,
  12544. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12545. * See the License for the specific language governing permissions and
  12546. * limitations under the License.
  12547. *
  12548. * perspective-warp.js
  12549. * Warp an image using a perspective transformation
  12550. */
  12551. // Used when an invalid matrix is provided
  12552. const SINGULAR_MATRIX = [0, 0, 0, 0, 0, 0, 0, 0, 1];
  12553. /**
  12554. * Warp an image using a perspective transformation
  12555. */
  12556. class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode {
  12557. /**
  12558. * Constructor
  12559. * @param {string} [name] name of the node
  12560. */
  12561. constructor(name = undefined) {
  12562. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12563. /** @type {SpeedyMatrix} perspective transformation */
  12564. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  12565. }
  12566. /**
  12567. * Perspective transform, a 3x3 homography matrix
  12568. * @returns {SpeedyMatrix}
  12569. */
  12570. get transform() {
  12571. return this._transform;
  12572. }
  12573. /**
  12574. * Perspective transform, a 3x3 homography matrix
  12575. * @param {SpeedyMatrix} transform
  12576. */
  12577. set transform(transform) {
  12578. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  12579. this._transform = transform;
  12580. }
  12581. /**
  12582. * Run the specific task of this node
  12583. * @param {SpeedyGPU} gpu
  12584. * @returns {void|SpeedyPromise<void>}
  12585. */
  12586. _run(gpu) {
  12587. const {
  12588. image,
  12589. format
  12590. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12591. const width = image.width,
  12592. height = image.height;
  12593. const outputTexture = this._tex[0];
  12594. const homography = this._transform.read();
  12595. const inverseHomography = this._inverse3(homography);
  12596. const isValidHomography = !Number.isNaN(inverseHomography[0]);
  12597. gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
  12598. gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
  12599. this.output().swrite(outputTexture, format);
  12600. }
  12601. /**
  12602. * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
  12603. * @param {number[]} mat 3x3 matrix in column-major format
  12604. * @param {number} [eps] epsilon
  12605. * @returns {number[]} 3x3 inverse matrix in column-major format
  12606. */
  12607. _inverse3(mat, eps = 1e-6) {
  12608. // read the entries of the matrix
  12609. const a11 = mat[0];
  12610. const a21 = mat[1];
  12611. const a31 = mat[2];
  12612. const a12 = mat[3];
  12613. const a22 = mat[4];
  12614. const a32 = mat[5];
  12615. const a13 = mat[6];
  12616. const a23 = mat[7];
  12617. const a33 = mat[8];
  12618. // compute cofactors
  12619. const b1 = a33 * a22 - a32 * a23; // b11
  12620. const b2 = a33 * a12 - a32 * a13; // b21
  12621. const b3 = a23 * a12 - a22 * a13; // b31
  12622. // compute the determinant
  12623. const det = a11 * b1 - a21 * b2 + a31 * b3;
  12624. // set up the inverse
  12625. if (!(Math.abs(det) < eps)) {
  12626. const d = 1.0 / det;
  12627. mat[0] = b1 * d;
  12628. mat[1] = -(a33 * a21 - a31 * a23) * d;
  12629. mat[2] = (a32 * a21 - a31 * a22) * d;
  12630. mat[3] = -b2 * d;
  12631. mat[4] = (a33 * a11 - a31 * a13) * d;
  12632. mat[5] = -(a32 * a11 - a31 * a12) * d;
  12633. mat[6] = b3 * d;
  12634. mat[7] = -(a23 * a11 - a21 * a13) * d;
  12635. mat[8] = (a22 * a11 - a21 * a12) * d;
  12636. } else mat.fill(Number.NaN, 0, 9);
  12637. // done!
  12638. return mat;
  12639. }
  12640. }
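/*
 * Note on _inverse3() above (not part of the bundle): the warp samples the input
 * with the INVERSE of the user-supplied homography, computed in-place via the
 * adjugate: inv(A) = adj(A) / det(A), where det(A) = a11*b1 - a21*b2 + a31*b3 is
 * expanded along the first column. If |det| < eps, the entries are set to NaN and
 * _run() falls back to SINGULAR_MATRIX. Illustrative check with a pure translation
 * by (tx, ty) = (30, 20), whose inverse is a translation by (-30, -20):
 *
 *   // column-major 3x3: [1, 0, 0,  0, 1, 0,  tx, ty, 1]
 *   const warp = Speedy.Transform.PerspectiveWarp(); // assumed public factory
 *   warp.transform = Speedy.Matrix(3, 3, [1, 0, 0, 0, 1, 0, 30, 20, 1]);
 *   // internally, _inverse3 produces [1, 0, 0, 0, 1, 0, -30, -20, 1]
 */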
  12641. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
  12642. /*
  12643. * speedy-vision.js
  12644. * GPU-accelerated Computer Vision for JavaScript
  12645. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12646. *
  12647. * Licensed under the Apache License, Version 2.0 (the "License");
  12648. * you may not use this file except in compliance with the License.
  12649. * You may obtain a copy of the License at
  12650. *
  12651. * http://www.apache.org/licenses/LICENSE-2.0
  12652. *
  12653. * Unless required by applicable law or agreed to in writing, software
  12654. * distributed under the License is distributed on an "AS IS" BASIS,
  12655. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12656. * See the License for the specific language governing permissions and
  12657. * limitations under the License.
  12658. *
  12659. * resize.js
  12660. * Resize image
  12661. */
  12662. /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
  12663. /**
  12664. * Resize image
  12665. */
  12666. class SpeedyPipelineNodeResize extends SpeedyPipelineNode {
  12667. /**
  12668. * Constructor
  12669. * @param {string} [name] name of the node
  12670. */
  12671. constructor(name = undefined) {
  12672. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12673. /** @type {SpeedySize} size of the output image, in pixels */
  12674. this._size = new SpeedySize(0, 0);
  12675. /** @type {SpeedyVector2} size of the output relative to the size of the input */
  12676. this._scale = new SpeedyVector2(1, 1);
  12677. /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
  12678. this._method = 'bilinear';
  12679. }
  12680. /**
  12681. * Size of the output image, in pixels (use 0 to use scale)
  12682. * @returns {SpeedySize}
  12683. */
  12684. get size() {
  12685. return this._size;
  12686. }
  12687. /**
  12688. * Size of the output image, in pixels (use 0 to use scale)
  12689. * @param {SpeedySize} size
  12690. */
  12691. set size(size) {
  12692. this._size = size;
  12693. }
  12694. /**
  12695. * Size of the output image relative to the size of the input image
  12696. * @returns {SpeedyVector2}
  12697. */
  12698. get scale() {
  12699. return this._scale;
  12700. }
  12701. /**
  12702. * Size of the output image relative to the size of the input image
  12703. * @param {SpeedyVector2} scale
  12704. */
  12705. set scale(scale) {
  12706. this._scale = scale;
  12707. }
  12708. /**
  12709. * Interpolation method
  12710. * @returns {SpeedyPipelineNodeResizeMethod}
  12711. */
  12712. get method() {
  12713. return this._method;
  12714. }
  12715. /**
  12716. * Interpolation method
  12717. * @param {SpeedyPipelineNodeResizeMethod} method
  12718. */
  12719. set method(method) {
12720. if (method !== 'nearest' && method !== 'bilinear') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid interpolation method: "${method}"`);
  12721. this._method = method;
  12722. }
  12723. /**
  12724. * Run the specific task of this node
  12725. * @param {SpeedyGPU} gpu
  12726. * @returns {void|SpeedyPromise<void>}
  12727. */
  12728. _run(gpu) {
  12729. const {
  12730. image,
  12731. format
  12732. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12733. const width = image.width,
  12734. height = image.height;
  12735. const outputTexture = this._tex[0];
  12736. const method = this._method;
  12737. const newWidth = this._size.width || Math.max(1, this._scale.x * width);
  12738. const newHeight = this._size.height || Math.max(1, this._scale.y * height);
  12739. if (method == 'bilinear') {
  12740. gpu.programs.transforms.resizeBilinear.outputs(newWidth, newHeight, outputTexture)(image);
  12741. } else if (method == 'nearest') {
  12742. gpu.programs.transforms.resizeNearest.outputs(newWidth, newHeight, outputTexture)(image);
  12743. }
  12744. this.output().swrite(outputTexture, format);
  12745. }
  12746. }
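/*
 * Illustrative usage sketch (not part of the bundle): the resize node uses the
 * absolute size when it is nonzero and falls back to the relative scale otherwise,
 * as computed in _run() above (newWidth = size.width || max(1, scale.x * width)).
 * Assuming the public Speedy.Transform.Resize, Speedy.Size and Speedy.Vector2
 * factories:
 *
 *   const resize = Speedy.Transform.Resize();
 *   resize.size = Speedy.Size(0, 0);          // 0 means "use the scale instead"
 *   resize.scale = Speedy.Vector2(0.5, 0.5);  // downscale to half the input size
 *   resize.method = 'bilinear';               // or 'nearest'
 */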
  12747. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
  12748. /*
  12749. * speedy-vision.js
  12750. * GPU-accelerated Computer Vision for JavaScript
  12751. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12752. *
  12753. * Licensed under the Apache License, Version 2.0 (the "License");
  12754. * you may not use this file except in compliance with the License.
  12755. * You may obtain a copy of the License at
  12756. *
  12757. * http://www.apache.org/licenses/LICENSE-2.0
  12758. *
  12759. * Unless required by applicable law or agreed to in writing, software
  12760. * distributed under the License is distributed on an "AS IS" BASIS,
  12761. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12762. * See the License for the specific language governing permissions and
  12763. * limitations under the License.
  12764. *
  12765. * transform-factory.js
  12766. * Image transforms
  12767. */
  12768. /**
  12769. * Image transforms
  12770. */
  12771. class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12772. /**
  12773. * Resize image
  12774. * @param {string} [name]
  12775. * @returns {SpeedyPipelineNodeResize}
  12776. */
  12777. static Resize(name = undefined) {
  12778. return new SpeedyPipelineNodeResize(name);
  12779. }
  12780. /**
  12781. * Warp an image using a perspective transformation
  12782. * @param {string} [name]
  12783. * @returns {SpeedyPipelineNodePerspectiveWarp}
  12784. */
  12785. static PerspectiveWarp(name = undefined) {
  12786. return new SpeedyPipelineNodePerspectiveWarp(name);
  12787. }
  12788. }
  12789. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
  12790. /*
  12791. * speedy-vision.js
  12792. * GPU-accelerated Computer Vision for JavaScript
  12793. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12794. *
  12795. * Licensed under the Apache License, Version 2.0 (the "License");
  12796. * you may not use this file except in compliance with the License.
  12797. * You may obtain a copy of the License at
  12798. *
  12799. * http://www.apache.org/licenses/LICENSE-2.0
  12800. *
  12801. * Unless required by applicable law or agreed to in writing, software
  12802. * distributed under the License is distributed on an "AS IS" BASIS,
  12803. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12804. * See the License for the specific language governing permissions and
  12805. * limitations under the License.
  12806. *
  12807. * detector.js
  12808. * Abstract keypoint detectors
  12809. */
  12810. // Constants
  12811. const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
  12812. const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
  12813. const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
  12814. const NUMBER_OF_RGBA16_TEXTURES = 2;
  12815. // legacy constants
  12816. const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
  12817. const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
  12818. const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
  12819. /**
  12820. * Abstract keypoint detector
  12821. * @abstract
  12822. */
  12823. class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode {
  12824. /**
  12825. * Constructor
  12826. * @param {string} [name] name of the node
  12827. * @param {number} [texCount] number of work textures
  12828. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  12829. */
  12830. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  12831. super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
  12832. /** @type {number} encoder capacity */
  12833. this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
  12834. /** @type {GLint} auxiliary storage */
  12835. this._oldWrapS = 0;
12836. /** @type {SpeedyDrawableTexture[]} textures with 8 bytes per pixel */
  12837. this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
  12838. }
  12839. /**
  12840. * Initialize this node
  12841. * @param {SpeedyGPU} gpu
  12842. */
  12843. init(gpu) {
  12844. // initialize
  12845. super.init(gpu);
  12846. // encodeKeypointSkipOffsets() relies on this
  12847. this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
  12848. // allocate RGBA16 textures
  12849. this._allocateTex16(gpu);
  12850. gpu.subscribe(this._allocateTex16, this, gpu);
  12851. }
  12852. /**
  12853. * Release this node
  12854. * @param {SpeedyGPU} gpu
  12855. */
  12856. release(gpu) {
  12857. // deallocate RGBA16 textures
  12858. gpu.unsubscribe(this._allocateTex16, this);
  12859. this._deallocateTex16(gpu);
  12860. // we need to restore the texture parameter because textures come from a pool!
  12861. this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
  12862. // release
  12863. super.release(gpu);
  12864. }
  12865. /**
  12866. * Set a parameter of the special texture
  12867. * @param {GLenum} pname
  12868. * @param {GLint} param new value
  12869. * @returns {GLint} old value of param
  12870. */
  12871. _setupSpecialTexture(pname, param) {
12872. if (NUMBER_OF_INTERNAL_TEXTURES == 0) return 0;
  12873. // legacy code
  12874. const texture = this._tex[this._tex.length - 1];
  12875. const gl = texture.gl;
  12876. gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
  12877. const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
  12878. gl.texParameteri(gl.TEXTURE_2D, pname, param);
  12879. gl.bindTexture(gl.TEXTURE_2D, null);
  12880. return oldval;
  12881. }
  12882. /**
  12883. * We can encode up to this many keypoints. If you find a
  12884. * tight bound for this, download times will be faster.
  12885. * @returns {number}
  12886. */
  12887. get capacity() {
  12888. return this._capacity;
  12889. }
  12890. /**
  12891. * We can encode up to this many keypoints. If you find a
  12892. * tight bound for this, download times will be faster.
  12893. * @param {number} capacity
  12894. */
  12895. set capacity(capacity) {
  12896. this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
  12897. }
  12898. /**
  12899. * Create a tiny texture with encoded keypoints out of
  12900. * an encoded corners texture
  12901. * @param {SpeedyGPU} gpu
  12902. * @param {SpeedyTexture} corners input
  12903. * @param {SpeedyDrawableTexture} encodedKeypoints output
  12904. * @param {number} [descriptorSize] in bytes
  12905. * @param {number} [extraSize] in bytes
  12906. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12907. */
  12908. _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12909. const encoderCapacity = this._capacity;
  12910. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
  12911. const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
  12912. const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
  12913. //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
  12914. const maxSize = Math.max(width, height);
  12915. const keypoints = gpu.programs.keypoints;
  12916. // prepare programs
  12917. keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
  12918. keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
  12919. keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12920. // compute lookup table
  12921. let lookupTable = keypoints.initLookupTable(corners);
  12922. for (let b = 1; b < maxSize; b *= 2) lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
  12923. /*
  12924. // debug: view texture
  12925. const lookupView = (keypoints.viewLookupTable.outputs(
  12926. width, height, this._tex[0]
  12927. ))(lookupTable);
  12928. const canvas = gpu.renderToCanvas(lookupView);
  12929. if(!this._ww) document.body.appendChild(canvas);
  12930. this._ww = 1;
  12931. */
  12932. // encode keypoints
  12933. return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
  12934. }
  12935. _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12936. const capacity = this._capacity;
  12937. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12938. const width = corners.width,
  12939. height = corners.height;
  12940. const imageSize = [width, height];
  12941. const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
  12942. const keypoints = gpu.programs.keypoints;
  12943. const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
  12944. // prepare programs
  12945. keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
  12946. keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
  12947. keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
  12948. keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
  12949. // copy the input corners to a special texture
  12950. // that is needed by encodeKeypointSkipOffsets()
  12951. corners = gpu.programs.utils.copy.outputs(width, height, specialTexture)(corners);
  12952. // encode skip offsets
  12953. let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
  12954. for (let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) {
  12955. // to boost performance
  12956. // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
  12957. // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
  12958. offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
  12959. }
  12960. /*
  12961. // debug: view corners
  12962. let cornerview = offsets;
  12963. const canvas = gpu.renderToCanvas(cornerview);
  12964. if(!window._ww) document.body.appendChild(canvas);
  12965. window._ww = 1;
  12966. */
  12967. // encode keypoint positions
  12968. let encodedKps = tex[3].clear();
  12969. for (let j = 0; j < ENCODER_PASSES; j++) encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
  12970. // encode keypoint properties
  12971. return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
  12972. }
  12973. /**
  12974. * Create a tiny texture with zero encoded keypoints
  12975. * @param {SpeedyGPU} gpu
  12976. * @param {SpeedyDrawableTexture} encodedKeypoints output texture
  12977. * @param {number} [descriptorSize] in bytes
  12978. * @param {number} [extraSize] in bytes
  12979. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12980. */
  12981. _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12982. const capacity = 0;
  12983. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12984. const keypoints = gpu.programs.keypoints;
  12985. keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12986. return keypoints.encodeNullKeypoints();
  12987. }
  12988. /**
  12989. * Allocate RGBA16 textures
  12990. * @param {SpeedyGPU} gpu
  12991. */
  12992. _allocateTex16(gpu) {
  12993. const gl = gpu.gl;
  12994. // RGBA16UI is color renderable according to the OpenGL ES 3 spec
  12995. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
  12996. }
  12997. /**
  12998. * Deallocate RGBA16 textures
  12999. * @param {SpeedyGPU} gpu
  13000. */
  13001. _deallocateTex16(gpu) {
  13002. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = this._tex16[i].release();
  13003. }
  13004. /**
  13005. * Compute the length of the keypoint encoder, given its capacity
  13006. * @param {number} encoderCapacity how many keypoints can we fit?
  13007. * @param {number} descriptorSize in bytes
  13008. * @param {number} extraSize in bytes
  13009. */
  13010. static encoderLength(encoderCapacity, descriptorSize, extraSize) {
  13011. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  13012. const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
  13013. return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
  13014. }
  13015. /**
  13016. * The maximum number of keypoints we can store using
  13017. * a particular configuration of a keypoint encoder
  13018. * @param {number} descriptorSize in bytes
  13019. * @param {number} extraSize in bytes
  13020. * @param {number} encoderLength
  13021. */
  13022. static encoderCapacity(descriptorSize, extraSize, encoderLength) {
  13023. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  13024. const numberOfPixels = encoderLength * encoderLength;
  13025. return Math.floor(numberOfPixels / pixelsPerKeypoint);
  13026. }
  13027. }
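/*
 * Worked example for encoderLength() / encoderCapacity() above (not part of the
 * bundle). Each keypoint occupies an 8-byte header (see the keypoint sink's
 * _decode(), which reads 8 header bytes) plus descriptorSize + extraSize bytes,
 * packed into 4-byte RGBA pixels. With descriptorSize = 32 and extraSize = 0:
 *
 *   pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10
 *   encoderLength(1024, 32, 0) = ceil(sqrt(1024 * 10)) = 102  (at least MIN_ENCODER_LENGTH)
 *   encoderCapacity(32, 0, 102) = floor(102 * 102 / 10) = 1040 >= 1024
 *
 * i.e., the encoder texture is the smallest square that fits the requested
 * capacity, and reading that square back yields a capacity at least as large as
 * the one requested.
 */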
  13028. /**
  13029. * Abstract scale-space keypoint detector
  13030. * @abstract
  13031. */
  13032. class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector {
  13033. /**
  13034. * Constructor
  13035. * @param {string} [name] name of the node
  13036. * @param {number} [texCount] number of work textures
  13037. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  13038. */
  13039. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  13040. super(name, texCount, portBuilders);
  13041. /** @type {number} number of pyramid levels */
  13042. this._levels = 1;
  13043. /** @type {number} scale factor between two pyramid levels */
  13044. this._scaleFactor = DEFAULT_SCALE_FACTOR;
  13045. }
  13046. /**
  13047. * Number of pyramid levels
  13048. * @returns {number}
  13049. */
  13050. get levels() {
  13051. return this._levels;
  13052. }
  13053. /**
  13054. * Number of pyramid levels
  13055. * @param {number} levels
  13056. */
  13057. set levels(levels) {
  13058. this._levels = Math.max(1, levels | 0);
  13059. }
  13060. /**
  13061. * Scale factor between two pyramid levels
  13062. * @returns {number}
  13063. */
  13064. get scaleFactor() {
  13065. return this._scaleFactor;
  13066. }
  13067. /**
  13068. * Scale factor between two pyramid levels
  13069. * @param {number} scaleFactor should be greater than 1
  13070. */
  13071. set scaleFactor(scaleFactor) {
  13072. this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
  13073. }
  13074. }
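/*
 * Note (not part of the bundle): the scale factor is clamped to [1, 2] by the
 * setter above and defaults to sqrt(2), so consecutive pyramid levels differ by
 * half an octave. For example, levels = 7 with the default scale factor spans
 * sqrt(2)^6 = 8x, i.e. 3 octaves of the image pyramid.
 */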
  13075. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
  13076. /*
  13077. * speedy-vision.js
  13078. * GPU-accelerated Computer Vision for JavaScript
  13079. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13080. *
  13081. * Licensed under the Apache License, Version 2.0 (the "License");
  13082. * you may not use this file except in compliance with the License.
  13083. * You may obtain a copy of the License at
  13084. *
  13085. * http://www.apache.org/licenses/LICENSE-2.0
  13086. *
  13087. * Unless required by applicable law or agreed to in writing, software
  13088. * distributed under the License is distributed on an "AS IS" BASIS,
  13089. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13090. * See the License for the specific language governing permissions and
  13091. * limitations under the License.
  13092. *
  13093. * source.js
  13094. * Gets keypoints into the pipeline
  13095. */
  13096. // Constants
  13097. const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
  13098. const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
  13099. const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
  13100. /**
  13101. * Gets keypoints into the pipeline
  13102. */
  13103. class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode {
  13104. /**
  13105. * Constructor
  13106. * @param {string} [name] name of the node
  13107. */
  13108. constructor(name = undefined) {
  13109. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13110. /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
  13111. this._keypoints = [];
  13112. /** @type {Float32Array} upload buffer (UBO) */
  13113. this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
  13114. /** @type {number} maximum number of keypoints */
  13115. this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
  13116. }
  13117. /**
  13118. * Keypoints to be uploaded
  13119. * @returns {SpeedyKeypoint[]}
  13120. */
  13121. get keypoints() {
  13122. return this._keypoints;
  13123. }
  13124. /**
  13125. * Keypoints to be uploaded
  13126. * @param {SpeedyKeypoint[]} keypoints
  13127. */
  13128. set keypoints(keypoints) {
  13129. if (!Array.isArray(keypoints)) throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
  13130. this._keypoints = keypoints;
  13131. }
  13132. /**
  13133. * The maximum number of keypoints we'll accept.
  13134. * This should be a tight bound for better performance.
  13135. * @returns {number}
  13136. */
  13137. get capacity() {
  13138. return this._capacity;
  13139. }
  13140. /**
  13141. * The maximum number of keypoints we'll accept.
  13142. * This should be a tight bound for better performance.
  13143. * @param {number} capacity
  13144. */
  13145. set capacity(capacity) {
  13146. this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
  13147. }
  13148. /**
  13149. * Run the specific task of this node
  13150. * @param {SpeedyGPU} gpu
  13151. * @returns {void|SpeedyPromise<void>}
  13152. */
  13153. _run(gpu) {
  13154. // Orientation, descriptors and extra bytes will be lost
  13155. const descriptorSize = 0,
  13156. extraSize = 0;
  13157. const keypoints = this._keypoints;
  13158. const maxKeypoints = this._capacity;
  13159. const numKeypoints = Math.min(keypoints.length, maxKeypoints);
  13160. const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
  13161. const buffer = this._buffer;
  13162. const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
  13163. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
  13164. uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
  13165. let startIndex = 0,
  13166. encodedKeypoints = uploadKeypoints.clear();
  13167. for (let i = 0; i < numPasses; i++) {
  13168. const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
  13169. const endIndex = startIndex + n;
  13170. uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
  13171. encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
  13172. startIndex = endIndex;
  13173. }
  13174. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13175. }
  13176. /**
  13177. * Create an upload buffer
  13178. * @param {number} bufferSize number of keypoints
  13179. * @returns {Float32Array}
  13180. */
  13181. static _createUploadBuffer(bufferSize) {
  13182. const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
  13183. utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
  13184. return new Float32Array(internalBuffer);
  13185. }
  13186. /**
  13187. * Fill upload buffer with keypoint data
  13188. * @param {Float32Array} buffer
  13189. * @param {SpeedyKeypoint[]} keypoints
  13190. * @param {number} start index, inclusive
  13191. * @param {number} end index, exclusive
  13192. * @returns {Float32Array} buffer
  13193. */
  13194. static _fillUploadBuffer(buffer, keypoints, start, end) {
  13195. const n = end - start;
  13196. for (let i = 0; i < n; i++) {
  13197. const keypoint = keypoints[start + i];
  13198. const hasPos = keypoint.position !== undefined;
  13199. const j = i * 4;
  13200. // Format data as follows:
  13201. // vec4(xpos, ypos, lod, score)
  13202. buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
  13203. buffer[j + 1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
  13204. buffer[j + 2] = +keypoint.lod || 0;
  13205. buffer[j + 3] = +keypoint.score || 0;
  13206. }
  13207. // done!
  13208. return buffer;
  13209. }
  13210. }
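/*
 * Note on the upload buffer (not part of the bundle): each keypoint is packed as a
 * vec4(xpos, ypos, lod, score) by _fillUploadBuffer() above, so one pass uploads
 * BUFFER_SIZE * SIZEOF_VEC4 = 1024 * 16 = 16384 bytes, exactly the minimum UBO
 * size guaranteed by OpenGL ES 3 (UBO_MAX_BYTES). Illustrative usage, assuming the
 * public Speedy.Keypoint.Source factory:
 *
 *   const source = Speedy.Keypoint.Source();
 *   source.keypoints = myKeypoints;        // your own SpeedyKeypoint[]
 *   source.capacity = myKeypoints.length;  // a tight bound avoids texture resizes
 */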
  13211. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
  13212. /*
  13213. * speedy-vision.js
  13214. * GPU-accelerated Computer Vision for JavaScript
  13215. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13216. *
  13217. * Licensed under the Apache License, Version 2.0 (the "License");
  13218. * you may not use this file except in compliance with the License.
  13219. * You may obtain a copy of the License at
  13220. *
  13221. * http://www.apache.org/licenses/LICENSE-2.0
  13222. *
  13223. * Unless required by applicable law or agreed to in writing, software
  13224. * distributed under the License is distributed on an "AS IS" BASIS,
  13225. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13226. * See the License for the specific language governing permissions and
  13227. * limitations under the License.
  13228. *
  13229. * speedy-keypoint-descriptor.js
  13230. * Keypoint descriptor
  13231. */
  13232. /**
  13233. * Represents a keypoint descriptor
  13234. */
  13235. class SpeedyKeypointDescriptor {
  13236. /**
  13237. * Constructor
  13238. * @param {Uint8Array} data descriptor bytes
  13239. */
  13240. constructor(data) {
  13241. this._data = data;
  13242. return Object.freeze(this);
  13243. }
  13244. /**
  13245. * Descriptor data
  13246. * @returns {Uint8Array}
  13247. */
  13248. get data() {
  13249. return this._data;
  13250. }
  13251. /**
  13252. * The size of the descriptor, in bytes
  13253. * @returns {number}
  13254. */
  13255. get size() {
  13256. return this._data.byteLength;
  13257. }
  13258. /**
  13259. * A string representation of the keypoint descriptor
  13260. * @returns {string}
  13261. */
  13262. toString() {
  13263. return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
  13264. }
  13265. }
  13266. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
  13267. /*
  13268. * speedy-vision.js
  13269. * GPU-accelerated Computer Vision for JavaScript
  13270. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13271. *
  13272. * Licensed under the Apache License, Version 2.0 (the "License");
  13273. * you may not use this file except in compliance with the License.
  13274. * You may obtain a copy of the License at
  13275. *
  13276. * http://www.apache.org/licenses/LICENSE-2.0
  13277. *
  13278. * Unless required by applicable law or agreed to in writing, software
  13279. * distributed under the License is distributed on an "AS IS" BASIS,
  13280. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13281. * See the License for the specific language governing permissions and
  13282. * limitations under the License.
  13283. *
  13284. * sink.js
  13285. * Gets keypoints out of the pipeline
  13286. */
  13287. /** next power of 2 */
  13288. const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  13289. /** empty array of bytes */
  13290. const ZERO_BYTES = new Uint8Array([]);
  13291. /**
  13292. * Gets keypoints out of the pipeline
  13293. * @template {SpeedyKeypoint} T
  13294. * @abstract
  13295. */
  13296. class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode {
  13297. /**
  13298. * Constructor
  13299. * @param {string} [name] name of the node
  13300. * @param {number} [texCount]
  13301. * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
  13302. */
  13303. constructor(name = 'keypoints', texCount = 0, portBuilders = []) {
  13304. super(name, texCount + 2, portBuilders);
  13305. /** @type {Array<T|null>} keypoints (output) */
  13306. this._keypoints = [];
  13307. /** @type {SpeedyTextureReader} texture reader */
  13308. this._textureReader = new SpeedyTextureReader();
  13309. /** @type {number} page flipping index */
  13310. this._page = 0;
  13311. /** @type {boolean} accelerate GPU-CPU transfers */
  13312. this._turbo = false;
  13313. /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
  13314. this._includeDiscarded = false;
  13315. }
  13316. /**
  13317. * Accelerate GPU-CPU transfers
  13318. * @returns {boolean}
  13319. */
  13320. get turbo() {
  13321. return this._turbo;
  13322. }
  13323. /**
  13324. * Accelerate GPU-CPU transfers
  13325. * @param {boolean} value
  13326. */
  13327. set turbo(value) {
  13328. this._turbo = Boolean(value);
  13329. }
  13330. /**
  13331. * Should discarded keypoints be exported as null or dropped altogether?
  13332. * @returns {boolean}
  13333. */
  13334. get includeDiscarded() {
  13335. return this._includeDiscarded;
  13336. }
  13337. /**
  13338. * Should discarded keypoints be exported as null or dropped altogether?
  13339. * @param {boolean} value
  13340. */
  13341. set includeDiscarded(value) {
  13342. this._includeDiscarded = Boolean(value);
  13343. }
  13344. /**
  13345. * Initializes this node
  13346. * @param {SpeedyGPU} gpu
  13347. */
  13348. init(gpu) {
  13349. super.init(gpu);
  13350. this._textureReader.init(gpu);
  13351. }
  13352. /**
  13353. * Releases this node
  13354. * @param {SpeedyGPU} gpu
  13355. */
  13356. release(gpu) {
  13357. this._textureReader.release(gpu);
  13358. super.release(gpu);
  13359. }
  13360. /**
  13361. * Export data from this node to the user
  13362. * @returns {SpeedyPromise<Array<T|null>>}
  13363. */
  13364. export() {
  13365. return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
  13366. }
  13367. /**
  13368. * Run the specific task of this node
  13369. * @param {SpeedyGPU} gpu
  13370. * @returns {void|SpeedyPromise<void>}
  13371. */
  13372. _run(gpu) {
  13373. const {
  13374. encodedKeypoints,
  13375. descriptorSize,
  13376. extraSize,
  13377. encoderLength
  13378. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13379. return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13380. }
  13381. /**
  13382. * Download and decode keypoints from the GPU
  13383. * @param {SpeedyGPU} gpu
  13384. * @param {SpeedyDrawableTexture} encodedKeypoints
  13385. * @param {number} descriptorSize
  13386. * @param {number} extraSize
  13387. * @param {number} encoderLength
  13388. * @returns {SpeedyPromise<void>}
  13389. */
  13390. _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  13391. const useBufferedDownloads = this._turbo;
  13392. /*
  13393. I have found experimentally that, in Firefox, readPixelsAsync()
  13394. performs MUCH better if the width of the target texture is a power
  13395. of two. I have no idea why this is the case, nor if it's related to
  13396. some interaction with the GL drivers, somehow. This seems to make no
  13397. difference on Chrome, however. In any case, let's convert the input
  13398. texture to POT.
  13399. */
  13400. const encoderWidth = sink_nextPot(encoderLength);
  13401. //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  13402. const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
  13403. //const encoderWidth=encoderLength,encoderHeight=encoderLength;
  13404. // copy the set of keypoints to an internal texture
  13405. const copiedTexture = this._tex[this._tex.length - 1 - this._page];
  13406. gpu.programs.utils.copyKeypoints.outputs(encoderWidth, encoderHeight, copiedTexture)(encodedKeypoints);
  13407. // flip page
  13408. this._page = 1 - this._page;
  13409. // download the internal texture
  13410. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  13411. // decode the keypoints and store them in this._keypoints
  13412. this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
  13413. });
  13414. }
  13415. /**
  13416. * Decode a sequence of keypoints, given a flattened image of encoded pixels
  13417. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  13418. * @param {number} descriptorSize in bytes
  13419. * @param {number} extraSize in bytes
  13420. * @param {number} encoderWidth
  13421. * @param {number} encoderHeight
  13422. * @returns {Array<T|null>} keypoints
  13423. */
  13424. _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight) {
  13425. const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
  13426. const m = globals.LOG2_PYRAMID_MAX_SCALE,
  13427. h = globals.PYRAMID_MAX_LEVELS;
  13428. const piOver255 = Math.PI / 255.0;
  13429. const keypoints = /** @type {Array<T|null>} */[];
  13430. const includeDiscarded = this._includeDiscarded;
  13431. let descriptorBytes = ZERO_BYTES,
  13432. extraBytes = ZERO_BYTES;
  13433. let x, y, z, w, lod, rotation, score;
  13434. let keypoint;
  13435. // validate
  13436. if (descriptorSize % 4 != 0 || extraSize % 4 != 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
  13437. // how many bytes should we read?
  13438. const e2 = encoderWidth * encoderHeight * 4;
  13439. const size = pixels.byteLength;
  13440. if (size != e2) utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
  13441. // copy the data (we use shared buffers when receiving pixels[])
  13442. if (descriptorSize + extraSize > 0) pixels = new Uint8Array(pixels);
  13443. // for each encoded keypoint
  13444. for (let i = 0; i < size; i += bytesPerKeypoint) {
  13445. // extract encoded header
  13446. x = pixels[i + 1] << 8 | pixels[i];
  13447. y = pixels[i + 3] << 8 | pixels[i + 2];
  13448. z = pixels[i + 5] << 8 | pixels[i + 4];
  13449. w = pixels[i + 7] << 8 | pixels[i + 6];
  13450. // the keypoint is "null": we have reached the end of the list
  13451. if (x == 0xFFFF && y == 0xFFFF) break;
  13452. // the header is zero: discard the keypoint
  13453. if (x + y + z + w == 0) {
  13454. if (includeDiscarded) keypoints.push(null);
  13455. continue;
  13456. }
  13457. // extract extra & descriptor bytes
  13458. if (extraSize > 0) {
  13459. extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
  13460. if (extraBytes.byteLength < extraSize) {
  13461. utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
  13462. continue; // something is off here; discard
  13463. }
  13464. }
  13465. if (descriptorSize > 0) {
  13466. descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
  13467. if (descriptorBytes.byteLength < descriptorSize) {
  13468. utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
  13469. continue; // something is off here; discard
  13470. }
  13471. }
  13472. // decode position: convert from fixed-point
  13473. x /= globals.FIX_RESOLUTION;
  13474. y /= globals.FIX_RESOLUTION;
  13475. // decode level-of-detail
  13476. lod = pixels[i + 4] < 255 ? -m + (m + h) * pixels[i + 4] / 255.0 : 0.0;
  13477. // decode orientation
  13478. rotation = (2 * pixels[i + 5] - 255) * piOver255;
  13479. // decode score
  13480. score = utils/* Utils */.A.decodeFloat16(w);
  13481. // create keypoint
  13482. keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
  13483. // register keypoint
  13484. keypoints.push(keypoint);
  13485. }
  13486. // done!
  13487. return keypoints;
  13488. }
  13489. /**
  13490. * Instantiate a new keypoint
  13491. * @param {number} x
  13492. * @param {number} y
  13493. * @param {number} lod
  13494. * @param {number} rotation
  13495. * @param {number} score
  13496. * @param {Uint8Array} descriptorBytes
  13497. * @param {Uint8Array} extraBytes
  13498. * @returns {T}
  13499. */
  13500. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13501. throw new utils_errors/* AbstractMethodError */.aQ();
  13502. }
  13503. /**
  13504. * Allocate extra space
  13505. * @param {SpeedyGPU} gpu
  13506. * @param {SpeedyDrawableTexture} output output texture
  13507. * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
  13508. * @param {number} inputDescriptorSize in bytes, must be positive
  13509. * @param {number} inputExtraSize must be 0
  13510. * @param {number} outputDescriptorSize must be inputDescriptorSize
  13511. * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
  13512. * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
  13513. */
  13514. _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize) {
  13515. utils/* Utils */.A.assert(inputExtraSize === 0);
  13516. utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
  13517. const inputEncoderLength = inputEncodedKeypoints.width;
  13518. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  13519. const outputEncoderCapacity = inputEncoderCapacity;
  13520. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  13521. return gpu.programs.keypoints.allocateExtra.outputs(outputEncoderLength, outputEncoderLength, output)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  13522. }
  13523. }
  13524. /**
  13525. * Gets standard keypoints out of the pipeline
  13526. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
  13527. */
  13528. class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13529. /**
  13530. * Constructor
  13531. * @param {string} [name] name of the node
  13532. */
  13533. constructor(name = 'keypoints') {
  13534. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13535. }
  13536. /**
  13537. * Instantiate a new keypoint
  13538. * @param {number} x
  13539. * @param {number} y
  13540. * @param {number} lod
  13541. * @param {number} rotation
  13542. * @param {number} score
  13543. * @param {Uint8Array} descriptorBytes
  13544. * @param {Uint8Array} extraBytes
  13545. * @returns {SpeedyKeypoint}
  13546. */
  13547. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13548. const descriptorSize = descriptorBytes.byteLength;
  13549. // read descriptor, if any
  13550. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13551. // create keypoint
  13552. return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
  13553. }
  13554. }
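/*
 * Rough usage sketch (not part of this module): a keypoint sink is normally
 * created through speedy-vision's public factory and its decoded keypoints
 * appear in the object resolved by pipeline.run(), keyed by the node name
 * ('keypoints' by default). The factory and result names below are assumptions
 * based on the public API, not definitions from this file:
 *
 *   const sink = Speedy.Keypoint.Sink('keypoints');
 *   // ...link a detector's output to sink.input() and build the pipeline...
 *   const { keypoints } = await pipeline.run(); // SpeedyKeypoint[]
 */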
  13555. /**
  13556. * Gets tracked keypoints out of the pipeline
  13557. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
  13558. */
  13559. class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13560. /**
  13561. * Constructor
  13562. * @param {string} [name] name of the node
  13563. */
  13564. constructor(name = 'keypoints') {
  13565. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  13566. }
  13567. /**
  13568. * Run the specific task of this node
  13569. * @param {SpeedyGPU} gpu
  13570. * @returns {void|SpeedyPromise<void>}
  13571. */
  13572. _run(gpu) {
  13573. const {
  13574. encodedKeypoints,
  13575. descriptorSize,
  13576. extraSize,
  13577. encoderLength
  13578. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13579. const {
  13580. vectors
  13581. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input('flow').read();
  13582. // allocate extra space
  13583. const newDescriptorSize = descriptorSize;
  13584. const newExtraSize = 4; // 1 pixel per flow vector per keypoint
  13585. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13586. // attach flow vectors
  13587. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13588. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13589. // done!
  13590. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13591. }
  13592. /**
  13593. * Instantiate a new keypoint
  13594. * @param {number} x
  13595. * @param {number} y
  13596. * @param {number} lod
  13597. * @param {number} rotation
  13598. * @param {number} score
  13599. * @param {Uint8Array} descriptorBytes
  13600. * @param {Uint8Array} extraBytes
  13601. * @returns {SpeedyTrackedKeypoint}
  13602. */
  13603. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13604. const descriptorSize = descriptorBytes.byteLength;
  13605. const extraSize = extraBytes.byteLength;
  13606. // read descriptor, if any
  13607. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13608. // read flow vector
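// the 4 extra bytes hold the flow vector as two little-endian half-floats:
// bytes 0-1 encode fx and bytes 2-3 encode fy (decoded with decodeFloat16)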
  13609. const fx = utils/* Utils */.A.decodeFloat16(extraBytes[1] << 8 | extraBytes[0]);
  13610. const fy = utils/* Utils */.A.decodeFloat16(extraBytes[3] << 8 | extraBytes[2]);
  13611. const flow = new SpeedyVector2(fx, fy);
  13612. // create keypoint
  13613. return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
  13614. }
  13615. }
  13616. /**
  13617. * Gets matched keypoints out of the pipeline
13618. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>}
  13619. */
  13620. class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13621. /**
  13622. * Constructor
  13623. * @param {string} [name] name of the node
  13624. */
  13625. constructor(name = 'keypoints') {
  13626. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)]);
  13627. }
  13628. /**
  13629. * Run the specific task of this node
  13630. * @param {SpeedyGPU} gpu
  13631. * @returns {void|SpeedyPromise<void>}
  13632. */
  13633. _run(gpu) {
  13634. const {
  13635. encodedKeypoints,
  13636. descriptorSize,
  13637. extraSize,
  13638. encoderLength
  13639. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13640. const {
  13641. encodedMatches,
  13642. matchesPerKeypoint
  13643. } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */this.input('matches').read();
  13644. // allocate space for the matches
  13645. const newDescriptorSize = descriptorSize;
  13646. const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
  13647. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13648. // transfer matches to a new texture
  13649. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13650. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13651. // done!
  13652. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13653. }
  13654. /**
  13655. * Instantiate a new keypoint
  13656. * @param {number} x
  13657. * @param {number} y
  13658. * @param {number} lod
  13659. * @param {number} rotation
  13660. * @param {number} score
  13661. * @param {Uint8Array} descriptorBytes
  13662. * @param {Uint8Array} extraBytes
  13663. * @returns {SpeedyMatchedKeypoint}
  13664. */
  13665. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13666. const descriptorSize = descriptorBytes.byteLength;
  13667. const extraSize = extraBytes.byteLength;
  13668. // read descriptor, if any
  13669. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13670. // decode matches
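// each match occupies 4 extra bytes, read as a little-endian uint32: the low
// MATCH_INDEX_BITS bits hold the index of the matched keypoint and the remaining
// high bits hold the match distance, as consumed by SpeedyKeypointMatch below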
  13671. const matchesPerKeypoint = extraSize / 4;
  13672. const matches = /** @type {SpeedyKeypointMatch[]} */new Array(matchesPerKeypoint);
  13673. for (let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
  13674. const base = matchIndex * 4;
  13675. const u32 = extraBytes[base] | extraBytes[base + 1] << 8 | extraBytes[base + 2] << 16 | extraBytes[base + 3] << 24;
  13676. const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
  13677. matches[matchIndex] = match;
  13678. }
  13679. // done!
  13680. return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
  13681. }
  13682. }
  13683. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
  13684. /*
  13685. * speedy-vision.js
  13686. * GPU-accelerated Computer Vision for JavaScript
  13687. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13688. *
  13689. * Licensed under the Apache License, Version 2.0 (the "License");
  13690. * you may not use this file except in compliance with the License.
  13691. * You may obtain a copy of the License at
  13692. *
  13693. * http://www.apache.org/licenses/LICENSE-2.0
  13694. *
  13695. * Unless required by applicable law or agreed to in writing, software
  13696. * distributed under the License is distributed on an "AS IS" BASIS,
  13697. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13698. * See the License for the specific language governing permissions and
  13699. * limitations under the License.
  13700. *
  13701. * clipper.js
  13702. * Keypoint clipper
  13703. */
  13704. // Constants
  13705. const LOG2_STRIDE = 5;
  13706. const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
  13707. /**
  13708. * Keypoint clipper: filters the best keypoints from a stream
  13709. */
  13710. class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode {
  13711. /**
  13712. * Constructor
  13713. * @param {string} [name] name of the node
  13714. */
  13715. constructor(name = undefined) {
  13716. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13717. /** @type {number} the maximum number of keypoints in the output */
  13718. this._size = MAX_SIZE;
  13719. }
  13720. /**
  13721. * The maximum number of keypoints in the output
  13722. * @returns {number}
  13723. */
  13724. get size() {
  13725. return this._size;
  13726. }
  13727. /**
  13728. * The maximum number of keypoints in the output
  13729. * @param {number} size
  13730. */
  13731. set size(size) {
  13732. this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
  13733. }
  13734. /**
  13735. * Run the specific task of this node
  13736. * @param {SpeedyGPU} gpu
  13737. * @returns {void|SpeedyPromise<void>}
  13738. */
  13739. _run(gpu) {
  13740. const {
  13741. encodedKeypoints,
  13742. descriptorSize,
  13743. extraSize,
  13744. encoderLength
  13745. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13746. const keypoints = gpu.programs.keypoints;
  13747. const clipValue = this._size;
  13748. const tex = this._tex;
  13749. const outputTexture = this._tex[3];
  13750. // find the minimum power of 2 pot such that pot >= capacity
  13751. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13752. //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
  13753. // find the dimensions of the sorting shaders
  13754. const stride = 1 << LOG2_STRIDE; // must be a power of 2
  13755. //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
  13756. const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
  13757. const numberOfPixels = stride * height;
  13758. // find the dimensions of the output texture
  13759. const newCapacity = Math.min(capacity, clipValue);
  13760. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
  13761. // generate permutation of keypoints
  13762. keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
  13763. let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13764. // sort permutation
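// merge-sort the permutation in ceil(log2(numberOfPixels)) passes: pass i merges
// pairs of sorted blocks of size 2^(i-1) into sorted blocks of size 2^i, ping-ponging
// between two work textures until a single sorted block covers all entries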
  13765. const numPasses = Math.ceil(Math.log2(numberOfPixels));
  13766. keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
  13767. for (let i = 1; i <= numPasses; i++) {
  13768. const blockSize = 1 << i; // 2, 4, 8...
  13769. const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
  13770. permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
  13771. }
  13772. // apply permutation
  13773. keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
  13774. keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
  13775. /*
  13776. // debug (read the contents of the permutation)
  13777. const pixels = permutation.inspect(gpu), debug = [];
  13778. for(let i = 0; i < pixels.length; i += 4) {
  13779. let id = pixels[i] | (pixels[i+1] << 8);
  13780. let score = pixels[i+2] / 255.0;
  13781. let valid = pixels[i+3] / 255.0;
  13782. debug.push([ id, valid, score, ].join(', '));
  13783. }
  13784. console.log(debug);
  13785. */
  13786. // done!
  13787. this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
  13788. }
  13789. }
  13790. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
  13791. /*
  13792. * speedy-vision.js
  13793. * GPU-accelerated Computer Vision for JavaScript
  13794. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13795. *
  13796. * Licensed under the Apache License, Version 2.0 (the "License");
  13797. * you may not use this file except in compliance with the License.
  13798. * You may obtain a copy of the License at
  13799. *
  13800. * http://www.apache.org/licenses/LICENSE-2.0
  13801. *
  13802. * Unless required by applicable law or agreed to in writing, software
  13803. * distributed under the License is distributed on an "AS IS" BASIS,
  13804. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13805. * See the License for the specific language governing permissions and
  13806. * limitations under the License.
  13807. *
  13808. * border-clipper.js
  13809. * Keypoint Border Clipper
  13810. */
  13811. /**
  13812. * The Border Clipper removes all keypoints within a border of the edges of an image
  13813. */
  13814. class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode {
  13815. /**
  13816. * Constructor
  13817. * @param {string} [name] name of the node
  13818. */
  13819. constructor(name = undefined) {
  13820. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13821. /** @type {SpeedySize} image size, in pixels */
  13822. this._imageSize = new SpeedySize(0, 0);
  13823. /** @type {SpeedyVector2} border size, in pixels */
  13824. this._borderSize = new SpeedyVector2(0, 0);
  13825. }
  13826. /**
  13827. * Image size, in pixels
  13828. * @returns {SpeedySize}
  13829. */
  13830. get imageSize() {
  13831. return this._imageSize;
  13832. }
  13833. /**
  13834. * Image size, in pixels
  13835. * @param {SpeedySize} imageSize
  13836. */
  13837. set imageSize(imageSize) {
  13838. this._imageSize = imageSize;
  13839. }
  13840. /**
  13841. * Border size, in pixels
  13842. * @returns {SpeedyVector2}
  13843. */
  13844. get borderSize() {
  13845. return this._borderSize;
  13846. }
  13847. /**
  13848. * Border size, in pixels
  13849. * @param {SpeedyVector2} borderSize
  13850. */
  13851. set borderSize(borderSize) {
  13852. this._borderSize = borderSize;
  13853. }
  13854. /**
  13855. * Run the specific task of this node
  13856. * @param {SpeedyGPU} gpu
  13857. * @returns {void|SpeedyPromise<void>}
  13858. */
  13859. _run(gpu) {
  13860. const {
  13861. encodedKeypoints,
  13862. descriptorSize,
  13863. extraSize,
  13864. encoderLength
  13865. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13866. const keypoints = gpu.programs.keypoints;
  13867. const imageSize = this._imageSize;
  13868. const borderSize = this._borderSize;
  13869. const imageWidth = imageSize.width,
  13870. imageHeight = imageSize.height;
  13871. const borderLeft = borderSize.x,
  13872. borderRight = borderSize.x;
  13873. const borderTop = borderSize.y,
  13874. borderBottom = borderSize.y;
  13875. const tex = this._tex;
  13876. // validate
  13877. if (imageWidth == 0 || imageHeight == 0) throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
  13878. // find the capacity of the keypoint stream
  13879. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13880. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  13881. // prepare programs
  13882. keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
  13883. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13884. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  13885. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  13886. // clip keypoints
  13887. let clippedKeypoints = keypoints.clipBorder(imageWidth, imageHeight, borderTop, borderRight, borderBottom, borderLeft, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13888. // sort keypoints
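// mixKeypointsSort doubles the block size b on each pass (1, 2, 4, ...), so about
// log2(capacity) passes are needed; mixKeypointsApply then rewrites the encoded
// keypoints in the sorted order. (The intent -- compacting the stream so that
// surviving keypoints precede discarded ones -- is inferred from the usage here.)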
  13889. let sortedKeypoints = keypoints.mixKeypointsInit(clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  13890. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  13891. clippedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13892. /*
  13893. // debug: view keypoints
  13894. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13895. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  13896. */
  13897. // done!
  13898. this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13899. }
  13900. }
  13901. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
  13902. /*
  13903. * speedy-vision.js
  13904. * GPU-accelerated Computer Vision for JavaScript
  13905. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13906. *
  13907. * Licensed under the Apache License, Version 2.0 (the "License");
  13908. * you may not use this file except in compliance with the License.
  13909. * You may obtain a copy of the License at
  13910. *
  13911. * http://www.apache.org/licenses/LICENSE-2.0
  13912. *
  13913. * Unless required by applicable law or agreed to in writing, software
  13914. * distributed under the License is distributed on an "AS IS" BASIS,
  13915. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13916. * See the License for the specific language governing permissions and
  13917. * limitations under the License.
  13918. *
  13919. * buffer.js
  13920. * Keypoint Buffer
  13921. */
  13922. /**
  13923. * Keypoint Buffer: a node with memory.
  13924. * At time t, it outputs the keypoints received at time t-1
  13925. */
  13926. class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode {
  13927. /**
  13928. * Constructor
  13929. * @param {string} [name] name of the node
  13930. */
  13931. constructor(name = undefined) {
  13932. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13933. /** @type {number} current page: 0 or 1 */
  13934. this._pageIndex = 0;
  13935. /** @type {boolean} first run? */
  13936. this._initialized = false;
  13937. /** @type {number} previous descriptor size, in bytes */
  13938. this._previousDescriptorSize = 0;
  13939. /** @type {number} previous extra size, in bytes */
  13940. this._previousExtraSize = 0;
  13941. /** @type {number} previous encoder length */
  13942. this._previousEncoderLength = 0;
  13943. /** @type {boolean} frozen buffer? */
  13944. this._frozen = false;
  13945. }
  13946. /**
  13947. * A frozen buffer discards the input, effectively increasing the buffering time
  13948. * @returns {boolean}
  13949. */
  13950. get frozen() {
  13951. return this._frozen;
  13952. }
  13953. /**
  13954. * A frozen buffer discards the input, effectively increasing the buffering time
  13955. * @param {boolean} value
  13956. */
  13957. set frozen(value) {
  13958. this._frozen = Boolean(value);
  13959. }
  13960. /**
  13961. * Releases this node
  13962. * @param {SpeedyGPU} gpu
  13963. */
  13964. release(gpu) {
  13965. this._initialized = false;
  13966. super.release(gpu);
  13967. }
  13968. /**
  13969. * Run the specific task of this node
  13970. * @param {SpeedyGPU} gpu
  13971. * @returns {void|SpeedyPromise<void>}
  13972. */
  13973. _run(gpu) {
  13974. const {
  13975. encodedKeypoints,
  13976. descriptorSize,
  13977. extraSize,
  13978. encoderLength
  13979. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13980. const previousDescriptorSize = this._previousDescriptorSize;
  13981. const previousExtraSize = this._previousExtraSize;
  13982. const previousEncoderLength = this._previousEncoderLength;
  13983. const page = this._tex;
  13984. const previousInputTexture = page[1 - this._pageIndex];
  13985. const outputTexture = page[this._pageIndex];
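// double buffering: the node keeps two textures ("pages"); on each run the input
// is copied into the page that is not being output and the page index is flipped,
// so the texture emitted now is the one written on the previous run (a one-frame delay)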
  13986. // bufferize
  13987. if (!this._frozen || !this._initialized) {
  13988. // store input
  13989. this._previousDescriptorSize = descriptorSize;
  13990. this._previousExtraSize = extraSize;
  13991. this._previousEncoderLength = encoderLength;
  13992. previousInputTexture.resize(encoderLength, encoderLength);
  13993. encodedKeypoints.copyTo(previousInputTexture);
  13994. // page flipping
  13995. this._pageIndex = 1 - this._pageIndex;
  13996. }
  13997. // first run?
  13998. if (!this._initialized) {
  13999. this._initialized = true;
  14000. this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
  14001. return;
  14002. }
  14003. // done!
  14004. this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
  14005. }
  14006. }
  14007. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
  14008. /*
  14009. * speedy-vision.js
  14010. * GPU-accelerated Computer Vision for JavaScript
  14011. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14012. *
  14013. * Licensed under the Apache License, Version 2.0 (the "License");
  14014. * you may not use this file except in compliance with the License.
  14015. * You may obtain a copy of the License at
  14016. *
  14017. * http://www.apache.org/licenses/LICENSE-2.0
  14018. *
  14019. * Unless required by applicable law or agreed to in writing, software
  14020. * distributed under the License is distributed on an "AS IS" BASIS,
  14021. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14022. * See the License for the specific language governing permissions and
  14023. * limitations under the License.
  14024. *
  14025. * mixer.js
  14026. * Keypoint Mixer
  14027. */
  14028. /**
  14029. * Keypoint Mixer: merges two sets of keypoints
  14030. */
  14031. class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode {
  14032. /**
  14033. * Constructor
  14034. * @param {string} [name] name of the node
  14035. */
  14036. constructor(name = undefined) {
  14037. super(name, 5, [InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints), InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14038. }
  14039. /**
  14040. * Run the specific task of this node
  14041. * @param {SpeedyGPU} gpu
  14042. * @returns {void|SpeedyPromise<void>}
  14043. */
  14044. _run(gpu) {
  14045. const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in0').read();
  14046. const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in1').read();
  14047. const descriptorSize = kps0.descriptorSize;
  14048. const extraSize = kps0.extraSize;
  14049. const keypoints = gpu.programs.keypoints;
  14050. const tex = this._tex;
  14051. // ensure that the format of kps0 equals the format of kps1
14052. if (!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize)) throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
  14053. // find the capacity of kps0 + kps1
  14054. const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
  14055. const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
  14056. const capacity = cap0 + cap1;
  14057. // find the dimensions of the output texture
  14058. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  14059. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  14060. // prepare programs
  14061. keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
  14062. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14063. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  14064. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  14065. // mix keypoints
  14066. let mixedKeypoints = keypoints.mixKeypointsPreInit(kps0.encodedKeypoints, kps1.encodedKeypoints, kps0.encoderLength, kps1.encoderLength, cap0, cap1, descriptorSize, extraSize, encoderLength);
  14067. let sortedKeypoints = keypoints.mixKeypointsInit(mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14068. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  14069. mixedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14070. /*
  14071. // debug: view keypoints
  14072. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14073. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  14074. */
  14075. this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14076. }
  14077. }
  14078. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
  14079. /*
  14080. * speedy-vision.js
  14081. * GPU-accelerated Computer Vision for JavaScript
  14082. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14083. *
  14084. * Licensed under the Apache License, Version 2.0 (the "License");
  14085. * you may not use this file except in compliance with the License.
  14086. * You may obtain a copy of the License at
  14087. *
  14088. * http://www.apache.org/licenses/LICENSE-2.0
  14089. *
  14090. * Unless required by applicable law or agreed to in writing, software
  14091. * distributed under the License is distributed on an "AS IS" BASIS,
  14092. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14093. * See the License for the specific language governing permissions and
  14094. * limitations under the License.
  14095. *
  14096. * shuffler.js
  14097. * Keypoint Shuffler
  14098. */
  14099. /**
  14100. * The Keypoint Shuffler shuffles a list of keypoints
  14101. */
  14102. class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode {
  14103. /**
  14104. * Constructor
  14105. * @param {string} [name] name of the node
  14106. */
  14107. constructor(name = undefined) {
  14108. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14109. /** @type {number} maximum number of keypoints */
  14110. this._maxKeypoints = Number.NaN;
  14111. }
  14112. /**
  14113. * Maximum number of keypoints (optional)
  14114. * @returns {number}
  14115. */
  14116. get maxKeypoints() {
  14117. return this._maxKeypoints;
  14118. }
  14119. /**
  14120. * Maximum number of keypoints (optional)
  14121. * @param {number} value
  14122. */
  14123. set maxKeypoints(value) {
14124. if (!Number.isNaN(value)) this._maxKeypoints = Math.max(0, value | 0); else this._maxKeypoints = Number.NaN;
  14125. }
  14126. /**
  14127. * Run the specific task of this node
  14128. * @param {SpeedyGPU} gpu
  14129. * @returns {void|SpeedyPromise<void>}
  14130. */
  14131. _run(gpu) {
  14132. let {
  14133. encodedKeypoints,
  14134. descriptorSize,
  14135. extraSize,
  14136. encoderLength
  14137. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14138. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14139. const maxKeypoints = this._maxKeypoints;
  14140. // shuffle the keypoints (including nulls)
  14141. const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
  14142. const permutationLength = Math.min(permutationMaxLength, capacity);
  14143. const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
  14144. encodedKeypoints = gpu.programs.keypoints.shuffle.setUBO('Permutation', permutation).outputs(encoderLength, encoderLength, this._tex[0])(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14145. // sort the keypoints
  14146. gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
  14147. gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
  14148. gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
  14149. let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14150. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
  14151. encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14152. // clip the output?
  14153. if (!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
  14154. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
  14155. encodedKeypoints = gpu.programs.keypoints.clip.outputs(newEncoderLength, newEncoderLength, this._tex[5])(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
  14156. encoderLength = newEncoderLength;
  14157. }
  14158. // done!
  14159. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14160. }
  14161. /**
  14162. * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
  14163. * @param {number} n positive integer
  14164. * @param {number} [bufsize] size of the output array
  14165. * @returns {Int32Array} permutation
  14166. */
  14167. _generatePermutation(n, bufsize = n) {
  14168. const array = new Int32Array(bufsize);
  14169. const p = array.subarray(0, n).fill(-1);
  14170. const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
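// build an involution by pairing: scan i from 0 to n-1; whenever p[i] is still
// unassigned, draw candidates from the shuffled sequence q until one >= i is found,
// then pair the two indices (p[i] = q[j], p[q[j]] = i). Every entry is either a
// fixed point or part of a 2-cycle, hence p(p(x)) = x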
  14171. for (let i = 0, j = 0; i < n; i++) {
  14172. if (p[i] < 0) {
  14173. do {
  14174. p[i] = q[j++];
  14175. } while (p[i] < i);
  14176. p[p[i]] = i;
  14177. }
  14178. }
  14179. return array; // padded with zeros
  14180. }
  14181. }
  14182. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
  14183. /*
  14184. * speedy-vision.js
  14185. * GPU-accelerated Computer Vision for JavaScript
  14186. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14187. *
  14188. * Licensed under the Apache License, Version 2.0 (the "License");
  14189. * you may not use this file except in compliance with the License.
  14190. * You may obtain a copy of the License at
  14191. *
  14192. * http://www.apache.org/licenses/LICENSE-2.0
  14193. *
  14194. * Unless required by applicable law or agreed to in writing, software
  14195. * distributed under the License is distributed on an "AS IS" BASIS,
  14196. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14197. * See the License for the specific language governing permissions and
  14198. * limitations under the License.
  14199. *
  14200. * multiplexer.js
  14201. * Keypoint multiplexer
  14202. */
  14203. /** @type {string[]} the names of the input ports indexed by their number */
  14204. const multiplexer_INPUT_PORT = ['in0', 'in1'];
  14205. /**
  14206. * Keypoint multiplexer
  14207. */
  14208. class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode {
  14209. /**
  14210. * Constructor
  14211. * @param {string} [name] name of the node
  14212. */
  14213. constructor(name = undefined) {
  14214. super(name, 0, [...multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints)), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14215. /** @type {number} which port should be linked to the output? */
  14216. this._port = 0;
  14217. }
  14218. /**
  14219. * The number of the port that should be linked to the output
  14220. * @returns {number}
  14221. */
  14222. get port() {
  14223. return this._port;
  14224. }
  14225. /**
  14226. * The number of the port that should be linked to the output
  14227. * @param {number} port
  14228. */
  14229. set port(port) {
  14230. if (port < 0 || port >= multiplexer_INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  14231. this._port = port | 0;
  14232. }
  14233. /**
  14234. * Run the specific task of this node
  14235. * @param {SpeedyGPU} gpu
  14236. * @returns {void|SpeedyPromise<void>}
  14237. */
  14238. _run(gpu) {
  14239. const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
  14240. this.output().write(message);
  14241. }
  14242. }
  14243. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
  14244. /*
  14245. * speedy-vision.js
  14246. * GPU-accelerated Computer Vision for JavaScript
  14247. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14248. *
  14249. * Licensed under the Apache License, Version 2.0 (the "License");
  14250. * you may not use this file except in compliance with the License.
  14251. * You may obtain a copy of the License at
  14252. *
  14253. * http://www.apache.org/licenses/LICENSE-2.0
  14254. *
  14255. * Unless required by applicable law or agreed to in writing, software
  14256. * distributed under the License is distributed on an "AS IS" BASIS,
  14257. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14258. * See the License for the specific language governing permissions and
  14259. * limitations under the License.
  14260. *
  14261. * transformer.js
  14262. * Apply a transformation matrix to a set of keypoints
  14263. */
  14264. /**
  14265. * Apply a transformation matrix to a set of keypoints
  14266. */
  14267. class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode {
  14268. /**
  14269. * Constructor
  14270. * @param {string} [name] name of the node
  14271. */
  14272. constructor(name = undefined) {
  14273. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14274. /** @type {SpeedyMatrix} transformation matrix */
  14275. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  14276. }
  14277. /**
  14278. * Transformation matrix
  14279. * @returns {SpeedyMatrix}
  14280. */
  14281. get transform() {
  14282. return this._transform;
  14283. }
  14284. /**
  14285. * Transformation matrix. Must be 3x3
  14286. * @param {SpeedyMatrix} transform
  14287. */
  14288. set transform(transform) {
  14289. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  14290. this._transform = transform;
  14291. }
  14292. /**
  14293. * Run the specific task of this node
  14294. * @param {SpeedyGPU} gpu
  14295. * @returns {void|SpeedyPromise<void>}
  14296. */
  14297. _run(gpu) {
  14298. const {
  14299. encodedKeypoints,
  14300. descriptorSize,
  14301. extraSize,
  14302. encoderLength
  14303. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14304. const outputTexture = this._tex[0];
  14305. const homography = this._transform.read();
  14306. // apply homography
  14307. gpu.programs.keypoints.applyHomography.outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14308. // done!
  14309. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  14310. }
  14311. }
  14312. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
  14313. /*
  14314. * speedy-vision.js
  14315. * GPU-accelerated Computer Vision for JavaScript
  14316. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14317. *
  14318. * Licensed under the Apache License, Version 2.0 (the "License");
  14319. * you may not use this file except in compliance with the License.
  14320. * You may obtain a copy of the License at
  14321. *
  14322. * http://www.apache.org/licenses/LICENSE-2.0
  14323. *
  14324. * Unless required by applicable law or agreed to in writing, software
  14325. * distributed under the License is distributed on an "AS IS" BASIS,
  14326. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14327. * See the License for the specific language governing permissions and
  14328. * limitations under the License.
  14329. *
  14330. * subpixel.js
  14331. * Subpixel refinement of keypoint location
  14332. */
  14333. /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
  14334. /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
  14335. const METHOD2PROGRAM = Object.freeze({
  14336. 'quadratic1d': 'subpixelQuadratic1d',
  14337. 'taylor2d': 'subpixelTaylor2d',
  14338. 'bicubic-upsample': 'subpixelBicubic',
  14339. 'bilinear-upsample': 'subpixelBilinear'
  14340. });
  14341. /**
  14342. * Subpixel refinement of keypoint location
  14343. */
  14344. class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode {
  14345. /**
  14346. * Constructor
  14347. * @param {string} [name] name of the node
  14348. */
  14349. constructor(name = undefined) {
  14350. super(name, 2, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2)]);
  14351. /** @type {SubpixelRefinementMethod} subpixel refinement method */
  14352. this._method = 'quadratic1d';
  14353. /** @type {number} max iterations for the upsampling methods */
  14354. this._maxIterations = 6;
  14355. /** @type {number} convergence threshold for the upsampling methods */
  14356. this._epsilon = 0.1;
  14357. }
  14358. /**
  14359. * Subpixel refinement method
  14360. * @returns {SubpixelRefinementMethod}
  14361. */
  14362. get method() {
  14363. return this._method;
  14364. }
  14365. /**
  14366. * Subpixel refinement method
  14367. * @param {SubpixelRefinementMethod} name
  14368. */
  14369. set method(name) {
  14370. if (!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
  14371. this._method = name;
  14372. }
  14373. /**
  14374. * Max. iterations for the upsampling methods
  14375. * @returns {number}
  14376. */
  14377. get maxIterations() {
  14378. return this._maxIterations;
  14379. }
  14380. /**
  14381. * Max. iterations for the upsampling methods
  14382. * @param {number} value
  14383. */
  14384. set maxIterations(value) {
  14385. this._maxIterations = Math.max(0, +value);
  14386. }
  14387. /**
  14388. * Convergence threshold for the upsampling methods
  14389. * @returns {number}
  14390. */
  14391. get epsilon() {
  14392. return this._epsilon;
  14393. }
  14394. /**
  14395. * Convergence threshold for the upsampling methods
  14396. * @param {number} value
  14397. */
  14398. set epsilon(value) {
  14399. this._epsilon = Math.max(0, +value);
  14400. }
  14401. /**
  14402. * Run the specific task of this node
  14403. * @param {SpeedyGPU} gpu
  14404. * @returns {void|SpeedyPromise<void>}
  14405. */
  14406. _run(gpu) {
  14407. const {
  14408. encodedKeypoints,
  14409. descriptorSize,
  14410. extraSize,
  14411. encoderLength
  14412. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14413. const {
  14414. image,
  14415. format
  14416. } = /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read();
  14417. const tex = this._tex;
  14418. const program = METHOD2PROGRAM[this._method];
  14419. const maxIterations = this._maxIterations;
  14420. const epsilon = this._epsilon;
  14421. // note: if you detected the keypoints using a pyramid,
  14422. // you need to pass that pyramid as input!
  14423. // we'll compute the offsets for each keypoint
  14424. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14425. const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
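// a roughly square texture of side ceil(sqrt(capacity)) is enough to hold one
// refinement offset per keypoint; transferFlow below applies these offsets
// to the keypoint positions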
  14426. const offsets = gpu.programs.keypoints[program].outputs(offsetEncoderLength, offsetEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
  14427. // apply the offsets to the keypoints
  14428. const refinedKeypoints = gpu.programs.keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[1])(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14429. // done!
  14430. this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
  14431. this.output('displacements').swrite(offsets);
  14432. }
  14433. }
  14434. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
  14435. /*
  14436. * speedy-vision.js
  14437. * GPU-accelerated Computer Vision for JavaScript
  14438. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14439. *
  14440. * Licensed under the Apache License, Version 2.0 (the "License");
  14441. * you may not use this file except in compliance with the License.
  14442. * You may obtain a copy of the License at
  14443. *
  14444. * http://www.apache.org/licenses/LICENSE-2.0
  14445. *
  14446. * Unless required by applicable law or agreed to in writing, software
  14447. * distributed under the License is distributed on an "AS IS" BASIS,
  14448. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14449. * See the License for the specific language governing permissions and
  14450. * limitations under the License.
  14451. *
  14452. * fast.js
  14453. * FAST corner detector
  14454. */
  14455. // Constants
  14456. const DEFAULT_THRESHOLD = 20;
  14457. /**
  14458. * FAST corner detector
  14459. */
  14460. class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14461. /**
  14462. * Constructor
  14463. * @param {string} [name] name of the node
  14464. */
  14465. constructor(name = undefined) {
  14466. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14467. /** @type {number} FAST threshold in [0,255] */
  14468. this._threshold = DEFAULT_THRESHOLD;
  14469. }
  14470. /**
  14471. * FAST threshold in [0,255]
  14472. * @returns {number}
  14473. */
  14474. get threshold() {
  14475. return this._threshold;
  14476. }
  14477. /**
  14478. * FAST threshold in [0,255]
  14479. * @param {number} threshold
  14480. */
  14481. set threshold(threshold) {
  14482. this._threshold = Math.max(0, Math.min(threshold | 0, 255));
  14483. }
  14484. /**
  14485. * Run the specific task of this node
  14486. * @param {SpeedyGPU} gpu
  14487. * @returns {void|SpeedyPromise<void>}
  14488. */
  14489. _run(gpu) {
  14490. const {
  14491. image,
  14492. format
  14493. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  14494. const width = image.width,
  14495. height = image.height;
  14496. const tex = this._tex;
  14497. const capacity = this._capacity;
  14498. const threshold = this._threshold;
  14499. const lodStep = Math.log2(this.scaleFactor);
  14500. const levels = this.levels;
  14501. // validate pyramid
  14502. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14503. // skip if the capacity is zero
  14504. if (capacity == 0) {
  14505. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
  14506. const encoderLength = encodedKeypoints.width;
  14507. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14508. return;
  14509. }
  14510. // FAST
  14511. gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
  14512. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
  14513. let corners = tex[1].clear();
  14514. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
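// multi-scale detection: run the detector once per pyramid level, starting at the
// coarsest level-of-detail (lodStep * (numPasses - 1)) and stepping down to lod 0,
// accumulating the detected corners into a single texture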
  14515. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14516. corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
  14517. //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14518. }
  14519. // Same-scale non-maximum suppression
  14520. // *nicer results inside the loop; faster outside
  14521. // Hard to notice a difference when using FAST
  14522. corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14523. // Multi-scale non-maximum suppression
  14524. // (doesn't seem to remove many keypoints)
  14525. if (levels > 1) {
  14526. corners = gpu.programs.keypoints.nonmaxScaleSimple.outputs(width, height, tex[1])(corners, image, lodStep);
  14527. }
  14528. // encode keypoints
  14529. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
  14530. const encoderLength = encodedKeypoints.width;
  14531. // scale refinement
  14532. if (levels > 1) {
  14533. encodedKeypoints = gpu.programs.keypoints.refineScaleFAST916.outputs(encoderLength, encoderLength, tex[4])(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
  14534. }
  14535. // done!
  14536. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14537. }
  14538. }
  14539. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
  14540. /*
  14541. * speedy-vision.js
  14542. * GPU-accelerated Computer Vision for JavaScript
  14543. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14544. *
  14545. * Licensed under the Apache License, Version 2.0 (the "License");
  14546. * you may not use this file except in compliance with the License.
  14547. * You may obtain a copy of the License at
  14548. *
  14549. * http://www.apache.org/licenses/LICENSE-2.0
  14550. *
  14551. * Unless required by applicable law or agreed to in writing, software
  14552. * distributed under the License is distributed on an "AS IS" BASIS,
  14553. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14554. * See the License for the specific language governing permissions and
  14555. * limitations under the License.
  14556. *
  14557. * harris.js
  14558. * Harris corner detector
  14559. */
  14560. /** Window size helper */
  14561. const HARRIS = Object.freeze({
  14562. 1: 'harris1',
  14563. 3: 'harris3',
  14564. 5: 'harris5',
  14565. 7: 'harris7'
  14566. });
  14567. /**
  14568. * Harris corner detector
  14569. */
  14570. class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14571. /**
  14572. * Constructor
  14573. * @param {string} [name] name of the node
  14574. */
  14575. constructor(name = undefined) {
  14576. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14577. /** @type {SpeedySize} neighborhood size */
  14578. this._windowSize = new SpeedySize(3, 3);
  14579. /** @type {number} min corner quality in [0,1] */
  14580. this._quality = 0.1;
  14581. }
  14582. /**
  14583. * Minimum corner quality in [0,1] - this is a fraction of
  14584. * the largest min. eigenvalue of the autocorrelation matrix
  14585. * over the entire image
  14586. * @returns {number}
  14587. */
  14588. get quality() {
  14589. return this._quality;
  14590. }
  14591. /**
  14592. * Minimum corner quality in [0,1]
  14593. * @param {number} quality
  14594. */
  14595. set quality(quality) {
  14596. this._quality = Math.max(0.0, Math.min(+quality, 1.0));
  14597. }
  14598. /**
  14599. * Neighborhood size
  14600. * @returns {SpeedySize}
  14601. */
  14602. get windowSize() {
  14603. return this._windowSize;
  14604. }
  14605. /**
  14606. * Neighborhood size
  14607. * @param {SpeedySize} windowSize
  14608. */
  14609. set windowSize(windowSize) {
  14610. const d = windowSize.width;
  14611. if (!(d == windowSize.height && (d == 1 || d == 3 || d == 5 || d == 7))) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
  14612. this._windowSize = windowSize;
  14613. }
  14614. /**
  14615. * Run the specific task of this node
  14616. * @param {SpeedyGPU} gpu
  14617. * @returns {void|SpeedyPromise<void>}
  14618. */
  14619. _run(gpu) {
  14620. const {
  14621. image,
  14622. format
  14623. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  14624. const width = image.width,
  14625. height = image.height;
  14626. const capacity = this._capacity;
  14627. const quality = this._quality;
  14628. const windowSize = this._windowSize.width;
  14629. const levels = this.levels;
  14630. const lodStep = Math.log2(this.scaleFactor);
  14631. const intFactor = levels > 1 ? this.scaleFactor : 1;
  14632. const harris = gpu.programs.keypoints[HARRIS[windowSize]];
  14633. const tex = this._tex;
  14634. // validate pyramid
  14635. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14636. // skip if the capacity is zero
  14637. if (capacity == 0) {
  14638. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
  14639. const encoderLength = encodedKeypoints.width;
  14640. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14641. return;
  14642. }
  14643. // compute corner response map
  14644. harris.outputs(width, height, tex[0], tex[1]);
  14645. gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
  14646. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
  14647. let corners = tex[1].clear();
  14648. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
  14649. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14650. const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
  14651. const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
  14652. corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
  14653. corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14654. }
  14655. // Same-scale non-maximum suppression
  14656. // *performs better inside the loop
  14657. //corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14658. // Multi-scale non-maximum suppression
  14659. // (doesn't seem to remove many keypoints)
  14660. if (levels > 1) {
  14661. const laplacian = gpu.programs.keypoints.laplacian.outputs(width, height, tex[0])(corners, image, lodStep, 0);
  14662. corners = gpu.programs.keypoints.nonmaxScale.outputs(width, height, tex[2])(corners, image, laplacian, lodStep);
  14663. }
  14664. // find the maximum corner response over the entire image
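// parallel reduction: ceil(log2(max(width, height))) ping-pong passes of
// harrisScoreFindMax leave the global maximum corner response in maxScore;
// harrisScoreCutoff then discards responses below quality * maxScore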
  14665. gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
  14666. numPasses = Math.ceil(Math.log2(Math.max(width, height)));
  14667. let maxScore = corners;
  14668. for (let j = 0; j < numPasses; j++) maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
  14669. // discard corners below a quality level
  14670. corners = gpu.programs.keypoints.harrisScoreCutoff.outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])(corners, maxScore, quality);
  14671. // encode keypoints
  14672. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
  14673. const encoderLength = encodedKeypoints.width;
  14674. // scale refinement
  14675. if (levels > 1) {
  14676. encodedKeypoints = gpu.programs.keypoints.refineScaleLoG.outputs(encoderLength, encoderLength, tex[5])(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
  14677. }
  14678. // done!
  14679. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14680. }
  14681. }
  14682. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
  14683. /*
  14684. * speedy-vision.js
  14685. * GPU-accelerated Computer Vision for JavaScript
  14686. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14687. *
  14688. * Licensed under the Apache License, Version 2.0 (the "License");
  14689. * you may not use this file except in compliance with the License.
  14690. * You may obtain a copy of the License at
  14691. *
  14692. * http://www.apache.org/licenses/LICENSE-2.0
  14693. *
  14694. * Unless required by applicable law or agreed to in writing, software
  14695. * distributed under the License is distributed on an "AS IS" BASIS,
  14696. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14697. * See the License for the specific language governing permissions and
  14698. * limitations under the License.
  14699. *
  14700. * descriptor.js
  14701. * Abstract keypoint descriptor
  14702. */
  14703. /**
  14704. * Abstract keypoint descriptor
  14705. * @abstract
  14706. */
  14707. class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode {
  14708. /**
  14709. * Constructor
  14710. * @param {string} [name] name of the node
  14711. * @param {number} [texCount] number of work textures
  14712. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14713. */
  14714. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  14715. super(name, texCount + 1, portBuilders);
  14716. }
  14717. /**
  14718. *
  14719. * Allocate space for keypoint descriptors
  14720. * @param {SpeedyGPU} gpu
  14721. * @param {number} inputDescriptorSize should be 0
  14722. * @param {number} inputExtraSize must be non-negative
  14723. * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
  14724. * @param {number} outputExtraSize must be inputExtraSize
  14725. * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
  14726. * @returns {SpeedyDrawableTexture} encodedKeypoints
  14727. */
  14728. _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints) {
  14729. utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
  14730. utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
  14731. const inputEncoderLength = inputEncodedKeypoints.width;
  14732. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  14733. const outputEncoderCapacity = inputEncoderCapacity;
  14734. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  14735. const tex = this._tex[this._tex.length - 1];
  14736. return gpu.programs.keypoints.allocateDescriptors.outputs(outputEncoderLength, outputEncoderLength, tex)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  14737. }
  14738. }
  14739. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
  14740. /*
  14741. * speedy-vision.js
  14742. * GPU-accelerated Computer Vision for JavaScript
  14743. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14744. *
  14745. * Licensed under the Apache License, Version 2.0 (the "License");
  14746. * you may not use this file except in compliance with the License.
  14747. * You may obtain a copy of the License at
  14748. *
  14749. * http://www.apache.org/licenses/LICENSE-2.0
  14750. *
  14751. * Unless required by applicable law or agreed to in writing, software
  14752. * distributed under the License is distributed on an "AS IS" BASIS,
  14753. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14754. * See the License for the specific language governing permissions and
  14755. * limitations under the License.
  14756. *
  14757. * orb.js
  14758. * ORB descriptors
  14759. */
  14760. // Constants
  14761. const DESCRIPTOR_SIZE = 32; // 256 bits
  14762. /**
  14763. * ORB descriptors
  14764. */
  14765. class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor {
  14766. /**
  14767. * Constructor
  14768. * @param {string} [name] name of the node
  14769. */
  14770. constructor(name = undefined) {
  14771. super(name, 3, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14772. }
  14773. /**
  14774. * Run the specific task of this node
  14775. * @param {SpeedyGPU} gpu
  14776. * @returns {void|SpeedyPromise<void>}
  14777. */
  14778. _run(gpu) {
  14779. const {
  14780. encodedKeypoints,
  14781. descriptorSize,
  14782. extraSize,
  14783. encoderLength
  14784. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14785. const image = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read()).image;
  14786. const tex = this._tex;
  14787. const outputTexture = this._tex[2];
  14788. // compute orientation
  14789. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14790. const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
  14791. const encodedOrientations = gpu.programs.keypoints.orbOrientation.outputs(orientationEncoderLength, orientationEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14792. const orientedKeypoints = gpu.programs.keypoints.transferOrientation.outputs(encoderLength, encoderLength, tex[1])(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14793. // allocate space
  14794. const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
  14795. const newEncoderLength = encodedKps.width;
  14796. // compute descriptors (it's a good idea to blur the image)
  14797. const describedKeypoints = gpu.programs.keypoints.orbDescriptor.outputs(newEncoderLength, newEncoderLength, outputTexture)(image, encodedKps, extraSize, newEncoderLength);
  14798. // done!
  14799. this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
  14800. }
  14801. }
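/*
 * Usage sketch for the ORB node above, assuming speedy-vision's public
 * factories and wiring methods (Speedy.Pipeline, Speedy.Image.Source,
 * Speedy.Filter.Greyscale, Speedy.Keypoint.Detector.FAST,
 * Speedy.Keypoint.Descriptor.ORB, Speedy.Keypoint.Sink, output().connectTo(),
 * pipeline.init()/run() - names recalled from the public API, not guaranteed
 * by this file). The 'image' port expects a greyscale image; the source notes
 * that blurring it tends to improve the descriptors.
 *
 *   const source = Speedy.Image.Source();
 *   const greyscale = Speedy.Filter.Greyscale();
 *   const detector = Speedy.Keypoint.Detector.FAST();
 *   const orb = Speedy.Keypoint.Descriptor.ORB();
 *   const sink = Speedy.Keypoint.Sink();
 *
 *   source.media = media; // a SpeedyMedia (camera, video, image...)
 *   source.output().connectTo(greyscale.input());
 *   greyscale.output().connectTo(detector.input());
 *   greyscale.output().connectTo(orb.input('image'));
 *   detector.output().connectTo(orb.input('keypoints'));
 *   orb.output().connectTo(sink.input());
 *
 *   const pipeline = Speedy.Pipeline();
 *   pipeline.init(source, greyscale, detector, orb, sink);
 *   const { keypoints } = await pipeline.run();
 */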
  14802. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
  14803. /*
  14804. * speedy-vision.js
  14805. * GPU-accelerated Computer Vision for JavaScript
  14806. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14807. *
  14808. * Licensed under the Apache License, Version 2.0 (the "License");
  14809. * you may not use this file except in compliance with the License.
  14810. * You may obtain a copy of the License at
  14811. *
  14812. * http://www.apache.org/licenses/LICENSE-2.0
  14813. *
  14814. * Unless required by applicable law or agreed to in writing, software
  14815. * distributed under the License is distributed on an "AS IS" BASIS,
  14816. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14817. * See the License for the specific language governing permissions and
  14818. * limitations under the License.
  14819. *
  14820. * lk.js
  14821. * LK optical-flow
  14822. */
  14823. // Constants
  14824. const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
  14825. const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
  14826. const DEFAULT_NUMBER_OF_ITERATIONS = 30;
  14827. const DEFAULT_DISCARD_THRESHOLD = 0.0001;
  14828. const DEFAULT_EPSILON = 0.01;
  14829. const LK_PROGRAM = {
  14830. 3: 'lk3',
  14831. 5: 'lk5',
  14832. 7: 'lk7',
  14833. 9: 'lk9',
  14834. 11: 'lk11',
  14835. 13: 'lk13',
  14836. 15: 'lk15',
  14837. 17: 'lk17',
  14838. 19: 'lk19',
  14839. 21: 'lk21'
  14840. };
  14841. /**
  14842. * LK optical-flow
  14843. */
  14844. class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode {
  14845. /**
  14846. * Constructor
  14847. * @param {string} [name] name of the node
  14848. */
  14849. constructor(name = undefined) {
  14850. super(name, 3, [InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  14851. /** @type {SpeedySize} window size */
  14852. this._windowSize = DEFAULT_WINDOW_SIZE;
  14853. /** @type {number} number of pyramid levels to use */
  14854. this._levels = DEFAULT_DEPTH;
  14855. /** @type {number} minimum acceptable corner response */
  14856. this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
  14857. /** @type {number} number of iterations per pyramid level (termination criteria) */
  14858. this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
  14859. /** @type {number} minimum increment per iteration (termination criteria) */
  14860. this._epsilon = DEFAULT_EPSILON;
  14861. }
  14862. /**
  14863. * Window size (use odd numbers)
  14864. * @returns {SpeedySize}
  14865. */
  14866. get windowSize() {
  14867. return this._windowSize;
  14868. }
  14869. /**
  14870. * Window size (use odd numbers)
  14871. * @param {SpeedySize} windowSize must be a square window
  14872. */
  14873. set windowSize(windowSize) {
  14874. if (windowSize.width != windowSize.height) {
14875. throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
  14876. } else if (!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
  14877. const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a, b) => a - b).map(k => k + 'x' + k).join(', ');
14878. throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
  14879. }
  14880. this._windowSize = windowSize;
  14881. }
  14882. /**
  14883. * Number of pyramid levels to use
  14884. * @returns {number}
  14885. */
  14886. get levels() {
  14887. return this._levels;
  14888. }
  14889. /**
  14890. * Number of pyramid levels to use
  14891. * @param {number} levels
  14892. */
  14893. set levels(levels) {
  14894. utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
  14895. this._levels = levels | 0;
  14896. }
  14897. /**
  14898. * Get the discard threshold, used to discard "bad" keypoints
  14899. * @returns {number}
  14900. */
  14901. get discardThreshold() {
  14902. return this._discardThreshold;
  14903. }
  14904. /**
  14905. * Set the discard threshold, used to discard "bad" keypoints
  14906. * @param {number} value typically 10^(-4) - increase to discard more
  14907. */
  14908. set discardThreshold(value) {
  14909. utils/* Utils */.A.assert(value >= 0);
  14910. this._discardThreshold = +value;
  14911. }
  14912. /**
  14913. * Get the maximum number of iterations of the pyramidal LK algorithm
  14914. * @returns {number}
  14915. */
  14916. get numberOfIterations() {
  14917. return this._numberOfIterations;
  14918. }
  14919. /**
  14920. * Set the maximum number of iterations of the pyramidal LK algorithm
  14921. * @param {number} value
  14922. */
  14923. set numberOfIterations(value) {
  14924. utils/* Utils */.A.assert(value >= 1);
  14925. this._numberOfIterations = value | 0;
  14926. }
  14927. /**
  14928. * Get the accuracy threshold, used to stop LK iterations
  14929. * @returns {number}
  14930. */
  14931. get epsilon() {
  14932. return this._epsilon;
  14933. }
  14934. /**
14935. * Set the accuracy threshold, used to stop LK iterations
  14936. * @param {number} value typically 0.01
  14937. */
  14938. set epsilon(value) {
  14939. utils/* Utils */.A.assert(value >= 0);
  14940. this._epsilon = +value;
  14941. }
  14942. /**
  14943. * Run the specific task of this node
  14944. * @param {SpeedyGPU} gpu
  14945. * @returns {void|SpeedyPromise<void>}
  14946. */
  14947. _run(gpu) {
  14948. const {
  14949. encodedKeypoints,
  14950. descriptorSize,
  14951. extraSize,
  14952. encoderLength
  14953. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('previousKeypoints').read();
  14954. const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('previousImage').read()).image;
  14955. const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('nextImage').read()).image;
  14956. const previousKeypoints = encodedKeypoints;
  14957. const levels = this._levels;
  14958. const windowSize = this._windowSize;
  14959. const wsize = windowSize.width; // square window
  14960. const numberOfIterations = this._numberOfIterations;
  14961. const discardThreshold = this._discardThreshold;
  14962. const epsilon = this._epsilon;
  14963. const keypoints = gpu.programs.keypoints;
  14964. const tex = this._tex;
  14965. // do we need a pyramid?
  14966. if (!(levels == 1 || previousImage.hasMipmaps() && nextImage.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);else if (previousImage.width !== nextImage.width || previousImage.height !== nextImage.height) throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
  14967. // select the appropriate program
  14968. const lk = keypoints[LK_PROGRAM[wsize]];
  14969. // find the dimensions of the flow texture (1 pixel per flow vector)
  14970. const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14971. const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
  14972. lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
  14973. // compute optical-flow
  14974. let flow = lk.clear();
  14975. for (let lod = levels - 1; lod >= 0; lod--) flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
  14976. // transfer optical-flow to nextKeypoints
  14977. keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
  14978. const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
  14979. // done!
  14980. this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
  14981. this.output('flow').swrite(flow);
  14982. }
  14983. }
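/*
 * Configuration sketch for the LK node above. Window sizes are limited to the
 * odd square sizes listed in LK_PROGRAM (3x3 up to 21x21), and levels > 1
 * requires both input images to carry pyramids (mipmaps). Factory and helper
 * names (Speedy.Keypoint.Tracker.LK, Speedy.Size) follow the public API and
 * are assumptions of this sketch:
 *
 *   const lk = Speedy.Keypoint.Tracker.LK();
 *   lk.windowSize = Speedy.Size(15, 15); // must be square and one of 3x3 ... 21x21
 *   lk.levels = 3;                       // pyramid levels; 1 removes the pyramid requirement
 *   lk.numberOfIterations = 30;          // termination criteria per level
 *   lk.epsilon = 0.01;                   // minimum increment per iteration
 *   lk.discardThreshold = 0.0001;        // raise to discard more "bad" keypoints
 *
 *   // ports: 'previousImage' and 'nextImage' (greyscale), 'previousKeypoints';
 *   // outputs: the tracked keypoints (default port) and a 'flow' vector field.
 */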
  14984. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
  14985. /*
  14986. * speedy-vision.js
  14987. * GPU-accelerated Computer Vision for JavaScript
  14988. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14989. *
  14990. * Licensed under the Apache License, Version 2.0 (the "License");
  14991. * you may not use this file except in compliance with the License.
  14992. * You may obtain a copy of the License at
  14993. *
  14994. * http://www.apache.org/licenses/LICENSE-2.0
  14995. *
  14996. * Unless required by applicable law or agreed to in writing, software
  14997. * distributed under the License is distributed on an "AS IS" BASIS,
  14998. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14999. * See the License for the specific language governing permissions and
  15000. * limitations under the License.
  15001. *
  15002. * lsh-static-tables.js
  15003. * Static LSH tables
  15004. */
  15005. /**
  15006. * Static LSH tables
  15007. */
  15008. class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode {
  15009. /**
  15010. * Constructor
  15011. * @param {string} [name] name of the node
  15012. */
  15013. constructor(name = undefined) {
  15014. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.LSHTables)]);
  15015. /** @type {SpeedyKeypoint[]} "training" keypoints */
  15016. this._keypoints = [];
  15017. /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
  15018. this._keypointsCopy = [];
  15019. /** @type {number} number of tables in the LSH data structure */
  15020. this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
  15021. /** @type {number} number of bits of a hash */
  15022. this._hashSize = LSH_DEFAULT_HASH_SIZE;
  15023. /** @type {SpeedyLSH|null} LSH data structure */
  15024. this._lsh = null;
  15025. }
  15026. /**
  15027. * "Training" keypoints
  15028. * @returns {SpeedyKeypoint[]}
  15029. */
  15030. get keypoints() {
  15031. return this._keypoints;
  15032. }
  15033. /**
  15034. * "Training" keypoints
  15035. * @param {SpeedyKeypoint[]} keypoints
  15036. */
  15037. set keypoints(keypoints) {
  15038. if (!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint))) throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
  15039. if (this._keypoints !== keypoints) {
  15040. this._keypoints = keypoints; // update internal pointer
  15041. this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
  15042. this._lsh = null; // (re)train the model
  15043. }
  15044. }
  15045. /**
  15046. * Number of tables in the LSH data structure
  15047. * @returns {number}
  15048. */
  15049. get numberOfTables() {
  15050. return this._numberOfTables;
  15051. }
  15052. /**
  15053. * Number of tables in the LSH data structure
  15054. * @param {number} n
  15055. */
  15056. set numberOfTables(n) {
  15057. if (!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
  15058. if (n !== this._numberOfTables) {
  15059. this._numberOfTables = n | 0;
  15060. this._lsh = null; // need to retrain the model
  15061. }
  15062. }
  15063. /**
  15064. * Number of bits of a hash
  15065. * @returns {number}
  15066. */
  15067. get hashSize() {
  15068. return this._hashSize;
  15069. }
  15070. /**
  15071. * Number of bits of a hash
  15072. * @param {number} h
  15073. */
  15074. set hashSize(h) {
  15075. if (!LSH_ACCEPTABLE_HASH_SIZES.includes(h)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
  15076. if (h !== this._hashSize) {
  15077. this._hashSize = h | 0;
  15078. this._lsh = null; // need to retrain the model
  15079. }
  15080. }
  15081. /**
  15082. * Run the specific task of this node
  15083. * @param {SpeedyGPU} gpu
  15084. * @returns {void|SpeedyPromise<void>}
  15085. */
  15086. _run(gpu) {
  15087. // Need to train the model?
  15088. if (this._lsh == null) {
  15089. // internal work textures are only available after initialization,
  15090. // i.e., after calling this._init()
  15091. this._lsh = this._train();
  15092. }
  15093. // Pass it forward
  15094. this.output().swrite(this._lsh);
  15095. }
  15096. /**
  15097. * Train the model
  15098. * @returns {SpeedyLSH}
  15099. */
  15100. _train() {
  15101. const keypoints = this._keypointsCopy;
  15102. const numberOfTables = this._numberOfTables;
  15103. const hashSize = this._hashSize;
  15104. if (keypoints.find(keypoint => keypoint.descriptor == null)) throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
  15105. const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
  15106. const lshTables = this._tex[0];
  15107. const descriptorDB = this._tex[1];
  15108. return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
  15109. }
  15110. }
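/*
 * Training sketch for the node above. Assigning keypoints (or changing
 * numberOfTables / hashSize) only invalidates the model; it is rebuilt lazily
 * on the next pipeline run, and every keypoint must carry a descriptor.
 * The factory name (Speedy.Keypoint.Matcher.StaticLSHTables) and the concrete
 * values below are assumptions - only values listed in
 * LSH_ACCEPTABLE_NUMBER_OF_TABLES / LSH_ACCEPTABLE_HASH_SIZES are accepted:
 *
 *   const lshTables = Speedy.Keypoint.Matcher.StaticLSHTables();
 *   lshTables.keypoints = trainingKeypoints; // SpeedyKeypoint[], each with a descriptor
 *   lshTables.numberOfTables = 8;            // placeholder value
 *   lshTables.hashSize = 16;                 // placeholder value (bits per hash)
 */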
  15111. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
  15112. /*
  15113. * speedy-vision.js
  15114. * GPU-accelerated Computer Vision for JavaScript
  15115. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15116. *
  15117. * Licensed under the Apache License, Version 2.0 (the "License");
  15118. * you may not use this file except in compliance with the License.
  15119. * You may obtain a copy of the License at
  15120. *
  15121. * http://www.apache.org/licenses/LICENSE-2.0
  15122. *
  15123. * Unless required by applicable law or agreed to in writing, software
  15124. * distributed under the License is distributed on an "AS IS" BASIS,
  15125. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15126. * See the License for the specific language governing permissions and
  15127. * limitations under the License.
  15128. *
  15129. * lsh-knn.js
  15130. * K approximate nearest neighbors matcher
  15131. */
  15132. /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
  15133. /** @type {number} how many neighbors to search for, by default */
  15134. const DEFAULT_K = 1;
  15135. /** @type {LSHKNNQualityLevel} default quality level */
  15136. const DEFAULT_QUALITY = 'default';
  15137. /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
  15138. const NUMBER_OF_BIT_SWAPS = {
  15139. 'fastest': 0,
  15140. 'default': 1,
  15141. 'demanding': 2
  15142. };
  15143. /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
  15144. const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o, d) => (o[d] = fd(d), o), {}))(d => (fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o, h) => (o[h] = fh(h), o), {}))(h => (fl => [0, 1, 2].reduce((o, l) => (o[l] = fl(l), o), {}))(l => `lshKnn${d}h${h}lv${l}`)));
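// The nested reducers above just enumerate shader names, one program per
// (descriptor size, hash size, bit-swap level) triple. For instance, assuming
// 32 is an acceptable descriptor size and 16 an acceptable hash size:
//   LSH_KNN[32][16][0] === 'lshKnn32h16lv0'
//   LSH_KNN[32][16][2] === 'lshKnn32h16lv2'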
  15145. /**
  15146. * K approximate nearest neighbors matcher
  15147. */
  15148. class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode {
  15149. /**
  15150. * Constructor
  15151. * @param {string} [name] name of the node
  15152. */
  15153. constructor(name = undefined) {
  15154. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15155. /** @type {number} how many neighbors do you want? */
  15156. this._k = DEFAULT_K;
  15157. /** @type {LSHKNNQualityLevel} quality of the matching */
  15158. this._quality = DEFAULT_QUALITY;
  15159. }
  15160. /**
  15161. * How many neighbors do you want?
  15162. * @returns {number}
  15163. */
  15164. get k() {
  15165. return this._k;
  15166. }
  15167. /**
  15168. * How many neighbors do you want?
  15169. * @param {number} k number of neighbors
  15170. */
  15171. set k(k) {
  15172. this._k = Math.max(1, k | 0);
  15173. }
  15174. /**
  15175. * Quality of the matching
  15176. * @returns {LSHKNNQualityLevel}
  15177. */
  15178. get quality() {
  15179. return this._quality;
  15180. }
  15181. /**
  15182. * Quality of the matching
  15183. * @param {LSHKNNQualityLevel} quality
  15184. */
  15185. set quality(quality) {
  15186. if (!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
  15187. this._quality = quality;
  15188. }
  15189. /**
  15190. * Run the specific task of this node
  15191. * @param {SpeedyGPU} gpu
  15192. * @returns {void|SpeedyPromise<void>}
  15193. */
  15194. _run(gpu) {
  15195. const {
  15196. encodedKeypoints,
  15197. descriptorSize,
  15198. extraSize,
  15199. encoderLength
  15200. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15201. /** @type {SpeedyLSH} */
  15202. const lsh = this.input('lsh').read().lsh;
  15203. const keypoints = gpu.programs.keypoints;
  15204. const tables = lsh.tables;
  15205. const descriptorDB = lsh.descriptorDB;
  15206. const tablesStride = tables.width;
  15207. const descriptorDBStride = descriptorDB.width;
  15208. const tableCount = lsh.tableCount;
  15209. const hashSize = lsh.hashSize;
  15210. const bucketCapacity = lsh.bucketCapacity;
  15211. const bucketsPerTable = lsh.bucketsPerTable;
  15212. const sequences = lsh.sequences;
  15213. const candidatesA = this._tex[0];
  15214. const candidatesB = this._tex[1];
  15215. const candidatesC = this._tex[2];
  15216. const filters = this._tex[3];
  15217. const transferA = this._tex[4];
  15218. const transferB = this._tex[5];
  15219. const level = NUMBER_OF_BIT_SWAPS[this._quality];
  15220. const matchesPerKeypoint = this._k;
  15221. // validate parameters
  15222. if (descriptorSize !== lsh.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
  15223. utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
  15224. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
  15225. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
  15226. // configure the output texture
  15227. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15228. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15229. let encodedMatches = transferB;
  15230. keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
  15231. // prepare the LSH matching
  15232. const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15233. keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
  15234. keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
  15235. const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
  15236. lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
  15237. lshKnn.setUBO('LSHSequences', sequences);
  15238. // match keypoints
  15239. encodedMatches.clear();
  15240. keypoints.lshKnnInitFilters();
  15241. for (let i = 0; i < matchesPerKeypoint; i++) {
  15242. // find the (i+1)-th best match
  15243. let candidates = keypoints.lshKnnInitCandidates();
  15244. for (let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
  15245. candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15246. gpu.gl.flush();
  15247. }
  15248. candidates.copyTo(filters);
  15249. // transfer matches to an encoded matches texture
  15250. encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
  15251. }
  15252. // done
  15253. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15254. /*
  15255. // debug
  15256. let data = filters.inspect32(gpu), debug = [];
  15257. for(let i = 0; i < data.length; i++) {
  15258. const bits = MATCH_INDEX_BITS;
  15259. const mask = (1 << bits) - 1;
  15260. const u32 = data[i];
  15261. const index = u32 & mask, distance = u32 >>> bits;
  15262. //debug.push('|'+[ u32 ].toString());
  15263. debug.push('|'+[ index, distance ].toString());
  15264. }
  15265. console.log(debug.join(','));
  15266. */
  15267. }
  15268. }
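/*
 * Usage sketch for the LSH k-NN matcher above. The 'lsh' port takes the output
 * of a Static LSH Tables node and the 'keypoints' port takes keypoints with
 * descriptors; each quality level maps to 0, 1 or 2 bit swaps, and the k best
 * matches are found one at a time, using the filters texture to exclude the
 * matches already found. Factory names are assumed from the public API:
 *
 *   const knn = Speedy.Keypoint.Matcher.LSHKNN();
 *   knn.k = 2;                 // matches per keypoint
 *   knn.quality = 'default';   // 'fastest' | 'default' | 'demanding'
 *   lshTables.output().connectTo(knn.input('lsh'));
 *   orb.output().connectTo(knn.input('keypoints'));
 *   // knn.output() emits KeypointMatches (k matches per keypoint)
 */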
  15269. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
  15270. /*
  15271. * speedy-vision.js
  15272. * GPU-accelerated Computer Vision for JavaScript
  15273. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15274. *
  15275. * Licensed under the Apache License, Version 2.0 (the "License");
  15276. * you may not use this file except in compliance with the License.
  15277. * You may obtain a copy of the License at
  15278. *
  15279. * http://www.apache.org/licenses/LICENSE-2.0
  15280. *
  15281. * Unless required by applicable law or agreed to in writing, software
  15282. * distributed under the License is distributed on an "AS IS" BASIS,
  15283. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15284. * See the License for the specific language governing permissions and
  15285. * limitations under the License.
  15286. *
  15287. * bf-knn.js
  15288. * Brute Force KNN Keypoint Matcher
  15289. */
  15290. /** @type {Object<number,string>} program name indexed by descriptor size */
  15291. const PROGRAM_NAME = {
  15292. 32: 'bfMatcher32',
  15293. 64: 'bfMatcher64'
  15294. };
  15295. /**
  15296. * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
  15297. * invoking this (use a database of 50 keypoints or so - your mileage may vary)
  15298. */
  15299. class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode {
  15300. /**
  15301. * Constructor
  15302. * @param {string} [name] name of the node
  15303. */
  15304. constructor(name = undefined) {
  15305. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15306. /** @type {number} number of matches per keypoint (the "k" of knn) */
  15307. this._matchesPerKeypoint = 1;
  15308. }
  15309. /**
  15310. * Number of matches per keypoint
  15311. * @returns {number}
  15312. */
  15313. get k() {
  15314. return this._matchesPerKeypoint;
  15315. }
  15316. /**
  15317. * Number of matches per keypoint
  15318. * @param {number} value
  15319. */
  15320. set k(value) {
  15321. this._matchesPerKeypoint = Math.max(1, value | 0);
  15322. }
  15323. /**
  15324. * Run the specific task of this node
  15325. * @param {SpeedyGPU} gpu
  15326. * @returns {void|SpeedyPromise<void>}
  15327. */
  15328. _run(gpu) {
  15329. const {
  15330. encodedKeypoints,
  15331. descriptorSize,
  15332. extraSize,
  15333. encoderLength
  15334. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15335. const database = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('database').read();
  15336. const candidatesA = this._tex[0];
  15337. const candidatesB = this._tex[1];
  15338. const candidatesC = this._tex[2];
  15339. const encodedFiltersA = this._tex[3];
  15340. const encodedMatchesA = this._tex[4];
  15341. const encodedMatchesB = this._tex[5];
  15342. const matchesPerKeypoint = this._matchesPerKeypoint;
  15343. const keypoints = gpu.programs.keypoints;
  15344. // validate parameters
  15345. if (descriptorSize !== database.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);else if (!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
  15346. // prepare the brute force matching
  15347. const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
  15348. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15349. const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
  15350. const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
  15351. const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
  15352. const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15353. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15354. keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
  15355. keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
  15356. keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
  15357. bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
  15358. // match keypoints
  15359. let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
  15360. let encodedFilters = keypoints.bfMatcherInitFilters();
  15361. for (let k = 0; k < matchesPerKeypoint; k++) {
  15362. let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
  15363. // find the (k+1)-th best match
  15364. for (let passId = 0; passId < numberOfPasses; passId++) {
  15365. encodedPartialMatches = bfMatcher(encodedPartialMatches, encodedFilters, partialMatcherLength, database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength, encodedKeypoints, descriptorSize, extraSize, encoderLength, passId);
  15366. gpu.gl.flush();
  15367. }
  15368. //gpu.gl.flush();
  15369. // copy the (k+1)-th best match to the filter
  15370. if (matchesPerKeypoint > 1) encodedPartialMatches.copyTo(encodedFilters);
  15371. // aggregate matches
  15372. encodedMatches = keypoints.bfMatcherTransfer(encodedMatches, encodedPartialMatches, matchesPerKeypoint, k);
  15373. }
  15374. // done!
  15375. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15376. }
  15377. }
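/*
 * Usage sketch for the brute-force matcher above. Both ports require keypoints
 * with descriptors of a supported size (32 or 64 bytes), and, as the class
 * comment suggests, the 'database' stream should be clipped to a small set
 * (e.g. with a Keypoint Clipper) since every keypoint is compared against the
 * whole database. The node variables and factory names below are assumptions:
 *
 *   const bf = Speedy.Keypoint.Matcher.BFKNN();
 *   bf.k = 1; // matches per keypoint
 *   databaseKeypoints.output().connectTo(bf.input('database')); // small, clipped set
 *   liveKeypoints.output().connectTo(bf.input('keypoints'));
 *   // bf.output() emits KeypointMatches
 */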
  15378. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
  15379. /*
  15380. * speedy-vision.js
  15381. * GPU-accelerated Computer Vision for JavaScript
  15382. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15383. *
  15384. * Licensed under the Apache License, Version 2.0 (the "License");
  15385. * you may not use this file except in compliance with the License.
  15386. * You may obtain a copy of the License at
  15387. *
  15388. * http://www.apache.org/licenses/LICENSE-2.0
  15389. *
  15390. * Unless required by applicable law or agreed to in writing, software
  15391. * distributed under the License is distributed on an "AS IS" BASIS,
  15392. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15393. * See the License for the specific language governing permissions and
  15394. * limitations under the License.
  15395. *
  15396. * distance-filter.js
  15397. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15398. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15399. */
  15400. /**
  15401. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15402. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15403. *
  15404. * The pairs of keypoints are provided as two separate sets, "in" and
  15405. * "reference". Keypoints that are kept will have their data extracted
  15406. * from the "in" set.
  15407. */
  15408. class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode {
  15409. /**
  15410. * Constructor
  15411. * @param {string} [name] name of the node
  15412. */
  15413. constructor(name = undefined) {
  15414. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15415. /** @type {number} maximum accepted distance */
  15416. this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
  15417. }
  15418. /**
  15419. * Maximum accepted distance
  15420. * @returns {number}
  15421. */
  15422. get threshold() {
  15423. return this._threshold;
  15424. }
  15425. /**
  15426. * Maximum accepted distance
  15427. * @param {number} value
  15428. */
  15429. set threshold(value) {
  15430. this._threshold = Math.max(0, +value);
  15431. }
  15432. /**
  15433. * Run the specific task of this node
  15434. * @param {SpeedyGPU} gpu
  15435. * @returns {void|SpeedyPromise<void>}
  15436. */
  15437. _run(gpu) {
  15438. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15439. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15440. const threshold = this._threshold;
  15441. // validate shapes
  15442. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
  15443. // calculate the shape of the output
  15444. const outputTexture = this._tex[0];
  15445. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15446. const descriptorSize = set0.descriptorSize;
  15447. const extraSize = set0.extraSize;
  15448. // apply the distance filter
  15449. gpu.programs.keypoints.distanceFilter.outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15450. // done!
  15451. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15452. }
  15453. }
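/*
 * Usage sketch for the distance filter above, e.g. for a forward-backward
 * optical-flow check: track keypoints from frame A to frame B and back to A,
 * then keep only those that land within `threshold` pixels of where they
 * started. The node variables and factory names below are assumptions:
 *
 *   const filter = Speedy.Keypoint.DistanceFilter();
 *   filter.threshold = 2; // maximum accepted distance, in pixels
 *   trackedBackToA.output().connectTo(filter.input('in'));           // data is kept from this set
 *   originalKeypointsA.output().connectTo(filter.input('reference'));
 */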
  15454. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
  15455. /*
  15456. * speedy-vision.js
  15457. * GPU-accelerated Computer Vision for JavaScript
  15458. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15459. *
  15460. * Licensed under the Apache License, Version 2.0 (the "License");
  15461. * you may not use this file except in compliance with the License.
  15462. * You may obtain a copy of the License at
  15463. *
  15464. * http://www.apache.org/licenses/LICENSE-2.0
  15465. *
  15466. * Unless required by applicable law or agreed to in writing, software
  15467. * distributed under the License is distributed on an "AS IS" BASIS,
  15468. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15469. * See the License for the specific language governing permissions and
  15470. * limitations under the License.
  15471. *
  15472. * hamming-distance-filter.js
  15473. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15474. * distance (of descriptor) is above a user-defined threshold
  15475. */
  15476. /** @type {Object<number,string>} Program names */
  15477. const hamming_distance_filter_PROGRAM_NAME = {
  15478. 32: 'hammingDistanceFilter32',
  15479. 64: 'hammingDistanceFilter64'
  15480. };
  15481. /**
  15482. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15483. * distance (of descriptor) is above a user-defined threshold
  15484. *
  15485. * The pairs of keypoints are provided as two separate sets, "in" and
  15486. * "reference". Keypoints that are kept will have their data extracted
  15487. * from the "in" set.
  15488. */
  15489. class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode {
  15490. /**
  15491. * Constructor
  15492. * @param {string} [name] name of the node
  15493. */
  15494. constructor(name = undefined) {
  15495. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15496. /** @type {number} distance threshold, an integer */
  15497. this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
  15498. }
  15499. /**
  15500. * Distance threshold, an integer
  15501. * @returns {number}
  15502. */
  15503. get threshold() {
  15504. return this._threshold;
  15505. }
  15506. /**
  15507. * Distance threshold, an integer
  15508. * @param {number} value
  15509. */
  15510. set threshold(value) {
  15511. this._threshold = Math.max(0, value | 0);
  15512. }
  15513. /**
  15514. * Run the specific task of this node
  15515. * @param {SpeedyGPU} gpu
  15516. * @returns {void|SpeedyPromise<void>}
  15517. */
  15518. _run(gpu) {
  15519. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15520. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15521. const threshold = this._threshold;
  15522. // validate shapes
  15523. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
  15524. // validate descriptor size
  15525. if (!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
  15526. // calculate the shape of the output
  15527. const outputTexture = this._tex[0];
  15528. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15529. const descriptorSize = set0.descriptorSize;
  15530. const extraSize = set0.extraSize;
  15531. // apply the distance filter
  15532. const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
  15533. gpu.programs.keypoints[program].outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15534. // done!
  15535. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15536. }
  15537. }
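/*
 * Usage sketch for the Hamming distance filter above. The threshold is a
 * number of bits; supported descriptors are 32 or 64 bytes long (256 or 512
 * bits), and the default threshold (MAX_DESCRIPTOR_SIZE * 8 bits) is
 * permissive. The node variables and factory names below are assumptions:
 *
 *   const hamming = Speedy.Keypoint.HammingDistanceFilter();
 *   hamming.threshold = 64; // bits (ORB descriptors are 32 bytes = 256 bits)
 *   candidateKeypoints.output().connectTo(hamming.input('in'));
 *   referenceKeypoints.output().connectTo(hamming.input('reference'));
 */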
  15538. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
  15539. /*
  15540. * speedy-vision.js
  15541. * GPU-accelerated Computer Vision for JavaScript
  15542. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15543. *
  15544. * Licensed under the Apache License, Version 2.0 (the "License");
  15545. * you may not use this file except in compliance with the License.
  15546. * You may obtain a copy of the License at
  15547. *
  15548. * http://www.apache.org/licenses/LICENSE-2.0
  15549. *
  15550. * Unless required by applicable law or agreed to in writing, software
  15551. * distributed under the License is distributed on an "AS IS" BASIS,
  15552. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15553. * See the License for the specific language governing permissions and
  15554. * limitations under the License.
  15555. *
  15556. * portal.js
  15557. * Keypoint Portals
  15558. */
  15559. /**
  15560. * A sink of a Keypoint Portal
  15561. * This is not a pipeline sink - it doesn't export any data!
  15562. */
  15563. class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode {
  15564. /**
  15565. * Constructor
  15566. * @param {string} [name] name of the node
  15567. */
  15568. constructor(name = undefined) {
  15569. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15570. /** @type {number} descriptor size, in bytes */
  15571. this._descriptorSize = 0;
  15572. /** @type {number} extra size, in bytes */
  15573. this._extraSize = 0;
15574. /** @type {number} encoder length */
  15575. this._encoderLength = 0;
  15576. /** @type {boolean} is this node initialized? */
  15577. this._initialized = false;
  15578. }
  15579. /**
  15580. * Encoded keypoints
  15581. * @returns {SpeedyTexture}
  15582. */
  15583. get encodedKeypoints() {
  15584. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15585. return this._tex[0];
  15586. }
  15587. /**
  15588. * Descriptor size, in bytes
  15589. * @returns {number}
  15590. */
  15591. get descriptorSize() {
  15592. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15593. return this._descriptorSize;
  15594. }
  15595. /**
  15596. * Extra size, in bytes
  15597. * @returns {number}
  15598. */
  15599. get extraSize() {
  15600. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15601. return this._extraSize;
  15602. }
  15603. /**
  15604. * Encoder length
  15605. * @returns {number}
  15606. */
  15607. get encoderLength() {
  15608. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15609. return this._encoderLength;
  15610. }
  15611. /**
  15612. * Initializes this node
  15613. * @param {SpeedyGPU} gpu
  15614. */
  15615. init(gpu) {
  15616. super.init(gpu);
  15617. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
  15618. this._tex[0].resize(encoderLength, encoderLength).clearToColor(1, 1, 1, 1); // initial texture
  15619. this._descriptorSize = this._extraSize = 0;
  15620. this._encoderLength = encoderLength;
  15621. this._initialized = true;
  15622. }
  15623. /**
  15624. * Releases this node
  15625. * @param {SpeedyGPU} gpu
  15626. */
  15627. release(gpu) {
  15628. this._initialized = false;
  15629. super.release(gpu);
  15630. }
  15631. /**
  15632. * Run the specific task of this node
  15633. * @param {SpeedyGPU} gpu
  15634. * @returns {void|SpeedyPromise<void>}
  15635. */
  15636. _run(gpu) {
  15637. const {
  15638. encodedKeypoints,
  15639. descriptorSize,
  15640. extraSize,
  15641. encoderLength
  15642. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  15643. const tex = this._tex[0];
  15644. // copy input
  15645. tex.resize(encodedKeypoints.width, encodedKeypoints.height);
  15646. encodedKeypoints.copyTo(tex);
  15647. this._descriptorSize = descriptorSize;
  15648. this._extraSize = extraSize;
  15649. this._encoderLength = encoderLength;
  15650. }
  15651. }
  15652. /**
  15653. * A source of a Keypoint Portal
  15654. */
  15655. class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode {
  15656. /**
  15657. * Constructor
  15658. * @param {string} [name] name of the node
  15659. */
  15660. constructor(name = undefined) {
  15661. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15662. /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
  15663. this._source = null;
  15664. }
  15665. /**
  15666. * Data source
  15667. * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
  15668. */
  15669. get source() {
  15670. return this._source;
  15671. }
  15672. /**
  15673. * Data source
  15674. * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
  15675. */
  15676. set source(node) {
  15677. if (node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  15678. this._source = node;
  15679. }
  15680. /**
  15681. * Run the specific task of this node
  15682. * @param {SpeedyGPU} gpu
  15683. * @returns {void|SpeedyPromise<void>}
  15684. */
  15685. _run(gpu) {
  15686. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  15687. this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
  15688. }
  15689. }
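/*
 * Usage sketch for the keypoint portal above: the sink stores this run's
 * encoded keypoints (plus descriptorSize / extraSize / encoderLength) and the
 * source replays them on a later run - handy for feeding 'previousKeypoints'
 * back into a tracker. The node variables and factory names are assumptions:
 *
 *   const portalSink = Speedy.Keypoint.Portal.Sink();
 *   const portalSource = Speedy.Keypoint.Portal.Source();
 *   portalSource.source = portalSink; // link the two endpoints
 *
 *   detector.output().connectTo(portalSink.input());                // store keypoints
 *   portalSource.output().connectTo(lk.input('previousKeypoints')); // replay them later
 */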
  15690. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
  15691. /*
  15692. * speedy-vision.js
  15693. * GPU-accelerated Computer Vision for JavaScript
  15694. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15695. *
  15696. * Licensed under the Apache License, Version 2.0 (the "License");
  15697. * you may not use this file except in compliance with the License.
  15698. * You may obtain a copy of the License at
  15699. *
  15700. * http://www.apache.org/licenses/LICENSE-2.0
  15701. *
  15702. * Unless required by applicable law or agreed to in writing, software
  15703. * distributed under the License is distributed on an "AS IS" BASIS,
  15704. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15705. * See the License for the specific language governing permissions and
  15706. * limitations under the License.
  15707. *
  15708. * keypoint-factory.js
  15709. * Keypoint-related nodes
  15710. */
  15711. /**
  15712. * Keypoint detectors
  15713. */
  15714. class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15715. /**
  15716. * FAST corner detector
  15717. * @param {string} [name]
  15718. * @returns {SpeedyPipelineNodeFASTKeypointDetector}
  15719. */
  15720. static FAST(name = undefined) {
  15721. return new SpeedyPipelineNodeFASTKeypointDetector(name);
  15722. }
  15723. /**
  15724. * Harris corner detector
  15725. * @param {string} [name]
  15726. * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
  15727. */
  15728. static Harris(name = undefined) {
  15729. return new SpeedyPipelineNodeHarrisKeypointDetector(name);
  15730. }
  15731. }
  15732. /**
  15733. * Keypoint descriptors
  15734. */
  15735. class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15736. /**
  15737. * ORB descriptors
  15738. * @param {string} [name]
  15739. * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
  15740. */
  15741. static ORB(name = undefined) {
  15742. return new SpeedyPipelineNodeORBKeypointDescriptor(name);
  15743. }
  15744. }
  15745. /**
  15746. * Keypoint trackers
  15747. */
  15748. class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15749. /**
  15750. * LK optical-flow
  15751. * @param {string} [name]
  15752. * @returns {SpeedyPipelineNodeLKKeypointTracker}
  15753. */
  15754. static LK(name = undefined) {
  15755. return new SpeedyPipelineNodeLKKeypointTracker(name);
  15756. }
  15757. }
  15758. /**
  15759. * Keypoint matchers
  15760. */
  15761. class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15762. /**
  15763. * Static LSH tables
  15764. * @param {string} [name]
  15765. * @returns {SpeedyPipelineNodeStaticLSHTables}
  15766. */
  15767. static StaticLSHTables(name = undefined) {
  15768. return new SpeedyPipelineNodeStaticLSHTables(name);
  15769. }
  15770. /**
  15771. * LSH-based K-approximate nearest neighbors
  15772. * @param {string} [name]
  15773. * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
  15774. */
  15775. static LSHKNN(name = undefined) {
  15776. return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
  15777. }
  15778. /**
  15779. * Brute-force K-nearest neighbors keypoint matcher
  15780. * @param {string} [name]
  15781. * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
  15782. */
  15783. static BFKNN(name = undefined) {
  15784. return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
  15785. }
  15786. }
  15787. /**
  15788. * Portal nodes
  15789. */
  15790. class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15791. /**
15792. * Create a keypoint portal source
  15793. * @param {string} [name] name of the node
  15794. * @returns {SpeedyPipelineNodeKeypointPortalSource}
  15795. */
  15796. static Source(name = undefined) {
  15797. return new SpeedyPipelineNodeKeypointPortalSource(name);
  15798. }
  15799. /**
15800. * Create a keypoint portal sink
  15801. * @param {string} [name] name of the node
  15802. * @returns {SpeedyPipelineNodeKeypointPortalSink}
  15803. */
  15804. static Sink(name = undefined) {
  15805. return new SpeedyPipelineNodeKeypointPortalSink(name);
  15806. }
  15807. }
  15808. /**
  15809. * Keypoint-related nodes
  15810. */
  15811. class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15812. /**
  15813. * Keypoint detectors
  15814. * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
  15815. */
  15816. static get Detector() {
  15817. return SpeedyPipelineKeypointDetectorFactory;
  15818. }
  15819. /**
  15820. * Keypoint descriptors
  15821. * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
  15822. */
  15823. static get Descriptor() {
  15824. return SpeedyPipelineKeypointDescriptorFactory;
  15825. }
  15826. /**
  15827. * Keypoint trackers
  15828. * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
  15829. */
  15830. static get Tracker() {
  15831. return SpeedyPipelineKeypointTrackerFactory;
  15832. }
  15833. /**
  15834. * Keypoint matchers
  15835. * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
  15836. */
  15837. static get Matcher() {
  15838. return SpeedyPipelineKeypointMatcherFactory;
  15839. }
  15840. /**
  15841. * Keypoint Portals
  15842. * @returns {typeof SpeedyPipelineKeypointPortalFactory}
  15843. */
  15844. static get Portal() {
  15845. return SpeedyPipelineKeypointPortalFactory;
  15846. }
  15847. /**
  15848. * Create a keypoint source
  15849. * @param {string} [name]
  15850. * @returns {SpeedyPipelineNodeKeypointSource}
  15851. */
  15852. static Source(name = undefined) {
  15853. return new SpeedyPipelineNodeKeypointSource(name);
  15854. }
  15855. /**
  15856. * Create a keypoint sink
  15857. * @param {string} [name]
  15858. * @returns {SpeedyPipelineNodeKeypointSink}
  15859. */
  15860. static Sink(name = undefined) {
  15861. return new SpeedyPipelineNodeKeypointSink(name);
  15862. }
  15863. /**
  15864. * Create a sink of tracked keypoints
  15865. * @param {string} [name]
  15866. * @returns {SpeedyPipelineNodeTrackedKeypointSink}
  15867. */
  15868. static SinkOfTrackedKeypoints(name = undefined) {
  15869. return new SpeedyPipelineNodeTrackedKeypointSink(name);
  15870. }
  15871. /**
  15872. * Create a sink of matched keypoints
  15873. * @param {string} [name]
  15874. * @returns {SpeedyPipelineNodeMatchedKeypointSink}
  15875. */
  15876. static SinkOfMatchedKeypoints(name = undefined) {
  15877. return new SpeedyPipelineNodeMatchedKeypointSink(name);
  15878. }
  15879. /**
  15880. * Keypoint clipper
  15881. * @param {string} [name]
  15882. * @returns {SpeedyPipelineNodeKeypointClipper}
  15883. */
  15884. static Clipper(name = undefined) {
  15885. return new SpeedyPipelineNodeKeypointClipper(name);
  15886. }
  15887. /**
  15888. * Border Clipper
  15889. * @param {string} [name]
  15890. * @returns {SpeedyPipelineNodeKeypointBorderClipper}
  15891. */
  15892. static BorderClipper(name = undefined) {
  15893. return new SpeedyPipelineNodeKeypointBorderClipper(name);
  15894. }
  15895. /**
  15896. * Create a keypoint buffer
  15897. * @param {string} [name]
  15898. * @returns {SpeedyPipelineNodeKeypointBuffer}
  15899. */
  15900. static Buffer(name = undefined) {
  15901. return new SpeedyPipelineNodeKeypointBuffer(name);
  15902. }
  15903. /**
  15904. * Create a keypoint mixer
  15905. * @param {string} [name]
  15906. * @returns {SpeedyPipelineNodeKeypointMixer}
  15907. */
  15908. static Mixer(name = undefined) {
  15909. return new SpeedyPipelineNodeKeypointMixer(name);
  15910. }
  15911. /**
  15912. * Create a keypoint shuffler
  15913. * @param {string} [name]
  15914. * @returns {SpeedyPipelineNodeKeypointShuffler}
  15915. */
  15916. static Shuffler(name = undefined) {
  15917. return new SpeedyPipelineNodeKeypointShuffler(name);
  15918. }
  15919. /**
  15920. * Create a keypoint multiplexer
  15921. * @param {string} [name]
  15922. * @returns {SpeedyPipelineNodeKeypointMultiplexer}
  15923. */
  15924. static Multiplexer(name = undefined) {
  15925. return new SpeedyPipelineNodeKeypointMultiplexer(name);
  15926. }
  15927. /**
  15928. * Create a keypoint transformer
  15929. * @param {string} [name]
  15930. * @returns {SpeedyPipelineNodeKeypointTransformer}
  15931. */
  15932. static Transformer(name = undefined) {
  15933. return new SpeedyPipelineNodeKeypointTransformer(name);
  15934. }
  15935. /**
  15936. * Create a subpixel refiner of keypoint locations
  15937. * @param {string} [name]
  15938. * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
  15939. */
  15940. static SubpixelRefiner(name = undefined) {
  15941. return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
  15942. }
  15943. /**
  15944. * Distance filter
  15945. * @param {string} [name]
15946. * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
  15947. */
  15948. static DistanceFilter(name = undefined) {
  15949. return new SpeedyPipelineNodeKeypointDistanceFilter(name);
  15950. }
  15951. /**
  15952. * Hamming distance filter
  15953. * @param {string} [name]
15954. * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
  15955. */
  15956. static HammingDistanceFilter(name = undefined) {
  15957. return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
  15958. }
  15959. }
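/*
 * Assuming the bundle exposes this factory as Speedy.Keypoint (as in
 * speedy-vision's public API), the static members above map directly onto the
 * node classes defined earlier in this file, e.g.:
 *
 *   Speedy.Keypoint.Detector.Harris() -> SpeedyPipelineNodeHarrisKeypointDetector
 *   Speedy.Keypoint.Descriptor.ORB()  -> SpeedyPipelineNodeORBKeypointDescriptor
 *   Speedy.Keypoint.Tracker.LK()      -> SpeedyPipelineNodeLKKeypointTracker
 *   Speedy.Keypoint.Matcher.LSHKNN()  -> SpeedyPipelineNodeLSHKNNKeypointMatcher
 *   Speedy.Keypoint.Portal.Sink()     -> SpeedyPipelineNodeKeypointPortalSink
 */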
  15960. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
  15961. /*
  15962. * speedy-vision.js
  15963. * GPU-accelerated Computer Vision for JavaScript
  15964. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15965. *
  15966. * Licensed under the Apache License, Version 2.0 (the "License");
  15967. * you may not use this file except in compliance with the License.
  15968. * You may obtain a copy of the License at
  15969. *
  15970. * http://www.apache.org/licenses/LICENSE-2.0
  15971. *
  15972. * Unless required by applicable law or agreed to in writing, software
  15973. * distributed under the License is distributed on an "AS IS" BASIS,
  15974. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15975. * See the License for the specific language governing permissions and
  15976. * limitations under the License.
  15977. *
  15978. * sink.js
  15979. * Gets keypoints out of the pipeline
  15980. */
  15981. // next power of 2
  15982. const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  15983. /**
  15984. * Gets 2D vectors out of the pipeline
  15985. */
  15986. class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode {
  15987. /**
  15988. * Constructor
  15989. * @param {string} [name] name of the node
  15990. */
  15991. constructor(name = 'vec2') {
  15992. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Vector2)]);
  15993. /** @type {SpeedyVector2[]} 2D vectors (output) */
  15994. this._vectors = [];
  15995. /** @type {SpeedyTextureReader} texture reader */
  15996. this._textureReader = new SpeedyTextureReader();
  15997. /** @type {number} page flipping index */
  15998. this._page = 0;
  15999. /** @type {boolean} accelerate GPU-CPU transfers */
  16000. this._turbo = false;
  16001. }
  16002. /**
  16003. * Accelerate GPU-CPU transfers
  16004. * @returns {boolean}
  16005. */
  16006. get turbo() {
  16007. return this._turbo;
  16008. }
  16009. /**
  16010. * Accelerate GPU-CPU transfers
  16011. * @param {boolean} value
  16012. */
  16013. set turbo(value) {
  16014. this._turbo = Boolean(value);
  16015. }
  16016. /**
  16017. * Initializes this node
  16018. * @param {SpeedyGPU} gpu
  16019. */
  16020. init(gpu) {
  16021. super.init(gpu);
  16022. this._textureReader.init(gpu);
  16023. }
  16024. /**
  16025. * Releases this node
  16026. * @param {SpeedyGPU} gpu
  16027. */
  16028. release(gpu) {
  16029. this._textureReader.release(gpu);
  16030. super.release(gpu);
  16031. }
  16032. /**
  16033. * Export data from this node to the user
  16034. * @returns {SpeedyPromise<SpeedyVector2[]>}
  16035. */
  16036. export() {
  16037. return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
  16038. }
  16039. /**
  16040. * Run the specific task of this node
  16041. * @param {SpeedyGPU} gpu
  16042. * @returns {void|SpeedyPromise<void>}
  16043. */
  16044. _run(gpu) {
  16045. const {
  16046. vectors
  16047. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input().read();
  16048. const useBufferedDownloads = this._turbo;
  16049. const encoderLength = vectors.width;
  16050. /*
16051. I have found experimentally that, in Firefox, readPixelsAsync()
16052. performs MUCH better if the width of the target texture is a power
16053. of two. I don't know why; perhaps it's related to some interaction
16054. with the GL drivers. It seems to make no difference on Chrome,
16055. however. In any case, let's convert the input
16056. texture to POT.
  16057. */
  16058. const encoderWidth = vector2_sink_nextPot(encoderLength);
  16059. const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  16060. //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
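// Worked example (illustrative): if vectors.width = 300, then
// encoderWidth = nextPot(300) = 512 and
// encoderHeight = nextPot(ceil(300*300 / 512)) = nextPot(176) = 256,
// i.e. a 512x256 POT texture with room for the 300^2 encoded pixels.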
  16061. // copy the set of vectors to an internal texture
  16062. const copiedTexture = this._tex[this._page];
  16063. gpu.programs.utils.copy2DVectors.outputs(encoderWidth, encoderHeight, copiedTexture)(vectors);
  16064. // flip page
  16065. this._page = 1 - this._page;
  16066. // download the internal texture
  16067. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  16068. this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
  16069. });
  16070. }
  16071. /**
  16072. * Decode a sequence of vectors, given a flattened image of encoded pixels
  16073. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  16074. * @param {number} encoderWidth
  16075. * @param {number} encoderHeight
  16076. * @returns {SpeedyVector2[]} vectors
  16077. */
  16078. static _decode(pixels, encoderWidth, encoderHeight) {
  16079. const bytesPerVector = 4; // 1 pixel per vector
  16080. const vectors = [];
  16081. let hi = 0,
  16082. lo = 0;
  16083. let x = 0,
  16084. y = 0;
  16085. // how many bytes should we read?
  16086. const e2 = encoderWidth * encoderHeight * bytesPerVector;
  16087. const size = Math.min(pixels.length, e2);
  16088. // for each encoded vector
  16089. for (let i = 0; i < size; i += bytesPerVector) {
  16090. // extract 16-bit words
  16091. lo = pixels[i + 1] << 8 | pixels[i];
  16092. hi = pixels[i + 3] << 8 | pixels[i + 2];
  16093. // the vector is "null": we have reached the end of the list
  16094. if (lo == 0xFFFF && hi == 0xFFFF) break;
  16095. // the vector must be discarded
  16096. if (lo == 0xFF00 && hi == 0xFF00) continue;
  16097. // decode floats
  16098. x = utils/* Utils */.A.decodeFloat16(lo);
  16099. y = utils/* Utils */.A.decodeFloat16(hi);
  16100. // register vector
  16101. vectors.push(new SpeedyVector2(x, y));
  16102. }
  16103. // done!
  16104. return vectors;
  16105. }
  16106. }
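// Illustrative decoding of a single encoded pixel by _decode() above,
// assuming Utils.decodeFloat16() follows IEEE 754 half-precision:
/*
const pixels = new Uint8Array([0x00, 0x3C, 0x00, 0x40]); // lo = 0x3C00 (1.0), hi = 0x4000 (2.0)
SpeedyPipelineNodeVector2Sink._decode(pixels, 1, 1);     // -> [ SpeedyVector2(1, 2) ]
*/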
  16107. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
  16108. /*
  16109. * speedy-vision.js
  16110. * GPU-accelerated Computer Vision for JavaScript
  16111. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16112. *
  16113. * Licensed under the Apache License, Version 2.0 (the "License");
  16114. * you may not use this file except in compliance with the License.
  16115. * You may obtain a copy of the License at
  16116. *
  16117. * http://www.apache.org/licenses/LICENSE-2.0
  16118. *
  16119. * Unless required by applicable law or agreed to in writing, software
  16120. * distributed under the License is distributed on an "AS IS" BASIS,
  16121. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16122. * See the License for the specific language governing permissions and
  16123. * limitations under the License.
  16124. *
  16125. * vector2-factory.js
  16126. * 2D vectors
  16127. */
  16128. /**
  16129. * 2D vectors
  16130. */
  16131. class SpeedyPipelineVector2Factory extends Function {
  16132. /**
  16133. * Constructor
  16134. */
  16135. constructor() {
  16136. // This factory can be invoked as a function
  16137. super('...args', 'return this._create(...args)');
  16138. return this.bind(this);
  16139. }
  16140. /**
  16141. * @private
  16142. *
  16143. * Create a 2D vector
  16144. * @param {number} x x-coordinate
  16145. * @param {number} y y-coordinate
  16146. * @returns {SpeedyVector2}
  16147. */
  16148. _create(x, y) {
  16149. return new SpeedyVector2(x, y);
  16150. }
  16151. /**
  16152. * Create a Vector2 sink
  16153. * @param {string} [name]
  16154. * @returns {SpeedyPipelineNodeVector2Sink}
  16155. */
  16156. Sink(name = undefined) {
  16157. return new SpeedyPipelineNodeVector2Sink(name);
  16158. }
  16159. }
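// Usage sketch (illustrative): the factory is callable because its constructor
// returns a bound function, so both forms below work:
/*
const v = Speedy.Vector2(3, 4);           // calls _create(3, 4) -> SpeedyVector2
const sink = Speedy.Vector2.Sink('vec2'); // SpeedyPipelineNodeVector2Sink
*/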
  16160. ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
  16161. /*
  16162. * speedy-vision.js
  16163. * GPU-accelerated Computer Vision for JavaScript
  16164. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16165. *
  16166. * Licensed under the Apache License, Version 2.0 (the "License");
  16167. * you may not use this file except in compliance with the License.
  16168. * You may obtain a copy of the License at
  16169. *
  16170. * http://www.apache.org/licenses/LICENSE-2.0
  16171. *
  16172. * Unless required by applicable law or agreed to in writing, software
  16173. * distributed under the License is distributed on an "AS IS" BASIS,
  16174. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16175. * See the License for the specific language governing permissions and
  16176. * limitations under the License.
  16177. *
  16178. * fps-counter.js
16179. * An FPS counter
  16180. */
  16181. /** @const {number} update interval in milliseconds */
  16182. const UPDATE_INTERVAL = 500;
  16183. /** @type {FPSCounter|null} Singleton */
  16184. let instance = null;
  16185. /**
  16186. * FPS counter
  16187. */
  16188. class FPSCounter {
  16189. /**
  16190. * Creates a new FPSCounter
  16191. * @private
  16192. */
  16193. constructor() {
  16194. /** @type {number} current FPS rate */
  16195. this._fps = 60;
  16196. /** @type {number} frame counter */
  16197. this._frames = 0;
  16198. /** @type {number} update interval in milliseconds */
  16199. this._updateInterval = UPDATE_INTERVAL;
  16200. /** @type {number} time of the last update */
  16201. this._lastUpdate = performance.now();
  16202. /** @type {function(): void} bound update function */
  16203. this._boundUpdate = this._update.bind(this);
  16204. // this should never happen...
  16205. if (instance !== null) throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
  16206. // start FPS counter
  16207. this._boundUpdate();
  16208. }
  16209. /**
  16210. * Gets an instance of the FPS counter.
  16211. * We use lazy loading, i.e., we will not
16212. * create an FPS counter unless we need to!
  16213. * @returns {FPSCounter}
  16214. */
  16215. static get instance() {
  16216. if (instance === null) instance = new FPSCounter();
  16217. return instance;
  16218. }
  16219. /**
  16220. * Get the FPS rate
  16221. * @returns {number} frames per second
  16222. */
  16223. get fps() {
  16224. return this._fps;
  16225. }
  16226. /**
  16227. * Updates the FPS counter
  16228. */
  16229. _update() {
  16230. const now = performance.now();
  16231. const deltaTime = now - this._lastUpdate;
  16232. if (deltaTime >= this._updateInterval) {
  16233. this._fps = Math.round(this._frames / (deltaTime * 0.001));
  16234. this._frames = 0;
  16235. this._lastUpdate = now;
  16236. }
  16237. this._frames++;
  16238. requestAnimationFrame(this._boundUpdate);
  16239. }
  16240. }
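// Usage sketch (illustrative): the counter is a lazily-created singleton and is
// normally read through Speedy.fps (defined in main.js below):
/*
console.log(FPSCounter.instance.fps); // e.g. 60
*/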
  16241. ;// CONCATENATED MODULE: ./src/main.js
  16242. /*
  16243. * speedy-vision.js
  16244. * GPU-accelerated Computer Vision for JavaScript
  16245. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16246. *
  16247. * Licensed under the Apache License, Version 2.0 (the "License");
  16248. * you may not use this file except in compliance with the License.
  16249. * You may obtain a copy of the License at
  16250. *
  16251. * http://www.apache.org/licenses/LICENSE-2.0
  16252. *
  16253. * Unless required by applicable law or agreed to in writing, software
  16254. * distributed under the License is distributed on an "AS IS" BASIS,
  16255. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16256. * See the License for the specific language governing permissions and
  16257. * limitations under the License.
  16258. *
  16259. * main.js
  16260. * The entry point of the library
  16261. */
  16262. /* eslint-disable no-undef */
  16263. /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  16264. /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
  16265. /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
  16266. /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  16267. // Constants
  16268. /** @type {SpeedyMatrixFactory} */
  16269. const matrixFactory = new SpeedyMatrixFactory();
  16270. /** @type {SpeedyPipelineVector2Factory} */
  16271. const vector2Factory = new SpeedyPipelineVector2Factory();
  16272. /**
  16273. * GPU-accelerated Computer Vision for JavaScript
  16274. */
  16275. class Speedy {
  16276. /**
  16277. * The version of the library
  16278. * @returns {string}
  16279. */
  16280. static get version() {
16281. return "0.9.1";
  16282. }
  16283. /**
  16284. * Checks if Speedy can be executed in this machine & browser
  16285. * @returns {boolean}
  16286. */
  16287. static isSupported() {
  16288. return typeof WebAssembly !== 'undefined' && typeof WebGL2RenderingContext !== 'undefined' && speedy_gl/* SpeedyGL */.c.instance.gl != null;
  16289. }
  16290. /**
  16291. * Global settings
  16292. * @returns {typeof Settings}
  16293. */
  16294. static get Settings() {
  16295. return settings/* Settings */.w;
  16296. }
  16297. /**
  16298. * Create a 2D vector
  16299. * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
  16300. */
  16301. static get Vector2() {
  16302. return vector2Factory;
  16303. }
  16304. /**
  16305. * Create a 2D point
  16306. * @param {number} x
  16307. * @param {number} y
  16308. * @returns {SpeedyPoint2}
  16309. */
  16310. static Point2(x, y) {
  16311. return new SpeedyPoint2(x, y);
  16312. }
  16313. /**
  16314. * Create a new size object
  16315. * @param {number} width
  16316. * @param {number} height
  16317. * @returns {SpeedySize}
  16318. */
  16319. static Size(width, height) {
  16320. return new SpeedySize(width, height);
  16321. }
  16322. /**
  16323. * Create a Matrix (entries are given in column-major format)
  16324. * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
  16325. */
  16326. static get Matrix() {
  16327. return matrixFactory;
  16328. }
  16329. /**
  16330. * Speedy Promises
  16331. * @returns {typeof SpeedyPromise}
  16332. */
  16333. static get Promise() {
  16334. return speedy_promise/* SpeedyPromise */.i;
  16335. }
  16336. /**
  16337. * Create a new Pipeline
  16338. * @returns {SpeedyPipeline}
  16339. */
  16340. static Pipeline() {
  16341. return new SpeedyPipeline();
  16342. }
  16343. /**
  16344. * Image-related nodes
  16345. * @returns {typeof SpeedyPipelineImageFactory}
  16346. */
  16347. static get Image() {
  16348. return SpeedyPipelineImageFactory;
  16349. }
  16350. /**
  16351. * Image filters
  16352. * @returns {typeof SpeedyPipelineFilterFactory}
  16353. */
  16354. static get Filter() {
  16355. return SpeedyPipelineFilterFactory;
  16356. }
  16357. /**
  16358. * Image transforms
  16359. * @returns {typeof SpeedyPipelineTransformFactory}
  16360. */
  16361. static get Transform() {
  16362. return SpeedyPipelineTransformFactory;
  16363. }
  16364. /**
  16365. * Keypoint-related nodes
  16366. * @returns {typeof SpeedyPipelineKeypointFactory}
  16367. */
  16368. static get Keypoint() {
  16369. return SpeedyPipelineKeypointFactory;
  16370. }
  16371. /**
  16372. * Loads a SpeedyMedia object based on the provided source element
  16373. * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
  16374. * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
  16375. * @returns {SpeedyPromise<SpeedyMedia>}
  16376. */
  16377. static load(sourceElement, options = {}) {
  16378. return SpeedyMedia.load(sourceElement, options);
  16379. }
  16380. /**
  16381. * Loads a camera stream
16382. * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
  16383. * @param {number} [height] height of the stream
  16384. * @returns {SpeedyPromise<SpeedyMedia>}
  16385. */
  16386. static camera(widthOrConstraints = 640, height = 360) {
  16387. const constraints = typeof widthOrConstraints === 'object' ? widthOrConstraints : {
  16388. audio: false,
  16389. video: {
  16390. width: widthOrConstraints | 0,
  16391. height: height | 0
  16392. }
  16393. };
  16394. return utils/* Utils */.A.requestCameraStream(constraints).then(video => SpeedyMedia.load(video));
  16395. }
  16396. /**
  16397. * Utilities to query information about the graphics driver
  16398. * @returns {typeof SpeedyPlatform}
  16399. */
  16400. static get Platform() {
  16401. return SpeedyPlatform;
  16402. }
  16403. /**
  16404. * The FPS rate
  16405. * @returns {number} Frames per second (FPS)
  16406. */
  16407. static get fps() {
  16408. return FPSCounter.instance.fps;
  16409. }
  16410. }
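// Usage sketch (illustrative):
/*
Speedy.camera(640, 360).then(media => console.log('camera loaded', media));
Speedy.camera({ video: { facingMode: 'environment' } }); // custom constraints object
Speedy.load(document.querySelector('video')).then(media => console.log('media loaded', media));
*/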
  16411. // Freeze the namespace
  16412. Object.freeze(Speedy);
  16413. // Display a notice
  16414. utils/* Utils */.A.log(`Speedy Vision version ${Speedy.version}. ` + `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` + "https://github.com/alemart/speedy-vision");
  16415. // Big-endian machine? Currently untested.
  16416. if (!globals.LITTLE_ENDIAN) utils/* Utils */.A.warning('Running on a big-endian machine');
  16417. })();
  16418. __nested_webpack_exports__ = __nested_webpack_exports__["default"];
  16419. /******/ return __nested_webpack_exports__;
  16420. /******/ })()
  16421. ;
  16422. });
  16423. /***/ })
  16424. /******/ });
  16425. /************************************************************************/
  16426. /******/ // The module cache
  16427. /******/ var __webpack_module_cache__ = {};
  16428. /******/
  16429. /******/ // The require function
  16430. /******/ function __webpack_require__(moduleId) {
  16431. /******/ // Check if module is in cache
  16432. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  16433. /******/ if (cachedModule !== undefined) {
  16434. /******/ return cachedModule.exports;
  16435. /******/ }
  16436. /******/ // Create a new module (and put it into the cache)
  16437. /******/ var module = __webpack_module_cache__[moduleId] = {
  16438. /******/ // no module.id needed
  16439. /******/ // no module.loaded needed
  16440. /******/ exports: {}
  16441. /******/ };
  16442. /******/
  16443. /******/ // Execute the module function
  16444. /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  16445. /******/
  16446. /******/ // Return the exports of the module
  16447. /******/ return module.exports;
  16448. /******/ }
  16449. /******/
  16450. /************************************************************************/
  16451. /******/ /* webpack/runtime/compat get default export */
  16452. /******/ (() => {
  16453. /******/ // getDefaultExport function for compatibility with non-harmony modules
  16454. /******/ __webpack_require__.n = (module) => {
  16455. /******/ var getter = module && module.__esModule ?
  16456. /******/ () => (module['default']) :
  16457. /******/ () => (module);
  16458. /******/ __webpack_require__.d(getter, { a: getter });
  16459. /******/ return getter;
  16460. /******/ };
  16461. /******/ })();
  16462. /******/
  16463. /******/ /* webpack/runtime/define property getters */
  16464. /******/ (() => {
  16465. /******/ // define getter functions for harmony exports
  16466. /******/ __webpack_require__.d = (exports, definition) => {
  16467. /******/ for(var key in definition) {
  16468. /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
  16469. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  16470. /******/ }
  16471. /******/ }
  16472. /******/ };
  16473. /******/ })();
  16474. /******/
  16475. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  16476. /******/ (() => {
  16477. /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  16478. /******/ })();
  16479. /******/
  16480. /************************************************************************/
  16481. var __webpack_exports__ = {};
  16482. // This entry need to be wrapped in an IIFE because it need to be in strict mode.
  16483. (() => {
  16484. "use strict";
  16485. // EXPORTS
  16486. __webpack_require__.d(__webpack_exports__, {
  16487. "default": () => (/* binding */ AR)
  16488. });
  16489. // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
  16490. var speedy_vision = __webpack_require__(774);
  16491. var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
  16492. ;// CONCATENATED MODULE: ./src/utils/errors.ts
  16493. /*
  16494. * encantar.js
  16495. * GPU-accelerated Augmented Reality for the web
  16496. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16497. *
  16498. * This program is free software: you can redistribute it and/or modify
  16499. * it under the terms of the GNU Lesser General Public License as published
  16500. * by the Free Software Foundation, either version 3 of the License, or
  16501. * (at your option) any later version.
  16502. *
  16503. * This program is distributed in the hope that it will be useful,
  16504. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16505. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16506. * GNU Lesser General Public License for more details.
  16507. *
  16508. * You should have received a copy of the GNU Lesser General Public License
  16509. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16510. *
  16511. * errors.ts
  16512. * Error classes
  16513. */
  16514. /**
  16515. * Base error class
  16516. */
  16517. class ARError extends Error {
  16518. /**
  16519. * Constructor
  16520. * @param message error message
  16521. * @param cause cause of the error
  16522. */
  16523. constructor(message = '', cause = null) {
  16524. super(message);
  16525. this.cause = cause;
  16526. }
  16527. /*{
  16528. // incorrect when minified
  16529. //return this.constructor.name;
  16530. }*/
  16531. /**
  16532. * Convert to string
  16533. */
  16534. toString() {
  16535. const extendedMessage = this.cause ? '\n-> ' + this.cause.toString() : '';
  16536. if (this.message != '')
  16537. return this.name + ': ' + this.message + extendedMessage;
  16538. else
  16539. return this.name + extendedMessage;
  16540. }
  16541. }
  16542. /**
  16543. * A method has received one or more illegal arguments
  16544. */
  16545. class IllegalArgumentError extends ARError {
  16546. get name() {
  16547. return 'IllegalArgumentError';
  16548. }
  16549. }
  16550. /**
  16551. * The method arguments are valid, but the method can't be called due to the
  16552. * current state of the object
  16553. */
  16554. class IllegalOperationError extends ARError {
  16555. get name() {
  16556. return 'IllegalOperationError';
  16557. }
  16558. }
  16559. /**
  16560. * The requested operation is not supported
  16561. */
  16562. class NotSupportedError extends ARError {
  16563. get name() {
  16564. return 'NotSupportedError';
  16565. }
  16566. }
  16567. /**
  16568. * Access denied
  16569. */
  16570. class AccessDeniedError extends ARError {
  16571. get name() {
  16572. return 'AccessDeniedError';
  16573. }
  16574. }
  16575. /**
  16576. * Timeout
  16577. */
  16578. class TimeoutError extends ARError {
  16579. get name() {
  16580. return 'TimeoutError';
  16581. }
  16582. }
  16583. /**
  16584. * Assertion error
  16585. */
  16586. class AssertionError extends ARError {
  16587. get name() {
  16588. return 'AssertionError';
  16589. }
  16590. }
  16591. /**
  16592. * Numerical error
  16593. */
  16594. class NumericalError extends ARError {
  16595. get name() {
  16596. return 'NumericalError';
  16597. }
  16598. }
  16599. /**
  16600. * Tracking error
  16601. */
  16602. class TrackingError extends ARError {
  16603. get name() {
  16604. return 'TrackingError';
  16605. }
  16606. }
  16607. /**
  16608. * Detection error
  16609. */
  16610. class DetectionError extends ARError {
  16611. get name() {
  16612. return 'DetectionError';
  16613. }
  16614. }
  16615. /**
  16616. * Training error
  16617. */
  16618. class TrainingError extends ARError {
  16619. get name() {
  16620. return 'TrainingError';
  16621. }
  16622. }
  16623. ;// CONCATENATED MODULE: ./src/utils/resolution.ts
  16624. /*
  16625. * encantar.js
  16626. * GPU-accelerated Augmented Reality for the web
  16627. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16628. *
  16629. * This program is free software: you can redistribute it and/or modify
  16630. * it under the terms of the GNU Lesser General Public License as published
  16631. * by the Free Software Foundation, either version 3 of the License, or
  16632. * (at your option) any later version.
  16633. *
  16634. * This program is distributed in the hope that it will be useful,
  16635. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16636. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16637. * GNU Lesser General Public License for more details.
  16638. *
  16639. * You should have received a copy of the GNU Lesser General Public License
  16640. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16641. *
  16642. * resolution.ts
  16643. * Resolution utilities
  16644. */
  16645. /** A regex that identifies custom resolutions */
  16646. const CUSTOM_RESOLUTION_REGEX = /^[1-9][0-9]?[0-9][02468]p$/;
  16647. /** Reference heights when in landscape mode, measured in pixels, for all aliases */
  16648. const ALIAS_TO_HEIGHT = {
  16649. 'xs': 120,
  16650. 'xs+': 144,
  16651. 'sm': 240,
  16652. 'sm+': 288,
  16653. 'md': 320,
  16654. 'md+': 360,
  16655. 'lg': 480,
  16656. 'lg+': 600,
  16657. 'xl': 720,
  16658. 'xl+': 900,
  16659. 'xxl': 1080,
  16660. };
  16661. /**
  16662. * Convert a resolution type to a (width, height) pair
  16663. * @param resolution resolution type
  16664. * @param aspectRatio desired width / height ratio
  16665. * @returns size in pixels
  16666. */
  16667. function computeResolution(resolution, aspectRatio) {
  16668. const referenceHeight = parseHeight(resolution);
  16669. let width = 0, height = 0;
  16670. if (Number.isNaN(referenceHeight))
  16671. throw new IllegalArgumentError('Invalid resolution: ' + resolution);
  16672. else if (aspectRatio <= 0)
  16673. throw new IllegalArgumentError('Invalid aspect ratio: ' + aspectRatio);
  16674. if (aspectRatio >= 1) {
  16675. // landscape
  16676. height = referenceHeight;
  16677. width = Math.floor(height * aspectRatio);
  16678. width += width % 2;
  16679. }
  16680. else {
  16681. // portrait
  16682. width = referenceHeight;
  16683. height = Math.floor(width / aspectRatio);
  16684. height += height % 2;
  16685. }
  16686. return speedy_vision_default().Size(width, height);
  16687. }
  16688. /**
  16689. * Get the height in pixels of a resolution
  16690. * @param resolution resolution type
  16691. * @returns the height in pixels, or NaN on error
  16692. */
  16693. function parseHeight(resolution) {
  16694. if (ALIAS_TO_HEIGHT.hasOwnProperty(resolution))
  16695. return ALIAS_TO_HEIGHT[resolution];
  16696. //if(CUSTOM_RESOLUTION_REGEX.test(resolution)) // really needed? is it fast?
  16697. if (resolution.endsWith('p')) {
  16698. const r = resolution[0];
  16699. if (r >= '1' && r <= '9')
  16700. return parseInt(resolution);
  16701. }
  16702. return Number.NaN;
  16703. }
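// Worked examples (illustrative):
/*
parseHeight('md+');                  // 360
parseHeight('480p');                 // 480
computeResolution('md+', 16/9);      // Size(640, 360) -> landscape
computeResolution('md+', 9/16);      // Size(360, 640) -> portrait
*/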
  16704. ;// CONCATENATED MODULE: ./src/utils/utils.ts
  16705. /*
  16706. * encantar.js
  16707. * GPU-accelerated Augmented Reality for the web
  16708. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16709. *
  16710. * This program is free software: you can redistribute it and/or modify
  16711. * it under the terms of the GNU Lesser General Public License as published
  16712. * by the Free Software Foundation, either version 3 of the License, or
  16713. * (at your option) any later version.
  16714. *
  16715. * This program is distributed in the hope that it will be useful,
  16716. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16717. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16718. * GNU Lesser General Public License for more details.
  16719. *
  16720. * You should have received a copy of the GNU Lesser General Public License
  16721. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16722. *
  16723. * utils.ts
  16724. * Generic utilities
  16725. */
  16726. /**
  16727. * Generic utilities
  16728. */
  16729. class Utils {
  16730. /**
  16731. * Log a message
  16732. * @param message
  16733. * @param args optional additional messages
  16734. */
  16735. static log(message, ...args) {
  16736. console.log('[encantar-js]', message, ...args);
  16737. }
  16738. /**
  16739. * Display a warning
  16740. * @param message
  16741. * @param args optional additional messages
  16742. */
  16743. static warning(message, ...args) {
  16744. console.warn('[encantar-js]', message, ...args);
  16745. }
  16746. /**
  16747. * Display an error message
  16748. * @param message
  16749. * @param args optional additional messages
  16750. */
  16751. static error(message, ...args) {
  16752. console.error('[encantar-js]', message, ...args);
  16753. }
  16754. /**
  16755. * Assertion
  16756. * @param expr expression
  16757. * @param errorMessage optional error message
  16758. * @throws {AssertionError}
  16759. */
  16760. static assert(expr, errorMessage = '') {
  16761. if (!expr)
  16762. throw new AssertionError(errorMessage);
  16763. }
  16764. /**
  16765. * Returns a range [0, 1, ..., n-1]
  16766. * @param n non-negative integer
  16767. * @returns range from 0 to n-1, inclusive
  16768. */
  16769. static range(n) {
  16770. if ((n |= 0) < 0)
  16771. throw new IllegalArgumentError();
  16772. return Array.from({ length: n }, (_, i) => i);
  16773. }
  16774. /**
  16775. * Wait a few milliseconds
  16776. * @param milliseconds how long should we wait?
  16777. * @returns a promise that is resolved soon after the specified time
  16778. */
  16779. static wait(milliseconds) {
  16780. return new (speedy_vision_default()).Promise(resolve => {
  16781. setTimeout(resolve, milliseconds);
  16782. });
  16783. }
  16784. /**
  16785. * Run SpeedyPromises sequentially
  16786. * @param promises an array of SpeedyPromises
  16787. * @returns a promise that is resolved as soon as all input promises are
  16788. * resolved, or that is rejected as soon as an input promise is rejected
  16789. */
  16790. static runInSequence(promises) {
  16791. return promises.reduce((prev, curr) => prev.then(() => curr), speedy_vision_default().Promise.resolve());
  16792. }
  16793. /**
  16794. * Convert a resolution type to a resolution measured in pixels
  16795. * @param resolution resolution type
  16796. * @param aspectRatio width / height ratio
  16797. * @returns resolution measured in pixels
  16798. */
  16799. static resolution(resolution, aspectRatio) {
  16800. return computeResolution(resolution, aspectRatio);
  16801. }
  16802. /**
  16803. * Returns a string containing platform brand information
  16804. * @returns platform brand information
  16805. */
  16806. static platformString() {
  16807. return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
  16808. navigator.userAgentData.platform : // use only low entropy data
  16809. navigator.platform // navigator.platform is deprecated
  16810. )(navigator);
  16811. }
  16812. /**
  16813. * Checks if we're on iOS
  16814. * @returns true if we're on iOS
  16815. */
  16816. static isIOS() {
  16817. // at the time of this writing, navigator.userAgentData is not yet
  16818. // compatible with Safari. navigator.platform is deprecated, but
  16819. // predictable.
  16820. if (/(iOS|iPhone|iPad|iPod)/i.test(navigator.platform))
  16821. return true;
  16822. if (/Mac/i.test(navigator.platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
  16823. return navigator.maxTouchPoints > 2;
  16824. return false;
  16825. }
  16826. /**
  16827. * Checks if we're on a WebKit-based browser
  16828. * @returns true if we're on a WebKit-based browser
  16829. */
  16830. static isWebKit() {
  16831. // note: navigator.vendor is deprecated
  16832. if (/Apple/.test(navigator.vendor))
  16833. return true;
  16834. // Can a non WebKit-based browser pass this test?
  16835. // Test masked GL_RENDERER == "Apple GPU" (valid since Feb 2020)
  16836. // https://bugs.webkit.org/show_bug.cgi?id=207608
  16837. /*
  16838. if(Speedy.Platform.renderer == 'Apple GPU' && Speedy.Platform.vendor == 'Apple Inc.')
  16839. return true;
  16840. */
  16841. // Desktop and Mobile Safari, Epiphany on Linux
  16842. if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
  16843. return true;
  16844. // Chrome, Firefox, Edge on iOS
  16845. if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
  16846. return true;
  16847. // not WebKit
  16848. return false;
  16849. }
  16850. /**
  16851. * Device-specific information for debugging purposes
  16852. */
  16853. static deviceInfo() {
  16854. return 'Device info: ' + JSON.stringify({
  16855. isIOS: Utils.isIOS(),
  16856. isWebKit: Utils.isWebKit(),
  16857. renderer: (speedy_vision_default()).Platform.renderer,
  16858. vendor: (speedy_vision_default()).Platform.vendor,
  16859. screen: [screen.width, screen.height].join('x'),
  16860. platform: [navigator.platform, navigator.vendor].join('; '),
  16861. userAgent: navigator.userAgent,
  16862. userAgentData: navigator.userAgentData || null,
  16863. }, null, 2);
  16864. }
  16865. }
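// Usage sketch (illustrative):
/*
Utils.range(3);                                // [0, 1, 2]
Utils.wait(100).then(() => Utils.log('ok'));   // resolves after ~100 ms
Utils.resolution('md+', 16/9);                 // Speedy.Size(640, 360)
*/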
  16866. ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
  16867. /*
  16868. * encantar.js
  16869. * GPU-accelerated Augmented Reality for the web
  16870. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16871. *
  16872. * This program is free software: you can redistribute it and/or modify
  16873. * it under the terms of the GNU Lesser General Public License as published
  16874. * by the Free Software Foundation, either version 3 of the License, or
  16875. * (at your option) any later version.
  16876. *
  16877. * This program is distributed in the hope that it will be useful,
  16878. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16879. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16880. * GNU Lesser General Public License for more details.
  16881. *
  16882. * You should have received a copy of the GNU Lesser General Public License
  16883. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16884. *
  16885. * ar-events.ts
  16886. * AR-related Events
  16887. */
  16888. /**
  16889. * AR Event
  16890. */
  16891. class AREvent extends Event {
  16892. /**
  16893. * Constructor
  16894. * @param type event type
  16895. */
  16896. constructor(type) {
  16897. super(type);
  16898. }
  16899. /**
  16900. * Event type
  16901. */
  16902. get type() {
  16903. return super.type;
  16904. }
  16905. }
  16906. /**
  16907. * AR Event Target
  16908. */
  16909. class AREventTarget {
  16910. /**
  16911. * Constructor
  16912. */
  16913. constructor() {
  16914. this._delegate = new EventTarget();
  16915. }
  16916. /**
  16917. * Add event listener
  16918. * @param type event type
  16919. * @param callback
  16920. */
  16921. addEventListener(type, callback) {
  16922. this._delegate.addEventListener(type, callback);
  16923. }
  16924. /**
  16925. * Remove event listener
  16926. * @param type event type
  16927. * @param callback
  16928. */
  16929. removeEventListener(type, callback) {
  16930. this._delegate.removeEventListener(type, callback);
  16931. }
  16932. /**
  16933. * Synchronously trigger an event
  16934. * @param event
  16935. * @returns same value as a standard event target
  16936. * @internal
  16937. */
  16938. dispatchEvent(event) {
  16939. return this._delegate.dispatchEvent(event);
  16940. }
  16941. }
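// Usage sketch (illustrative, with a hypothetical event type 'example'):
/*
class MyTarget extends AREventTarget { }
const target = new MyTarget();
target.addEventListener('example', event => console.log(event.type)); // 'example'
target.dispatchEvent(new AREvent('example'));
*/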
  16942. ;// CONCATENATED MODULE: ./src/core/stats.ts
  16943. /*
  16944. * encantar.js
  16945. * GPU-accelerated Augmented Reality for the web
  16946. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16947. *
  16948. * This program is free software: you can redistribute it and/or modify
  16949. * it under the terms of the GNU Lesser General Public License as published
  16950. * by the Free Software Foundation, either version 3 of the License, or
  16951. * (at your option) any later version.
  16952. *
  16953. * This program is distributed in the hope that it will be useful,
  16954. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16955. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16956. * GNU Lesser General Public License for more details.
  16957. *
  16958. * You should have received a copy of the GNU Lesser General Public License
  16959. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16960. *
  16961. * stats.ts
  16962. * Stats for performance measurements
  16963. */
  16964. /** update interval, given in seconds */
  16965. const UPDATE_INTERVAL = 0.5;
  16966. /**
  16967. * Stats for performance measurements
  16968. */
  16969. class Stats {
  16970. /**
  16971. * Constructor
  16972. */
  16973. constructor() {
  16974. this._timeOfLastUpdate = this._now();
  16975. this._partialCycleCount = 0;
  16976. this._cyclesPerSecond = 0;
  16977. }
  16978. /**
  16979. * Update stats - call every frame
  16980. */
  16981. update() {
  16982. const now = this._now();
  16983. ++this._partialCycleCount;
  16984. if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
  16985. this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
  16986. this._partialCycleCount = 0;
  16987. this._timeOfLastUpdate = now;
  16988. }
  16989. }
  16990. /**
  16991. * Reset stats
  16992. */
  16993. reset() {
  16994. this._timeOfLastUpdate = this._now();
  16995. this._partialCycleCount = 0;
  16996. this._cyclesPerSecond = 0;
  16997. }
  16998. /**
  16999. * Number of cycles per second
  17000. */
  17001. get cyclesPerSecond() {
  17002. return this._cyclesPerSecond;
  17003. }
  17004. /**
  17005. * A measurement of time, in milliseconds
  17006. * @returns time in ms
  17007. */
  17008. _now() {
  17009. return performance.now();
  17010. }
  17011. }
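// Usage sketch (illustrative): call update() once per cycle and read the rate.
/*
const stats = new Stats();
function loop() {
    stats.update();
    console.log(stats.cyclesPerSecond);
    requestAnimationFrame(loop);
}
requestAnimationFrame(loop);
*/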
  17012. ;// CONCATENATED MODULE: ./src/ui/stats-panel.ts
  17013. /*
  17014. * encantar.js
  17015. * GPU-accelerated Augmented Reality for the web
  17016. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17017. *
  17018. * This program is free software: you can redistribute it and/or modify
  17019. * it under the terms of the GNU Lesser General Public License as published
  17020. * by the Free Software Foundation, either version 3 of the License, or
  17021. * (at your option) any later version.
  17022. *
  17023. * This program is distributed in the hope that it will be useful,
  17024. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17025. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17026. * GNU Lesser General Public License for more details.
  17027. *
  17028. * You should have received a copy of the GNU Lesser General Public License
  17029. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17030. *
  17031. * stats-panel.ts
  17032. * Stats panel used for development purposes
  17033. */
  17034. /** Update interval, in ms */
  17035. const stats_panel_UPDATE_INTERVAL = 500;
  17036. /** Icons for different power profiles */
  17037. const POWER_ICON = Object.freeze({
  17038. 'default': '',
  17039. 'low-power': '&#x1F50B',
  17040. 'high-performance': '&#x26A1'
  17041. });
  17042. /**
  17043. * Stats panel used for development purposes
  17044. */
  17045. class StatsPanel {
  17046. /**
  17047. * Constructor
  17048. * @param viewport Viewport
  17049. */
  17050. constructor(viewport) {
  17051. this._viewport = viewport;
  17052. this._lastUpdate = 0;
  17053. this._container = this._createContainer();
  17054. viewport.hud.container.appendChild(this._container);
  17055. }
  17056. /**
  17057. * Release the panel
  17058. */
  17059. release() {
  17060. this._container.remove();
  17061. }
  17062. /**
  17063. * A method to be called in the update loop
  17064. * @param time current time in ms
  17065. * @param trackers the trackers attached to the session
  17066. * @param sources the sources of media linked to the session
  17067. * @param gpu GPU cycles per second
  17068. * @param fps frames per second
  17069. */
  17070. update(time, trackers, sources, gpu, fps) {
  17071. if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
  17072. this._lastUpdate = time;
  17073. this._update(trackers, sources, fps, gpu);
  17074. }
  17075. }
  17076. /**
  17077. * Visibility of the panel
  17078. */
  17079. get visible() {
  17080. return !this._container.hidden;
  17081. }
  17082. /**
  17083. * Visibility of the panel
  17084. */
  17085. set visible(visible) {
  17086. this._container.hidden = !visible;
  17087. }
  17088. /**
  17089. * Update the contents of the panel
  17090. * @param trackers the trackers attached to the session
  17091. * @param sources the sources of media linked to the session
  17092. * @param fps frames per second
  17093. * @param gpu GPU cycles per second
  17094. */
  17095. _update(trackers, sources, fps, gpu) {
  17096. // all sanitized
  17097. const lfps = this._label('_ar_fps');
  17098. if (lfps !== null) {
  17099. lfps.style.color = this._color(fps);
  17100. lfps.innerText = String(fps);
  17101. }
  17102. const lgpu = this._label('_ar_gpu');
  17103. if (lgpu !== null) {
  17104. lgpu.style.color = this._color(gpu);
  17105. lgpu.innerText = String(gpu);
  17106. }
  17107. const lpower = this._label('_ar_power');
  17108. if (lpower !== null)
  17109. lpower.innerHTML = POWER_ICON[Settings.powerPreference];
  17110. const lin = this._label('_ar_in');
  17111. if (lin !== null) {
  17112. const sourceStats = sources.map(source => source._stats).join(', ');
  17113. lin.innerText = sourceStats;
  17114. }
  17115. const lout = this._label('_ar_out');
  17116. if (lout !== null) {
  17117. const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
  17118. lout.innerText = trackerStats;
  17119. }
  17120. }
  17121. /**
  17122. * Get a label of the panel
  17123. * @param className
  17124. * @returns the HTML element, or null if it doesn't exist
  17125. */
  17126. _label(className) {
  17127. return this._container.getElementsByClassName(className).item(0);
  17128. }
  17129. /**
  17130. * Associate a color to a frequency number
  17131. * @param f frequency given in cycles per second
  17132. * @returns colorized number (HTML)
  17133. */
  17134. _color(f) {
  17135. const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
  17136. const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
  17137. const color2 = f >= 30 ? GREEN : RED;
  17138. const color = Settings.powerPreference != 'low-power' ? color3 : color2;
  17139. return color;
  17140. }
  17141. /**
  17142. * Create the container for the panel
  17143. * @returns a container
  17144. */
  17145. _createContainer() {
  17146. const container = document.createElement('div');
  17147. container.style.position = 'absolute';
  17148. container.style.left = container.style.top = '0px';
  17149. container.style.zIndex = '1000000';
  17150. container.style.padding = '0px';
  17151. container.appendChild(this._createTitle());
  17152. container.appendChild(this._createContent());
  17153. return container;
  17154. }
  17155. /**
  17156. * Create a title
  17157. * @returns a title
  17158. */
  17159. _createTitle() {
  17160. const title = document.createElement('div');
  17161. title.style.backgroundColor = '#7e56c2';
  17162. title.style.color = 'white';
  17163. title.style.fontFamily = 'monospace';
  17164. title.style.fontSize = '14px';
  17165. title.style.fontWeight = 'bold';
  17166. title.style.padding = '2px';
  17167. title.innerText = 'encantar.js ' + AR.version;
  17168. return title;
  17169. }
  17170. /**
  17171. * Create a content container
  17172. * @returns a content container
  17173. */
  17174. _createContent() {
  17175. const content = document.createElement('div');
  17176. const print = (html) => content.insertAdjacentHTML('beforeend', html);
  17177. content.style.backgroundColor = 'rgba(0,0,0,0.5)';
  17178. content.style.color = 'white';
  17179. content.style.fontFamily = 'monospace';
  17180. content.style.fontSize = '14px';
  17181. content.style.padding = '2px';
  17182. content.style.whiteSpace = 'pre-line';
  17183. // all sanitized
  17184. print('FPS: <span class="_ar_fps"></span> | ');
  17185. print('GPU: <span class="_ar_gpu"></span> ');
  17186. print('<span class="_ar_power"></span>');
  17187. print('<br>');
  17188. print('IN: <span class="_ar_in"></span>');
  17189. print('<br>');
  17190. print('OUT: <span class="_ar_out"></span>');
  17191. return content;
  17192. }
  17193. }
  17194. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
  17195. /*
  17196. * encantar.js
  17197. * GPU-accelerated Augmented Reality for the web
  17198. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17199. *
  17200. * This program is free software: you can redistribute it and/or modify
  17201. * it under the terms of the GNU Lesser General Public License as published
  17202. * by the Free Software Foundation, either version 3 of the License, or
  17203. * (at your option) any later version.
  17204. *
  17205. * This program is distributed in the hope that it will be useful,
  17206. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17207. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17208. * GNU Lesser General Public License for more details.
  17209. *
  17210. * You should have received a copy of the GNU Lesser General Public License
  17211. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17212. *
  17213. * settings.ts
  17214. * Settings of the Image Tracker
  17215. */
  17216. /** Maximum number of keypoints to be stored for each reference image when in the training state */
  17217. const TRAIN_MAX_KEYPOINTS = 1024; //512;
  17218. /** Percentage relative to the screen size adjusted to the aspect ratio of the reference image */
  17219. const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
  17220. /** Width and height of the Normalized Image Space (NIS) */
  17221. const NIS_SIZE = 1024; // keypoint positions are stored as fixed point
17222. /** Used to identify the best matches */
  17223. const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
  17224. /** Maximum number of keypoints to be analyzed when in the scanning state */
  17225. const SCAN_MAX_KEYPOINTS = 512;
  17226. /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
  17227. const SCAN_PYRAMID_LEVELS = 4; //7;
  17228. /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
  17229. const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
  17230. /** Threshold of the FAST corner detector used in the scanning/training states */
  17231. const SCAN_FAST_THRESHOLD = 60;
  17232. /** Minimum number of accepted matches for us to move out of the scanning state */
  17233. const SCAN_MIN_MATCHES = 20; //30;
17234. /** When in the scanning state, we require the image to be matched for a few consecutive frames before accepting it */
  17235. const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
  17236. /** Reprojection error, in NIS pixels, used when estimating a motion model (scanning state) */
  17237. const SCAN_RANSAC_REPROJECTIONERROR_NIS = (NIS_SIZE * 0.02) | 0;
  17238. /** Reprojection error, in NDC, used when estimating a motion model (scanning state) */
  17239. const SCAN_RANSAC_REPROJECTIONERROR_NDC = SCAN_RANSAC_REPROJECTIONERROR_NIS / (NIS_SIZE / 2);
  17240. /** Number of tables used in the LSH-based keypoint matching */
  17241. const SCAN_LSH_TABLES = 8; // up to 32
  17242. /** Hash size, in bits, used in the LSH-based keypoint matching */
  17243. const SCAN_LSH_HASHSIZE = 15; // up to 16
  17244. /** Use the Nightvision filter when in the scanning/training state? */
  17245. const SCAN_WITH_NIGHTVISION = true;
  17246. /** Nightvision filter: gain */
  17247. const NIGHTVISION_GAIN = 0.3; // 0.2;
  17248. /** Nightvision filter: offset */
  17249. const NIGHTVISION_OFFSET = 0.5;
  17250. /** Nightvision filter: decay */
  17251. const NIGHTVISION_DECAY = 0.0;
  17252. /** Nightvision filter: quality level */
  17253. const NIGHTVISION_QUALITY = 'low';
  17254. /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
  17255. const ORB_GAUSSIAN_KSIZE = 9;
  17256. /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
  17257. const ORB_GAUSSIAN_SIGMA = 2.0;
  17258. /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
  17259. const SUBPIXEL_GAUSSIAN_KSIZE = 5;
  17260. /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
  17261. const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
  17262. /** Subpixel refinement method */
  17263. const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
  17264. /** Minimum acceptable number of matched keypoints when in a pre-tracking state */
  17265. const PRE_TRACK_MIN_MATCHES = 4;
  17266. /** Minimum acceptable number of matched keypoints when in the tracking state */
  17267. const TRACK_MIN_MATCHES = 4; //10; //20;
  17268. /** Maximum number of keypoints to be analyzed in the tracking state */
  17269. const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
17270. /** Capacity of the keypoint detector used in the tracking state */
  17271. const TRACK_DETECTOR_CAPACITY = 2048; //4096;
  17272. /** Quality of the Harris/Shi-Tomasi corner detector */
  17273. const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
  17274. /** Use the Nightvision filter when in the tracking state? */
  17275. const TRACK_WITH_NIGHTVISION = false; // produces shaking?
  17276. /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
  17277. const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
  17278. /** Relative size (%) used to clip keypoints from the borders of the rectified image */
  17279. const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
  17280. /** Scale of the rectified image in NDC, without taking the aspect ratio into consideration */
  17281. const TRACK_RECTIFIED_SCALE = 1 - 2 * TRACK_RECTIFIED_BORDER;
  17282. /** Reprojection error, in NIS pixels, used when estimating a motion model (tracking state) */
  17283. const TRACK_RANSAC_REPROJECTIONERROR_NIS = (NIS_SIZE * 0.0125) | 0;
  17284. /** Reprojection error, in NDC, used when estimating a motion model (tracking state) */
  17285. const TRACK_RANSAC_REPROJECTIONERROR_NDC = TRACK_RANSAC_REPROJECTIONERROR_NIS / (NIS_SIZE / 2);
17286. /** We use an N x N grid to spatially distribute the keypoints in order to compute a better homography */
  17287. const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
17288. /** Used to identify the best matches */
  17289. const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
  17290. /** Number of consecutive frames in which we tolerate a "target lost" situation */
  17291. const TRACK_LOST_TOLERANCE = 15;
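// Derived values (illustrative): with NIS_SIZE = 1024, the RANSAC reprojection
// errors above work out to SCAN: (1024 * 0.02) | 0 = 20 NIS pixels = 20 / 512 = 0.0390625 NDC,
// and TRACK: (1024 * 0.0125) | 0 = 12 NIS pixels = 12 / 512 = 0.0234375 NDC.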
  17292. ;// CONCATENATED MODULE: ./src/ui/gizmos.ts
  17293. /*
  17294. * encantar.js
  17295. * GPU-accelerated Augmented Reality for the web
  17296. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17297. *
  17298. * This program is free software: you can redistribute it and/or modify
  17299. * it under the terms of the GNU Lesser General Public License as published
  17300. * by the Free Software Foundation, either version 3 of the License, or
  17301. * (at your option) any later version.
  17302. *
  17303. * This program is distributed in the hope that it will be useful,
  17304. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17305. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17306. * GNU Lesser General Public License for more details.
  17307. *
  17308. * You should have received a copy of the GNU Lesser General Public License
  17309. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17310. *
  17311. * gizmos.ts
  17312. * Visual cues for testing & debugging
  17313. */
  17314. /**
  17315. * Visual cues for testing & debugging
  17316. */
  17317. class Gizmos {
  17318. /**
  17319. * Constructor
  17320. */
  17321. constructor() {
  17322. this._visible = false;
  17323. this._imageTrackerGizmos = new ImageTrackerGizmos();
  17324. }
  17325. /**
  17326. * Whether or not the gizmos will be rendered
  17327. */
  17328. get visible() {
  17329. return this._visible;
  17330. }
  17331. /**
  17332. * Whether or not the gizmos will be rendered
  17333. */
  17334. set visible(visible) {
  17335. this._visible = visible;
  17336. }
  17337. /**
  17338. * Render gizmos
  17339. * @param viewport
  17340. * @param trackers
  17341. * @internal
  17342. */
  17343. _render(viewport, trackers) {
  17344. // no need to render?
  17345. if (!this._visible)
  17346. return;
  17347. // render the gizmos of each tracker
  17348. for (let i = 0; i < trackers.length; i++) {
  17349. if (trackers[i].type == 'image-tracker') {
  17350. const output = trackers[i]._output;
  17351. this._imageTrackerGizmos.render(viewport, output);
  17352. }
  17353. }
  17354. }
  17355. }
  17356. /**
  17357. * Gizmos renderer of Image Trackers
  17358. */
  17359. class ImageTrackerGizmos {
  17360. /**
  17361. * Render gizmos
  17362. * @param viewport viewport
  17363. * @param output tracker output
  17364. */
  17365. render(viewport, output) {
  17366. const canvas = viewport._backgroundCanvas;
  17367. const ctx = canvas.getContext('2d', { alpha: false });
  17368. if (!ctx)
  17369. return;
  17370. const viewportSize = viewport._realSize;
  17371. const keypointsNIS = output.keypointsNIS;
  17372. const polylineNDC = output.polylineNDC;
  17373. const camera = output.camera;
  17374. // debug
  17375. //ctx.fillStyle = '#000';
  17376. //ctx.fillRect(0, 0, canvas.width, canvas.height);
  17377. //ctx.clearRect(0, 0, canvas.width, canvas.height);
  17378. // render keypoints
  17379. if (keypointsNIS !== undefined)
  17380. this._splitAndRenderKeypointsNIS(ctx, keypointsNIS, viewportSize);
  17381. // render polylines
  17382. if (polylineNDC !== undefined)
  17383. this._renderPolylineNDC(ctx, polylineNDC, viewportSize);
  17384. // render the axes of the 3D coordinate system
  17385. if (camera !== undefined)
  17386. this._renderAxes(ctx, camera, viewportSize);
  17387. }
  17388. /**
  17389. * Split keypoints in matched/unmatched categories and
  17390. * render them for testing & development purposes
  17391. * @param ctx canvas 2D context
  17392. * @param keypoints keypoints in Normalized Image Space (NIS)
  17393. * @param viewportSize viewport size
  17394. * @param size base keypoint rendering size
  17395. */
  17396. _splitAndRenderKeypointsNIS(ctx, keypoints, viewportSize, size = 1) {
  17397. if (keypoints.length == 0)
  17398. return;
  17399. if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
  17400. this._renderKeypointsNIS(ctx, keypoints, viewportSize, '#f00', size);
  17401. return;
  17402. }
  17403. const goodMatches = [], badMatches = [];
  17404. for (let i = 0; i < keypoints.length; i++) {
  17405. const keypoint = keypoints[i];
  17406. if (this._isGoodMatch(keypoint))
  17407. goodMatches.push(keypoint);
  17408. else
  17409. badMatches.push(keypoint);
  17410. }
  17411. this._renderKeypointsNIS(ctx, badMatches, viewportSize, '#f00', size);
  17412. this._renderKeypointsNIS(ctx, goodMatches, viewportSize, '#0f0', size);
  17413. }
  17414. /**
  17415. * Check if a matched keypoint is "good enough"
  17416. * @param keypoint matched keypoint
  17417. * @returns a boolean
  17418. */
  17419. _isGoodMatch(keypoint) {
  17420. const GOOD_MATCH_THRESHOLD = 0.7; // the maximum match distance ratio we'll consider to be "good"
  17421. const n = keypoint.matches.length;
  17422. if (n > 1) {
  17423. return (keypoint.matches[0].index >= 0 &&
  17424. keypoint.matches[1].index >= 0 &&
  17425. keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
  17426. }
  17427. else if (n == 1)
  17428. return keypoint.matches[0].index >= 0;
  17429. return false;
  17430. }
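/*
   Illustrative note: the check above is a distance-ratio test. With hypothetical numbers:
   - matches[0].distance = 30, matches[1].distance = 50:
     30 <= 0.7 * 50 = 35, so the keypoint counts as a good match;
   - matches[0].distance = 45, matches[1].distance = 50:
     45 > 35, so the match is considered ambiguous and rejected.
   Keypoints whose best match has a negative index fail the test as well.
*/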
  17431. /**
  17432. * Render keypoints for testing & development purposes
  17433. * @param ctx canvas 2D context
  17434. * @param keypoints keypoints in Normalized Image Space (NIS)
  17435. * @param viewportSize viewport size
  17436. * @param color color of the rendered keypoints
  17437. * @param size base keypoint rendering size
  17438. */
  17439. _renderKeypointsNIS(ctx, keypoints, viewportSize, color = 'red', size = 1) {
  17440. const sx = viewportSize.width / NIS_SIZE;
  17441. const sy = viewportSize.height / NIS_SIZE;
  17442. ctx.beginPath();
  17443. for (let i = keypoints.length - 1; i >= 0; i--) {
  17444. const keypoint = keypoints[i];
  17445. const x = (keypoint.x * sx + 0.5) | 0;
  17446. const y = (keypoint.y * sy + 0.5) | 0;
  17447. const r = (size * keypoint.scale + 0.5) | 0;
  17448. ctx.rect(x - r, y - r, 2 * r, 2 * r);
  17449. }
  17450. ctx.strokeStyle = color;
  17451. ctx.lineWidth = 1;
  17452. ctx.stroke();
  17453. }
  17454. /**
  17455. * Render a polyline for testing & development purposes
  17456. * @param ctx canvas 2D context
  17457. * @param polyline vertices in NDC
  17458. * @param viewportSize viewport size
  17459. * @param color color of the rendered polyline
17460. * @param lineWidth line width, in pixels
  17461. */
  17462. _renderPolylineNDC(ctx, polyline, viewportSize, color = '#0f0', lineWidth = 2) {
  17463. const n = polyline.length;
  17464. const w = viewportSize.width;
  17465. const h = viewportSize.height;
  17466. if (n == 0)
  17467. return;
  17468. ctx.beginPath();
  17469. ctx.moveTo((polyline[n - 1].x * 0.5 + 0.5) * w, (polyline[n - 1].y * -0.5 + 0.5) * h);
  17470. for (let j = 0; j < n; j++)
  17471. ctx.lineTo((polyline[j].x * 0.5 + 0.5) * w, (polyline[j].y * -0.5 + 0.5) * h);
  17472. ctx.strokeStyle = color;
  17473. ctx.lineWidth = lineWidth;
  17474. ctx.stroke();
  17475. }
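/*
   Worked example of the NDC-to-canvas mapping used above (illustrative numbers):
   with a 640x480 viewport, the NDC vertex (-1, +1) maps to
   x = (-1 * 0.5 + 0.5) * 640 = 0 and y = (+1 * -0.5 + 0.5) * 480 = 0 (top-left),
   while (+1, -1) maps to (640, 480) (bottom-right). The y coordinate is flipped
   because NDC points up, whereas canvas coordinates grow downwards.
*/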
  17476. /**
  17477. * Render the axes of a 3D coordinate system
  17478. * @param ctx canvas 2D context
  17479. * @param camera camera model
  17480. * @param viewportSize viewport size
17481. * @param lineWidth line width, in pixels
  17482. */
  17483. _renderAxes(ctx, camera, viewportSize, lineWidth = 4) {
  17484. const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
  17485. const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
17486. const length = 1; // length of the line drawn for each axis, in normalized space units
  17487. const w = viewportSize.width;
  17488. const h = viewportSize.height;
  17489. const iw = 1 / (camera.imageSize.width / 2);
  17490. const ih = -1 / (camera.imageSize.height / 2);
  17491. /*
  17492. Multiply the 3x4 camera matrix by:
  17493. [ 0 L 0 0 ]
  17494. [ 0 0 L 0 ] , where L = length in normalized space of the lines
  17495. [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
  17496. [ 1 1 1 1 ]
  17497. Each column of the resulting matrix will give us the pixel coordinates
  17498. we're looking for: origin and the axes.
  17499. Note: we're working with homogeneous coordinates
  17500. */
  17501. const p = camera.matrix.read();
  17502. const l = length;
  17503. const o = [p[9], p[10], p[11]]; // origin of the coordinate system
  17504. const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
  17505. const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
  17506. const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
  17507. const axis = [x, y, z];
  17508. // draw each axis
  17509. const ox = o[0] / o[2], oy = o[1] / o[2];
  17510. for (let i = 0; i < 3; i++) {
  17511. const q = axis[i];
  17512. const x = q[0] / q[2], y = q[1] / q[2];
  17513. ctx.beginPath();
  17514. ctx.moveTo((ox * iw * 0.5 + 0.5) * w, (oy * ih * 0.5 + 0.5) * h);
  17515. ctx.lineTo((x * iw * 0.5 + 0.5) * w, (y * ih * 0.5 + 0.5) * h);
  17516. ctx.strokeStyle = color[i];
  17517. ctx.lineWidth = lineWidth;
  17518. ctx.stroke();
  17519. }
  17520. //console.log("Origin",ox,oy);
  17521. }
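/*
   Note on the math above: p holds the 12 entries of the 3x4 camera matrix as four
   consecutive 3-vectors. The last one (p[9..11]) is the projection of the origin and
   the first three give the projections of points at distance L along the X, Y and Z
   axes, in homogeneous coordinates. Dividing each resulting 3-vector by its third
   (homogeneous) component performs the perspective divide; iw and ih then rescale
   those (apparently center-origin) image coordinates to [-1, 1] before applying the
   same NDC-to-viewport mapping used in _renderPolylineNDC().
*/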
  17522. }
  17523. ;// CONCATENATED MODULE: ./src/core/frame.ts
  17524. /*
  17525. * encantar.js
  17526. * GPU-accelerated Augmented Reality for the web
  17527. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17528. *
  17529. * This program is free software: you can redistribute it and/or modify
  17530. * it under the terms of the GNU Lesser General Public License as published
  17531. * by the Free Software Foundation, either version 3 of the License, or
  17532. * (at your option) any later version.
  17533. *
  17534. * This program is distributed in the hope that it will be useful,
  17535. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17536. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17537. * GNU Lesser General Public License for more details.
  17538. *
  17539. * You should have received a copy of the GNU Lesser General Public License
  17540. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17541. *
  17542. * frame.ts
  17543. * A Frame holds information used to render a single animation frame of a Session
  17544. */
  17545. /**
  17546. * A Frame holds information used to render a single animation frame of a Session
  17547. */
  17548. class Frame {
  17549. /**
  17550. * Constructor
  17551. * @param session
  17552. * @param results
  17553. */
  17554. constructor(session, results) {
  17555. this._session = session;
  17556. this._results = results;
  17557. }
  17558. /**
  17559. * The session of which this frame holds data
  17560. */
  17561. get session() {
  17562. return this._session;
  17563. }
  17564. /**
  17565. * The results of all trackers in this frame
  17566. */
  17567. get results() {
  17568. // we want to be able to iterate over the results of a frame multiple times
  17569. return this._results[Symbol.iterator]();
  17570. }
  17571. }
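/*
   Usage sketch (illustrative; `session` and `animate` are assumed names):
   frames are passed to callbacks scheduled with session.requestAnimationFrame(),
   and frame.results can be iterated more than once.

   function animate(time, frame)
   {
       for(const result of frame.results) {
           // result.tracker, result.trackables, ...
       }

       frame.session.requestAnimationFrame(animate); // keep the loop going
   }
   session.requestAnimationFrame(animate);
*/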
  17572. ;// CONCATENATED MODULE: ./src/core/time-manager.ts
  17573. /*
  17574. * encantar.js
  17575. * GPU-accelerated Augmented Reality for the web
  17576. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17577. *
  17578. * This program is free software: you can redistribute it and/or modify
  17579. * it under the terms of the GNU Lesser General Public License as published
  17580. * by the Free Software Foundation, either version 3 of the License, or
  17581. * (at your option) any later version.
  17582. *
  17583. * This program is distributed in the hope that it will be useful,
  17584. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17585. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17586. * GNU Lesser General Public License for more details.
  17587. *
  17588. * You should have received a copy of the GNU Lesser General Public License
  17589. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17590. *
  17591. * time-manager.ts
  17592. * Time Manager
  17593. */
  17594. /**
  17595. * Time Manager
  17596. */
  17597. class TimeManager {
  17598. constructor() {
  17599. /** time scale */
  17600. this._scale = 1;
  17601. /** time since the start of the session, in milliseconds */
  17602. this._time = 0;
  17603. /** unscaled time since the start of the session, in milliseconds */
  17604. this._unscaledTime = 0;
  17605. /** elapsed time between the current and the previous frame, in milliseconds */
  17606. this._delta = 0;
  17607. /** time of the first update call, in milliseconds */
  17608. this._firstUpdate = 0;
  17609. /** time of the last update call, in milliseconds */
  17610. this._lastUpdate = Number.POSITIVE_INFINITY;
  17611. }
  17612. /**
  17613. * Update the Time Manager
  17614. * @param timestamp in milliseconds
  17615. * @internal
  17616. */
  17617. _update(timestamp) {
  17618. if (timestamp < this._lastUpdate) {
  17619. this._firstUpdate = this._lastUpdate = timestamp;
  17620. return;
  17621. }
  17622. this._delta = (timestamp - this._lastUpdate) * this._scale;
  17623. this._time += this._delta;
  17624. this._unscaledTime = timestamp - this._firstUpdate;
  17625. this._lastUpdate = timestamp;
  17626. }
  17627. /**
  17628. * Elapsed time since the start of the session, measured at the
  17629. * beginning of the current animation frame and given in seconds
  17630. */
  17631. get elapsed() {
  17632. return this._time * 0.001;
  17633. }
  17634. /**
  17635. * Elapsed time between the current and the previous animation
  17636. * frame, given in seconds
  17637. */
  17638. get delta() {
  17639. return this._delta * 0.001;
  17640. }
  17641. /**
  17642. * Time scale (defaults to 1)
  17643. */
  17644. get scale() {
  17645. return this._scale;
  17646. }
  17647. /**
  17648. * Time scale (defaults to 1)
  17649. */
  17650. set scale(scale) {
  17651. this._scale = Math.max(0, +scale);
  17652. }
  17653. /**
  17654. * Time scale independent elapsed time since the start of the session,
  17655. * measured at the beginning of the current animation frame and given
  17656. * in seconds
  17657. */
  17658. get unscaled() {
  17659. return this._unscaledTime * 0.001;
  17660. }
  17661. }
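/*
   Usage sketch (illustrative): time-based animation with the TimeManager of a session.

   session.time.scale = 0.5;                  // slow motion (0 freezes scaled time)
   const dx = speed * session.time.delta;     // delta is given in seconds
   // session.time.elapsed and session.time.unscaled are also given in seconds
*/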
  17662. ;// CONCATENATED MODULE: ./src/utils/asap.ts
  17663. /*
  17664. * encantar.js
  17665. * GPU-accelerated Augmented Reality for the web
  17666. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17667. *
  17668. * This program is free software: you can redistribute it and/or modify
  17669. * it under the terms of the GNU Lesser General Public License as published
  17670. * by the Free Software Foundation, either version 3 of the License, or
  17671. * (at your option) any later version.
  17672. *
  17673. * This program is distributed in the hope that it will be useful,
  17674. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17675. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17676. * GNU Lesser General Public License for more details.
  17677. *
  17678. * You should have received a copy of the GNU Lesser General Public License
  17679. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17680. *
  17681. * asap.ts
  17682. * Schedule a function to run "as soon as possible"
  17683. */
  17684. /** callbacks */
  17685. const callbacks = [];
  17686. /** arguments to be passed to the callbacks */
  17687. const args = [];
  17688. /** asap key */
  17689. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  17690. // Register an event listener
  17691. window.addEventListener('message', event => {
  17692. if (event.source !== window || event.data !== ASAP_KEY)
  17693. return;
  17694. event.stopPropagation();
  17695. if (callbacks.length == 0)
  17696. return;
  17697. const fn = callbacks.pop();
  17698. const argArray = args.pop();
  17699. fn.apply(undefined, argArray);
  17700. }, true);
  17701. /**
  17702. * Schedule a function to run "as soon as possible"
  17703. * @param fn callback
  17704. * @param params optional parameters
  17705. */
  17706. function asap(fn, ...params) {
  17707. callbacks.unshift(fn);
  17708. args.unshift(params);
  17709. window.postMessage(ASAP_KEY, '*');
  17710. }
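/*
   Note: asap() relies on window.postMessage() rather than setTimeout(fn, 0), so the
   callback runs in a new task without the clamping applied to nested timers.
   Illustrative usage:

   asap((a, b) => console.log(a + b), 2, 3);  // logs 5 "as soon as possible"
*/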
  17711. ;// CONCATENATED MODULE: ./src/core/session.ts
  17712. /*
  17713. * encantar.js
  17714. * GPU-accelerated Augmented Reality for the web
  17715. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17716. *
  17717. * This program is free software: you can redistribute it and/or modify
  17718. * it under the terms of the GNU Lesser General Public License as published
  17719. * by the Free Software Foundation, either version 3 of the License, or
  17720. * (at your option) any later version.
  17721. *
  17722. * This program is distributed in the hope that it will be useful,
  17723. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17724. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17725. * GNU Lesser General Public License for more details.
  17726. *
  17727. * You should have received a copy of the GNU Lesser General Public License
  17728. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17729. *
  17730. * session.ts
  17731. * WebAR Session
  17732. */
  17733. /** An event emitted by a Session */
  17734. class SessionEvent extends AREvent {
  17735. }
  17736. /** Default options when starting a session */
  17737. const DEFAULT_OPTIONS = {
  17738. mode: 'immersive',
  17739. trackers: [],
  17740. sources: [],
  17741. viewport: null,
  17742. stats: false,
  17743. gizmos: false,
  17744. };
  17745. /**
  17746. * A Session represents an intent to display AR content
  17747. * and encapsulates the main loop (update-render cycle)
  17748. */
  17749. class Session extends AREventTarget {
  17750. /**
  17751. * Constructor
  17752. * @param sources previously initialized sources of data
  17753. * @param mode session mode
  17754. * @param viewport viewport
  17755. * @param stats render stats panel?
  17756. * @param gizmos render gizmos?
  17757. */
  17758. constructor(sources, mode, viewport, stats, gizmos) {
  17759. super();
  17760. this._mode = mode;
  17761. this._trackers = [];
  17762. this._sources = sources;
  17763. this._updateStats = new Stats();
  17764. this._renderStats = new Stats();
  17765. this._active = true;
  17766. this._frameReady = true; // no trackers at the moment
  17767. this._rafQueue = [];
  17768. this._time = new TimeManager();
  17769. this._gizmos = new Gizmos();
  17770. this._gizmos.visible = gizmos;
  17771. // validate the mode
  17772. if (mode != 'immersive' && mode != 'inline')
  17773. throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
  17774. // find the primary source of data
  17775. this._primarySource = this._findPrimarySource(sources);
  17776. // setup the viewport
  17777. this._viewport = viewport;
  17778. if (this._primarySource !== null)
  17779. this._viewport._init(() => this._primarySource._internalMedia.size, mode);
  17780. else
  17781. this._viewport._init(() => Utils.resolution('sm', window.innerWidth / window.innerHeight), mode);
  17782. // setup the main loop
  17783. this._setupUpdateLoop();
  17784. this._setupRenderLoop();
  17785. // setup the stats panel
  17786. this._statsPanel = new StatsPanel(this._viewport);
  17787. this._statsPanel.visible = stats;
  17788. // done!
  17789. Session._count++;
  17790. Utils.log(`The ${mode} session is now active!`);
  17791. }
  17792. /**
  17793. * Checks if the engine can be run in the browser the client is using
  17794. * @returns true if the engine is compatible with the browser
  17795. */
  17796. static isSupported() {
  17797. //alert(Utils.deviceInfo()); // debug
  17798. // If Safari / iOS, require version 15.2 or later
  17799. if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
  17800. /*
  17801. iOS compatibility
  17802. -----------------
  17803. The engine is known to work on iPhone 8 or later, with iOS 15.2 or
  17804. later. Tested on many devices, including iPads, on the cloud.
  17805. The engine crashes on an iPhone 13 Pro Max with iOS 15.1 and on an
  17806. iPhone 12 Pro with iOS 15.0.2. A (valid) shader from speedy-vision
  17807. version 0.9.1 (bf-knn) fails to compile: "WebGL error. Program has
  17808. not been successfully linked".
  17809. The engine freezes on an older iPhone 6S (2015) with iOS 15.8.2.
  17810. The exact cause is unknown, but it happens when training an image
  17811. tracker, at ImageTrackerTrainingState._gpuUpdate() (a WebGL error?
  17812. a hardware limitation?)
  17813. Successfully tested down to iPhone 8 so far.
  17814. Successfully tested down to iOS 15.2.
  17815. >> WebGL2 support was introduced in Safari 15 <<
  17816. Note: the webp image format used in the demos is supported on
  17817. Safari for iOS 14+. Desktop Safari 14-15.6 supports webp, but
  17818. requires macOS 11 Big Sur or later. https://caniuse.com/webp
  17819. */
  17820. const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
  17821. const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
  17822. const matches = safari || ios; // match safari first (min version)
  17823. if (matches !== null) {
  17824. const version = matches[3] || '0.0';
  17825. const [x, y] = version.split(/[\._]/).map(v => parseInt(v) | 0);
  17826. if ((x < 15) || (x == 15 && y < 2)) {
  17827. Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
  17828. return false;
  17829. }
  17830. // XXX reject older iPhone models? Which ones?
  17831. /*if(navigator.userAgent.includes('iPhone')) {
  17832. // detect screen size?
  17833. }*/
  17834. }
  17835. else
  17836. Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
  17837. }
  17838. // Android: reject very old / weak devices?
  17839. // XXX establish criteria?
  17840. /*if(Utils.isAndroid()) {
  17841. }*/
  17842. // Check if WebGL2 and WebAssembly are supported
  17843. return speedy_vision_default().isSupported();
  17844. }
  17845. /**
  17846. * Instantiate a session
  17847. * @param options options
  17848. * @returns a promise that resolves to a new session
  17849. */
  17850. static instantiate(options = DEFAULT_OPTIONS) {
  17851. const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
  17852. Utils.log(`Starting a new ${mode} session...`);
  17853. return speedy_vision_default().Promise.resolve().then(() => {
  17854. // is the engine supported?
  17855. if (!Session.isSupported())
  17856. throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with encantar.js');
  17857. // block multiple immersive sessions
  17858. if (mode !== 'inline' && Session.count > 0)
  17859. throw new IllegalOperationError(`Can't start more than one immersive session`);
  17860. // initialize matrix routines
  17861. return speedy_vision_default().Matrix.ready();
  17862. }).then(() => {
  17863. // validate sources of data
  17864. for (let i = sources.length - 1; i >= 0; i--) {
  17865. if (sources.indexOf(sources[i]) < i)
  17866. throw new IllegalArgumentError(`Found repeated sources of data`);
  17867. }
  17868. // initialize sources of data
  17869. return speedy_vision_default().Promise.all(sources.map(source => source._init()));
  17870. }).then(() => {
  17871. // get the viewport
  17872. if (!viewport)
  17873. throw new IllegalArgumentError(`Can't create a session without a viewport`);
  17874. // instantiate session
  17875. return new Session(sources, mode, viewport, stats, gizmos);
  17876. }).then(session => {
  17877. // validate trackers
  17878. if (trackers.length == 0)
  17879. Utils.warning(`No trackers have been attached to the session!`);
  17880. for (let i = trackers.length - 1; i >= 0; i--) {
  17881. if (trackers.indexOf(trackers[i]) < i)
  17882. throw new IllegalArgumentError(`Found repeated trackers`);
  17883. }
  17884. // attach trackers and return the session
  17885. return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session).catch(err => { throw err; });
  17886. }).catch(err => {
  17887. // log errors, if any
  17888. Utils.error(`Can't start session: ${err.message}`);
  17889. throw err;
  17890. });
  17891. }
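/*
   Usage sketch (illustrative; `videoSource`, `imageTracker` and `viewport` are assumed
   to have been created beforehand with the appropriate factories):

   Session.instantiate({
       mode: 'immersive',
       sources: [ videoSource ],
       trackers: [ imageTracker ],
       viewport: viewport,
       stats: true,       // show the stats panel
       gizmos: true       // render debugging gizmos
   }).then(session => {
       // the session is ready; set up an animation loop, etc.
   });
*/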
  17892. /**
  17893. * Number of active sessions
  17894. */
  17895. static get count() {
  17896. return this._count;
  17897. }
  17898. /**
  17899. * End the session
  17900. * @returns promise that resolves after the session is shut down
  17901. */
  17902. end() {
  17903. // is the session inactive?
  17904. if (!this._active)
  17905. return speedy_vision_default().Promise.resolve();
  17906. // deactivate the session
  17907. Utils.log('Shutting down the session...');
  17908. this._active = false; // set before wait()
  17909. // wait a few ms, so that the GPU is no longer sending any data
  17910. // then, release resources
  17911. return Utils.wait(100).then(() => speedy_vision_default().Promise.all(
  17912. // release trackers
  17913. this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
  17914. // release input sources
  17915. this._sources.map(source => source._release()))).then(() => {
  17916. this._sources.length = 0;
  17917. this._trackers.length = 0;
  17918. // release internal components
  17919. this._updateStats.reset();
  17920. this._renderStats.reset();
  17921. this._statsPanel.release();
  17922. this._viewport._release();
  17923. // end the session
  17924. Session._count--;
  17925. // dispatch event
  17926. const event = new SessionEvent('end');
  17927. this.dispatchEvent(event);
  17928. // done!
  17929. Utils.log('Session ended.');
  17930. });
  17931. }
  17932. /**
  17933. * Analogous to window.requestAnimationFrame()
  17934. * @param callback
  17935. * @returns a handle
  17936. */
  17937. requestAnimationFrame(callback) {
  17938. const handle = Symbol('raf-handle');
  17939. if (this._active) {
  17940. this._rafQueue.push([handle, callback]);
  17941. }
  17942. else {
  17943. // if the session is inactive, we simply ignore this call
  17944. // this is friendly behavior, since RAF is used in animation loops
  17945. }
  17946. return handle;
  17947. }
  17948. /**
  17949. * Analogous to window.cancelAnimationFrame()
  17950. * @param handle a handle returned by this.requestAnimationFrame()
  17951. */
  17952. cancelAnimationFrame(handle) {
  17953. for (let i = this._rafQueue.length - 1; i >= 0; i--) {
  17954. if (this._rafQueue[i][0] === handle) {
  17955. this._rafQueue.splice(i, 1);
  17956. break;
  17957. }
  17958. }
  17959. }
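/*
   Sketch (illustrative): cancelling a previously scheduled callback.

   const handle = session.requestAnimationFrame(animate);
   session.cancelAnimationFrame(handle);  // animate() won't be called with this handle
*/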
  17960. /**
  17961. * Session mode
  17962. */
  17963. get mode() {
  17964. return this._mode;
  17965. }
  17966. /**
  17967. * Whether or not the session has been ended
  17968. */
  17969. get ended() {
  17970. return !this._active;
  17971. }
  17972. /**
  17973. * Time Manager
  17974. */
  17975. get time() {
  17976. return this._time;
  17977. }
  17978. /**
  17979. * Visual cues for testing & debugging
  17980. */
  17981. get gizmos() {
  17982. return this._gizmos;
  17983. }
  17984. /**
  17985. * Rendering viewport
  17986. */
  17987. get viewport() {
  17988. return this._viewport;
  17989. }
  17990. /**
  17991. * Attached trackers
  17992. */
  17993. get trackers() {
  17994. return this._trackers[Symbol.iterator]();
  17995. }
  17996. /**
  17997. * Sources of data
  17998. */
  17999. get sources() {
  18000. return this._sources[Symbol.iterator]();
  18001. }
  18002. /**
  18003. * Find the primary source of data (generally a camera stream)
  18004. * @param sources
  18005. * @returns the primary source, or null if there isn't any
  18006. */
  18007. _findPrimarySource(sources) {
  18008. // prefer video sources
  18009. for (let i = 0; i < sources.length; i++) {
  18010. if (sources[i]._type == 'video')
  18011. return sources[i];
  18012. }
  18013. for (let i = 0; i < sources.length; i++) {
  18014. if (sources[i]._type == 'canvas')
  18015. return sources[i];
  18016. }
  18017. // emit warning
  18018. Utils.warning(`No primary source of data was found!`);
  18019. return null;
  18020. }
  18021. /**
  18022. * Attach a tracker to the session
  18023. * @param tracker
  18024. * @returns a promise that resolves as soon as the tracker is attached and initialized
  18025. */
  18026. _attachTracker(tracker) {
  18027. if (this._trackers.indexOf(tracker) >= 0)
  18028. return speedy_vision_default().Promise.reject(new IllegalArgumentError(`Duplicate tracker attached to the session`));
  18029. else if (!this._active)
  18030. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Inactive session`));
  18031. this._trackers.push(tracker);
  18032. return tracker._init(this);
  18033. }
  18034. /**
  18035. * Render content to the background canvas
  18036. */
  18037. _renderBackground() {
  18038. const canvas = this._viewport._backgroundCanvas;
  18039. const ctx = canvas.getContext('2d', { alpha: false });
  18040. // error?
  18041. if (!ctx)
  18042. return;
  18043. ctx.imageSmoothingEnabled = false;
  18044. // render user media
  18045. if (this._primarySource !== null) {
  18046. const media = this._primarySource._internalMedia;
  18047. this._renderMedia(ctx, media, true);
  18048. }
  18049. // render output image(s) for debugging
  18050. for (let i = 0; i < this._trackers.length; i++) {
  18051. const media = this._trackers[i]._output.image;
  18052. if (media !== undefined)
  18053. this._renderMedia(ctx, media, false);
  18054. }
  18055. // render gizmos
  18056. this._gizmos._render(this._viewport, this._trackers);
  18057. }
  18058. /**
  18059. * Render a SpeedyMedia
  18060. * @param ctx rendering context
  18061. * @param media
  18062. * @param stretch
  18063. */
  18064. _renderMedia(ctx, media, stretch) {
  18065. const canvas = ctx.canvas;
  18066. const width = stretch ? canvas.width : media.width;
  18067. const height = stretch ? canvas.height : media.height;
  18068. if (media.type != 'data') {
  18069. const image = media.source;
  18070. ctx.drawImage(image, 0, 0, width, height);
  18071. }
  18072. else {
  18073. const image = media.source;
  18074. ctx.putImageData(image, 0, 0, 0, 0, width, height);
  18075. }
  18076. }
  18077. /**
  18078. * Setup the update loop
  18079. */
  18080. _setupUpdateLoop() {
  18081. const scheduleNextFrame = () => {
  18082. if (this._active) {
  18083. if (Settings.powerPreference == 'high-performance')
  18084. asap(repeat);
  18085. else
  18086. window.requestAnimationFrame(repeat);
  18087. }
  18088. };
  18089. const update = () => {
  18090. this._update().then(scheduleNextFrame).turbocharge();
  18091. };
  18092. function repeat() {
  18093. if (Settings.powerPreference == 'low-power') // 30 fps
  18094. window.requestAnimationFrame(update);
  18095. else
  18096. update();
  18097. }
  18098. window.requestAnimationFrame(update);
  18099. }
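/*
   Scheduling summary, as implemented above:
   - 'high-performance': the next update is scheduled with asap() (postMessage),
     decoupling the update loop from the display refresh rate;
   - 'default': one update per window.requestAnimationFrame() (typically ~60 fps);
   - 'low-power': two chained requestAnimationFrame() calls per update (~30 fps).
*/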
  18100. /**
  18101. * The core of the update loop
  18102. */
  18103. _update() {
  18104. // active session?
  18105. if (this._active) {
  18106. return speedy_vision_default().Promise.all(
  18107. // update trackers
  18108. this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
  18109. // update internals
  18110. this._updateStats.update();
  18111. this._frameReady = true;
  18112. }).catch(err => {
  18113. // log error
  18114. Utils.error('Tracking error: ' + err.toString(), err);
  18115. // handle WebGL errors
  18116. const cause = err.cause;
  18117. if (err.name == 'GLError') {
  18118. alert(err.message); // fatal error?
  18119. alert(Utils.deviceInfo()); // display useful info
  18120. throw err;
  18121. }
18122. else if (cause !== null && typeof cause == 'object' && cause.name == 'GLError') {
  18123. alert(err.message);
  18124. alert(cause.message);
  18125. alert(Utils.deviceInfo());
  18126. throw err;
  18127. }
  18128. });
  18129. }
  18130. else {
  18131. // inactive session
  18132. this._updateStats.reset();
  18133. return speedy_vision_default().Promise.resolve();
  18134. }
  18135. }
  18136. /**
  18137. * Setup the render loop
  18138. */
  18139. _setupRenderLoop() {
  18140. let skip = false, toggle = false;
  18141. const render = (timestamp) => {
  18142. const enableFrameSkipping = (Settings.powerPreference == 'low-power');
  18143. const highPerformance = (Settings.powerPreference == 'high-performance');
  18144. // advance time
  18145. this._time._update(timestamp);
  18146. // skip frames
  18147. if (!enableFrameSkipping || !(skip = !skip))
  18148. this._render(timestamp, false);
  18149. //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
  18150. // repeat
  18151. if (this._active)
  18152. window.requestAnimationFrame(render);
  18153. };
  18154. window.requestAnimationFrame(render);
  18155. }
  18156. /**
  18157. * Render a frame (RAF callback)
  18158. * @param time current time, in ms
18159. * @param skipUserMedia skip copying the pixels of the user media to the background canvas, in order to reduce the processing load (the video stream probably runs at ~30 fps)
  18160. */
  18161. _render(time, skipUserMedia) {
  18162. // is the session active?
  18163. if (this._active) {
  18164. // are we ready to render a frame?
  18165. if (this._frameReady) {
  18166. // create a frame
  18167. const results = this._trackers.map(tracker => tracker._output.exports || ({
  18168. tracker: tracker,
  18169. trackables: [],
  18170. }));
  18171. const frame = new Frame(this, results);
  18172. // clone & clear the RAF queue
  18173. const rafQueue = this._rafQueue.slice(0);
  18174. this._rafQueue.length = 0;
  18175. // render content to the background canvas
  18176. if (!skipUserMedia)
  18177. this._renderBackground();
  18178. // render frame
  18179. for (let i = 0; i < rafQueue.length; i++)
  18180. rafQueue[i][1].call(undefined, time, frame);
  18181. // update internals
  18182. this._renderStats.update();
  18183. this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
  18184. this._frameReady = false;
  18185. }
  18186. else {
  18187. // skip frame
  18188. ;
  18189. // we'll update the renderStats even if we skip the frame,
  18190. // otherwise this becomes updateStats! (approximately)
  18191. // This is a window.requestAnimationFrame() call, so the
  18192. // browser is rendering content even if we're not.
  18193. this._renderStats.update();
  18194. }
  18195. }
  18196. else {
  18197. // inactive session
  18198. this._renderStats.reset();
  18199. }
  18200. }
  18201. }
  18202. /** Number of active sessions */
  18203. Session._count = 0;
  18204. ;// CONCATENATED MODULE: ./src/core/settings.ts
  18205. /*
  18206. * encantar.js
  18207. * GPU-accelerated Augmented Reality for the web
  18208. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18209. *
  18210. * This program is free software: you can redistribute it and/or modify
  18211. * it under the terms of the GNU Lesser General Public License as published
  18212. * by the Free Software Foundation, either version 3 of the License, or
  18213. * (at your option) any later version.
  18214. *
  18215. * This program is distributed in the hope that it will be useful,
  18216. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18217. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18218. * GNU Lesser General Public License for more details.
  18219. *
  18220. * You should have received a copy of the GNU Lesser General Public License
  18221. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18222. *
  18223. * settings.ts
  18224. * Global Settings
  18225. */
  18226. /**
  18227. * Global Settings
  18228. */
  18229. class Settings {
  18230. /**
  18231. * Power preference (may impact performance x battery life)
  18232. */
  18233. static get powerPreference() {
  18234. return this._powerPreference;
  18235. }
  18236. /**
  18237. * Power preference (may impact performance x battery life)
  18238. * Note: this setting should be the very first thing you set
  18239. * (before the WebGL context is created by Speedy)
  18240. */
  18241. static set powerPreference(value) {
  18242. // validate
  18243. if (Session.count > 0)
  18244. throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
  18245. else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
  18246. throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
  18247. /*
  18248. // we won't use 'high-performance' for Speedy's GPU computations
  18249. // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
  18250. // also, it seems like low-power mode may break WebGL2 in some drivers?!
  18251. if(value == 'high-performance')
  18252. Speedy.Settings.powerPreference = 'default';
  18253. else
  18254. Speedy.Settings.powerPreference = value;
  18255. */
  18256. // change the GPU polling mode
  18257. if (value == 'high-performance')
  18258. (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
  18259. else
  18260. (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
  18261. // update the power preference
  18262. this._powerPreference = value;
  18263. // log
  18264. Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
  18265. }
  18266. }
  18267. Settings._powerPreference = 'default';
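/*
   Usage sketch (illustrative): the power preference must be set before any session is
   started, i.e., before Speedy creates its WebGL context.

   Settings.powerPreference = 'high-performance';   // or 'low-power' | 'default'
   // ...only then instantiate a Session
*/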
  18268. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image.ts
  18269. /*
  18270. * encantar.js
  18271. * GPU-accelerated Augmented Reality for the web
  18272. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18273. *
  18274. * This program is free software: you can redistribute it and/or modify
  18275. * it under the terms of the GNU Lesser General Public License as published
  18276. * by the Free Software Foundation, either version 3 of the License, or
  18277. * (at your option) any later version.
  18278. *
  18279. * This program is distributed in the hope that it will be useful,
  18280. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18281. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18282. * GNU Lesser General Public License for more details.
  18283. *
  18284. * You should have received a copy of the GNU Lesser General Public License
  18285. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18286. *
  18287. * reference-image.ts
  18288. * Reference Image for tracking
  18289. */
  18290. /**
  18291. * A ReferenceImage decorated with a SpeedyMedia
  18292. */
  18293. class ReferenceImageWithMedia {
  18294. /**
  18295. * Constructor
  18296. * @param referenceImage
  18297. * @param media
  18298. */
  18299. constructor(referenceImage, media) {
  18300. this._referenceImage = Object.assign({}, referenceImage);
  18301. this._media = media;
  18302. // generate a unique name if none is given
  18303. if (this._referenceImage.name === undefined)
  18304. this._referenceImage.name = this._generateUniqueName();
  18305. // store the aspect ratio
  18306. this._aspectRatio = media.width / media.height;
  18307. }
  18308. /**
  18309. * Getter of the name of the reference image
  18310. */
  18311. get name() {
  18312. return this._referenceImage.name;
  18313. }
  18314. /**
  18315. * Setter of the name of the reference image
  18316. */
  18317. set name(name) {
  18318. this._referenceImage.name = name;
  18319. }
  18320. /**
  18321. * Image data
  18322. */
  18323. get image() {
  18324. return this._referenceImage.image;
  18325. }
  18326. /**
  18327. * A SpeedyMedia corresponding to the reference media
  18328. */
  18329. get media() {
  18330. return this._media;
  18331. }
  18332. /**
  18333. * The aspect ratio of the reference image
  18334. */
  18335. get aspectRatio() {
  18336. return this._aspectRatio;
  18337. }
  18338. /**
  18339. * Generate a unique name for a reference image
  18340. * @returns a unique name
  18341. */
  18342. _generateUniqueName() {
  18343. return 'target-' + Math.random().toString(16).substr(2);
  18344. }
  18345. }
  18346. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
  18347. /*
  18348. * encantar.js
  18349. * GPU-accelerated Augmented Reality for the web
  18350. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18351. *
  18352. * This program is free software: you can redistribute it and/or modify
  18353. * it under the terms of the GNU Lesser General Public License as published
  18354. * by the Free Software Foundation, either version 3 of the License, or
  18355. * (at your option) any later version.
  18356. *
  18357. * This program is distributed in the hope that it will be useful,
  18358. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18359. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18360. * GNU Lesser General Public License for more details.
  18361. *
  18362. * You should have received a copy of the GNU Lesser General Public License
  18363. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18364. *
  18365. * reference-image-database.ts
  18366. * A collection of Reference Images
  18367. */
  18368. /** Default capacity of a Reference Image Database */
  18369. const DEFAULT_CAPACITY = 100; // this number should exceed normal usage
  18370. // XXX this number may be changed (is 100 too conservative?)
  18371. // further testing is needed to verify the appropriateness of this number;
  18372. // it depends on the images, on the keypoint descriptors, and even on the target devices
  18373. /**
  18374. * A collection of Reference Images
  18375. */
  18376. class ReferenceImageDatabase {
  18377. /**
  18378. * Constructor
  18379. */
  18380. constructor() {
  18381. this._capacity = DEFAULT_CAPACITY;
  18382. this._entries = new Map();
  18383. this._locked = false;
  18384. }
  18385. /**
  18386. * The number of reference images stored in this database
  18387. */
  18388. get count() {
  18389. return this._entries.size;
  18390. }
  18391. /**
  18392. * Maximum number of elements
  18393. */
  18394. get capacity() {
  18395. return this._capacity;
  18396. }
  18397. /**
  18398. * Maximum number of elements
  18399. * Increasing the capacity is considered experimental
  18400. */
  18401. set capacity(value) {
  18402. const capacity = Math.max(0, value | 0);
  18403. if (this.count > capacity)
  18404. throw new IllegalArgumentError(`Can't set the capacity of the database to ${capacity}: it currently stores ${this.count} entries`);
  18405. this._capacity = capacity;
  18406. }
  18407. /**
  18408. * Iterates over the collection
  18409. */
  18410. [Symbol.iterator]() {
  18411. return this._entries.values();
  18412. }
  18413. /**
  18414. * Add reference images to this database
  18415. * Add only the images you actually need to track!
18416. * (each image takes up storage space)
  18417. * @param referenceImages one or more reference images with unique names (a unique name will
  18418. * be generated automatically if you don't specify one)
  18419. * @returns a promise that resolves as soon as the images are loaded and added to this database
  18420. */
  18421. add(referenceImages) {
  18422. return this._preloadMany(referenceImages).then(referenceImagesWithMedia => {
  18423. referenceImagesWithMedia.forEach(referenceImageWithMedia => {
  18424. this._addOne(referenceImageWithMedia);
  18425. });
  18426. });
  18427. }
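/*
   Usage sketch (illustrative names; `db` is a ReferenceImageDatabase and `img` is an
   HTMLImageElement, ImageBitmap or ImageData):

   db.add([
       { name: 'my-target', image: img },
       { image: anotherImg }   // a unique name will be generated automatically
   ]).then(() => {
       // the images are loaded and ready for tracking
   });
*/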
  18428. /**
  18429. * Add a single preloaded reference image to the database
  18430. * @param referenceImage
  18431. */
  18432. _addOne(referenceImage) {
  18433. const name = referenceImage.name;
  18434. // locked database?
  18435. if (this._locked)
  18436. throw new IllegalOperationError(`Can't add reference image "${name}" to the database: it's locked`);
  18437. // reached full capacity?
  18438. if (this.count >= this.capacity)
  18439. throw new IllegalOperationError(`Can't add reference image "${name}" to the database: the capacity of ${this.capacity} images has been exceeded.`);
  18440. // check if the image is valid
  18441. if (!(referenceImage.image instanceof HTMLImageElement) &&
  18442. !(referenceImage.image instanceof ImageBitmap) &&
  18443. !(referenceImage.image instanceof ImageData))
  18444. throw new IllegalArgumentError(`Can't add reference image "${name}" to the database: invalid image`);
  18445. // check for duplicate names
  18446. if (this._entries.has(name))
  18447. throw new IllegalArgumentError(`Can't add reference image "${name}" to the database: found duplicated name`);
  18448. // add the reference image to the database
  18449. Utils.log(`Adding reference image "${name}" to the database...`);
  18450. this._entries.set(name, referenceImage);
  18451. }
  18452. /**
  18453. * Lock the database, so that new reference images can no longer be added to it
  18454. * @internal
  18455. */
  18456. _lock() {
  18457. this._locked = true;
  18458. }
  18459. /**
  18460. * Get reference image by name
  18461. * @param name
  18462. * @returns the reference image with the given name, or null if there isn't any
  18463. * @internal
  18464. */
  18465. _find(name) {
  18466. return this._entries.get(name) || null;
  18467. }
  18468. /**
  18469. * Load a reference image
  18470. * @param referenceImage
  18471. * @returns a promise that resolves to a corresponding ReferenceImageWithMedia
  18472. */
  18473. _preloadOne(referenceImage) {
  18474. if (referenceImage.name !== undefined)
  18475. Utils.log(`Loading reference image \"${referenceImage.name}\"...`);
  18476. else
  18477. Utils.log(`Loading reference image...`);
  18478. if (!referenceImage.image)
  18479. return speedy_vision_default().Promise.reject(new IllegalArgumentError('The reference image was not provided!'));
  18480. return speedy_vision_default().load(referenceImage.image).then(media => {
  18481. return new ReferenceImageWithMedia(referenceImage, media);
  18482. });
  18483. }
  18484. /**
  18485. * Load multiple reference images
  18486. * @param referenceImages
  18487. * @returns a promise that resolves to corresponding ReferenceImageWithMedia objects
  18488. */
  18489. _preloadMany(referenceImages) {
  18490. const n = referenceImages.length;
  18491. Utils.log(`Loading ${n} reference image${n != 1 ? 's' : ''}...`);
  18492. const promises = referenceImages.map(referenceImage => this._preloadOne(referenceImage));
  18493. return speedy_vision_default().Promise.all(promises);
  18494. }
  18495. }
  18496. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
  18497. /*
  18498. * encantar.js
  18499. * GPU-accelerated Augmented Reality for the web
  18500. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18501. *
  18502. * This program is free software: you can redistribute it and/or modify
  18503. * it under the terms of the GNU Lesser General Public License as published
  18504. * by the Free Software Foundation, either version 3 of the License, or
  18505. * (at your option) any later version.
  18506. *
  18507. * This program is distributed in the hope that it will be useful,
  18508. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18509. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18510. * GNU Lesser General Public License for more details.
  18511. *
  18512. * You should have received a copy of the GNU Lesser General Public License
  18513. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18514. *
  18515. * state.ts
  18516. * Abstract state of the Image Tracker
  18517. */
  18518. /**
  18519. * Abstract state of the Image Tracker
  18520. */
  18521. class ImageTrackerState {
  18522. /**
  18523. * Constructor
  18524. * @param name
  18525. * @param imageTracker
  18526. */
  18527. constructor(name, imageTracker) {
  18528. this._name = name;
  18529. this._imageTracker = imageTracker;
  18530. this._pipeline = this._createPipeline();
  18531. this._pipelineReleased = false;
  18532. }
  18533. /**
  18534. * State name
  18535. */
  18536. get name() {
  18537. return this._name;
  18538. }
  18539. /**
  18540. * AR screen size
  18541. * It may change over time, as when flipping a phone
  18542. */
  18543. get screenSize() {
  18544. const screen = this._pipeline.node('screen');
  18545. if (!screen)
  18546. throw new IllegalOperationError();
  18547. // this is available once this state has run at least once
  18548. return screen.size;
  18549. }
  18550. /**
  18551. * Initialize the state
  18552. */
  18553. init() {
  18554. }
  18555. /**
  18556. * Release resources
  18557. */
  18558. release() {
  18559. if (!this._pipelineReleased) {
  18560. this._pipeline.release();
  18561. this._pipelineReleased = true;
  18562. }
  18563. return null;
  18564. }
  18565. /**
  18566. * Update the state
  18567. * @param media user media
  18568. * @param screenSize AR screen size for image processing
  18570. * @returns promise
  18571. */
  18572. update(media, screenSize) {
  18573. const source = this._pipeline.node('source');
  18574. const screen = this._pipeline.node('screen');
  18575. // validate the pipeline
  18576. if (!source || !screen)
  18577. throw new IllegalOperationError();
  18578. // prepare the pipeline
  18579. source.media = media;
  18580. screen.size = screenSize;
  18581. // run the pipeline
  18582. return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
  18583. }
  18584. /**
  18585. * Called as soon as this becomes the active state, just before update() runs for the first time
  18586. * @param settings
  18587. */
  18588. onEnterState(settings) {
  18589. }
  18590. /**
  18591. * Called when leaving the state, after update()
  18592. */
  18593. onLeaveState() {
  18594. }
  18595. /**
  18596. * Called just before the GPU processing
  18597. * @returns promise
  18598. */
  18599. _beforeUpdate() {
  18600. return speedy_vision_default().Promise.resolve();
  18601. }
  18602. /**
  18603. * GPU processing
  18604. * @returns promise with the pipeline results
  18605. */
  18606. _gpuUpdate() {
  18607. return this._pipeline.run();
  18608. }
  18609. }
  18610. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
  18611. /*
  18612. * encantar.js
  18613. * GPU-accelerated Augmented Reality for the web
  18614. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18615. *
  18616. * This program is free software: you can redistribute it and/or modify
  18617. * it under the terms of the GNU Lesser General Public License as published
  18618. * by the Free Software Foundation, either version 3 of the License, or
  18619. * (at your option) any later version.
  18620. *
  18621. * This program is distributed in the hope that it will be useful,
  18622. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18623. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18624. * GNU Lesser General Public License for more details.
  18625. *
  18626. * You should have received a copy of the GNU Lesser General Public License
  18627. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18628. *
  18629. * initial.ts
  18630. * Initial state of the Image Tracker
  18631. */
  18632. /**
  18633. * The purpose of the initial state of the Image Tracker
  18634. * is to initialize the training state using the state machine
  18635. */
  18636. class ImageTrackerInitialState extends ImageTrackerState {
  18637. /**
  18638. * Constructor
  18639. * @param imageTracker
  18640. */
  18641. constructor(imageTracker) {
  18642. super('initial', imageTracker);
  18643. }
  18644. /**
  18645. * Called just before the GPU processing
  18646. * @returns promise
  18647. */
  18648. _beforeUpdate() {
  18649. const source = this._pipeline.node('source');
  18650. const media = source.media;
  18651. const mediaSize = media.size;
  18652. if (mediaSize.area() < this.screenSize.area())
  18653. Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
  18654. return speedy_vision_default().Promise.resolve();
  18655. }
  18656. /**
  18657. * Post processing that takes place just after the GPU processing
  18658. * @param result pipeline results
  18659. * @returns state output
  18660. */
  18661. _afterUpdate(result) {
  18662. return speedy_vision_default().Promise.resolve({
  18663. nextState: 'training',
  18664. trackerOutput: {},
  18665. });
  18666. }
  18667. /**
  18668. * Called when leaving the state, after update()
  18669. */
  18670. onLeaveState() {
  18671. // we don't return to this state, so we can release the pipeline early
  18672. this._pipeline.release();
  18673. this._pipelineReleased = true;
  18674. }
  18675. /**
  18676. * Create & setup the pipeline
  18677. * @returns pipeline
  18678. */
  18679. _createPipeline() {
  18680. // this pipeline does nothing useful,
  18681. // but it does preload some shaders...
  18682. const pipeline = speedy_vision_default().Pipeline();
  18683. const source = speedy_vision_default().Image.Source('source');
  18684. const screen = speedy_vision_default().Transform.Resize('screen');
  18685. const greyscale = speedy_vision_default().Filter.Greyscale();
  18686. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
  18687. const nightvision = speedy_vision_default().Filter.Nightvision();
  18688. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  18689. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  18690. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  18691. const blur = speedy_vision_default().Filter.GaussianBlur();
  18692. const clipper = speedy_vision_default().Keypoint.Clipper();
  18693. const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
  18694. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  18695. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  18696. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  18697. const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
  18698. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
  18699. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
  18700. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  18701. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
  18702. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  18703. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
  18704. source.media = null;
  18705. screen.size = speedy_vision_default().Size(0, 0);
  18706. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  18707. nightvision.quality = NIGHTVISION_QUALITY;
  18708. subpixel.method = SUBPIXEL_METHOD;
  18709. //borderClipper.imageSize = screen.size;
  18710. borderClipper.imageSize = speedy_vision_default().Size(100, 100);
  18711. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  18712. matcher.k = 1; //2;
  18713. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  18714. keypointPortalSource.source = keypointPortalSink;
  18715. muxOfReferenceKeypoints.port = 0;
  18716. muxOfBufferOfReferenceKeypoints.port = 0;
  18717. bufferOfReferenceKeypoints.frozen = false;
  18718. keypointSink.turbo = false;
  18719. // prepare input
  18720. source.output().connectTo(screen.input());
  18721. screen.output().connectTo(greyscale.input());
  18722. // preprocess images
  18723. greyscale.output().connectTo(imageRectifier.input());
  18724. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  18725. imageRectifier.output().connectTo(nightvision.input());
  18726. nightvision.output().connectTo(nightvisionMux.input('in1'));
  18727. nightvisionMux.output().connectTo(blur.input());
  18728. // keypoint detection & clipping
  18729. nightvisionMux.output().connectTo(detector.input());
  18730. detector.output().connectTo(borderClipper.input());
  18731. borderClipper.output().connectTo(clipper.input());
  18732. // keypoint refinement
  18733. imageRectifier.output().connectTo(denoiser.input());
  18734. denoiser.output().connectTo(subpixel.input('image'));
  18735. clipper.output().connectTo(subpixel.input('keypoints'));
  18736. // keypoint description
  18737. blur.output().connectTo(descriptor.input('image'));
  18738. subpixel.output().connectTo(descriptor.input('keypoints'));
  18739. // keypoint matching
  18740. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  18741. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  18742. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  18743. descriptor.output().connectTo(matcher.input('keypoints'));
  18744. // store reference keypoints
  18745. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  18746. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  18747. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  18748. // portals
  18749. descriptor.output().connectTo(keypointPortalSink.input());
  18750. // prepare output
  18751. descriptor.output().connectTo(keypointRectifier.input());
  18752. keypointRectifier.output().connectTo(keypointSink.input());
  18753. matcher.output().connectTo(keypointSink.input('matches'));
  18754. // done!
  18755. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  18756. /*
  18757. const run = pipeline.run.bind(pipeline);
  18758. pipeline.run = function() {
  18759. console.time("TIME");
  18760. return run().then(x => {
  18761. console.timeEnd("TIME");
  18762. return x;
  18763. });
  18764. };
  18765. */
  18766. return pipeline;
  18767. }
  18768. }
  18769. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-utils.ts
  18770. /*
  18771. * encantar.js
  18772. * GPU-accelerated Augmented Reality for the web
  18773. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18774. *
  18775. * This program is free software: you can redistribute it and/or modify
  18776. * it under the terms of the GNU Lesser General Public License as published
  18777. * by the Free Software Foundation, either version 3 of the License, or
  18778. * (at your option) any later version.
  18779. *
  18780. * This program is distributed in the hope that it will be useful,
  18781. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18782. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18783. * GNU Lesser General Public License for more details.
  18784. *
  18785. * You should have received a copy of the GNU Lesser General Public License
  18786. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18787. *
  18788. * image-tracker-utils.ts
  18789. * Image Tracker: Utilities
  18790. */
  18791. /**
  18792. * Utilities for the Image Tracker
  18793. */
  18794. class ImageTrackerUtils {
  18795. /**
  18796. * Find a transformation that converts a raster space to NIS
  18797. * @param size size of the raster space
  18798. * @returns a 3x3 matrix
  18799. */
  18800. static rasterToNIS(size) {
  18801. const sx = NIS_SIZE / size.width;
  18802. const sy = NIS_SIZE / size.height;
  18803. return speedy_vision_default().Matrix(3, 3, [
  18804. sx, 0, 0,
  18805. 0, sy, 0,
  18806. 0, 0, 1
  18807. ]);
  18808. }
  18809. /**
  18810. * Find a transformation that converts a raster space to NDC
  18811. * @param size size of the raster space
  18812. * @returns a 3x3 matrix
  18813. */
  18814. static rasterToNDC(size) {
  18815. const w = size.width, h = size.height;
  18816. return speedy_vision_default().Matrix(3, 3, [
  18817. 2 / w, 0, 0,
  18818. 0, -2 / h, 0,
  18819. -1, 1, 1
  18820. ]);
  18821. }
  18822. /**
  18823. * Find a transformation that converts NDC to a raster space
  18824. * @param size size of the raster space
  18825. * @returns a 3x3 matrix
  18826. */
  18827. static NDCToRaster(size) {
  18828. const w = size.width, h = size.height;
  18829. return speedy_vision_default().Matrix(3, 3, [
  18830. w / 2, 0, 0,
  18831. 0, -h / 2, 0,
  18832. w / 2, h / 2, 1
  18833. ]);
  18834. }
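/*
   Sanity check of the two conversions above (entries are stored in
   column-major order, so the last column holds the translation):

   rasterToNDC:  (0, 0)   -> (-1, +1)   and   (w, h)   -> (+1, -1)
   NDCToRaster:  (-1, +1) -> (0, 0)     and   (+1, -1) -> (w, h)

   i.e., NDCToRaster(size) is the inverse of rasterToNDC(size), and the
   y-axis is flipped between raster space and NDC.
*/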
  18835. /**
  18836. * Find a transformation that scales points in NDC
  18837. * @param sx horizontal scale factor
  18838. * @param sy vertical scale factor
  18839. * @returns a 3x3 matrix
  18840. */
  18841. static scaleNDC(sx, sy = sx) {
  18842. // In NDC, the origin is at the center of the space!
  18843. return speedy_vision_default().Matrix(3, 3, [
  18844. sx, 0, 0,
  18845. 0, sy, 0,
  18846. 0, 0, 1
  18847. ]);
  18848. }
  18849. /**
  18850. * Find a scale transformation in NDC such that the output has a desired aspect ratio
  18851. * @param aspectRatio desired aspect ratio
  18852. * @param scale optional scale factor in both axes
  18853. * @returns a 3x3 matrix
  18854. */
  18855. static bestFitScaleNDC(aspectRatio, scale = 1) {
  18856. if (aspectRatio >= 1)
  18857. return this.scaleNDC(scale, scale / aspectRatio); // s/(s/a) = a, sx >= sy
  18858. else
  18859. return this.scaleNDC(scale * aspectRatio, scale); // (s*a)/s = a, sx < sy
  18860. }
  18861. /**
  18862. * Find the inverse matrix of bestFitScaleNDC()
  18863. * @param aspectRatio as given to bestFitScaleNDC()
  18864. * @param scale optional, as given to bestFitScaleNDC()
  18865. * @returns a 3x3 matrix
  18866. */
  18867. static inverseBestFitScaleNDC(aspectRatio, scale = 1) {
  18868. if (aspectRatio >= 1)
  18869. return this.scaleNDC(1 / scale, aspectRatio / scale);
  18870. else
  18871. return this.scaleNDC(1 / (scale * aspectRatio), 1 / scale);
  18872. }
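/*
   Illustrative numbers: with aspectRatio == 2 and scale == 1,
   bestFitScaleNDC(2) == scaleNDC(1, 0.5) and
   inverseBestFitScaleNDC(2) == scaleNDC(1, 2),
   so composing the two yields the identity, as expected.
*/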
  18873. /**
  18874. * Find the best-fit aspect ratio for the rectification of the reference image in NDC
  18875. * @param screenSize
  18876. * @param referenceImage
  18877. * @returns a best-fit aspect ratio
  18878. */
  18879. static bestFitAspectRatioNDC(screenSize, referenceImage) {
  18880. /*
  18881. The best-fit aspectRatio (a) is constructed as follows:
  18882. 1) a fully stretched(*) and distorted reference image in NDC:
  18883. a = 1
  18884. 2) a square in NDC:
  18885. a = 1 / screenAspectRatio
  18886. 3) an image with the aspect ratio of the reference image in NDC:
  18887. a = referenceImageAspectRatio * (1 / screenAspectRatio)
  18888. (*) AR screen space
  18889. By transforming the reference image twice, first by converting it to AR
  18890. screen space, and then by rectifying it, we lose a little bit of quality.
18891. Nothing to be too concerned about, though.
  18892. */
  18893. const screenAspectRatio = screenSize.width / screenSize.height;
  18894. return referenceImage.aspectRatio / screenAspectRatio;
  18895. }
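/*
   Example with illustrative numbers only: for a 1280x720 screen
   (screenAspectRatio ≈ 1.78) and a reference image with aspectRatio == 0.75,
   the best-fit aspect ratio is 0.75 / 1.78 ≈ 0.42. The states below feed this
   value to bestFitScaleNDC() when rectifying the image.
*/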
  18896. /**
  18897. * Given n > 0 pairs (src_i, dest_i) of keypoints in NIS,
  18898. * convert them to NDC and output a 2 x 2n matrix of the form:
  18899. * [ src_0.x src_1.x ... | dest_0.x dest_1.x ... ]
  18900. * [ src_0.y src_1.y ... | dest_0.y dest_1.y ... ]
  18901. * @param pairs pairs of keypoints in NIS
  18902. * @returns 2 x 2n matrix with two 2 x n blocks: [ src | dest ]
18903. * @throws {IllegalArgumentError} if pairs is empty
  18904. */
  18905. static compilePairsOfKeypointsNDC(pairs) {
  18906. const n = pairs.length;
  18907. if (n == 0)
  18908. throw new IllegalArgumentError();
  18909. const scale = 2 / NIS_SIZE;
  18910. const data = new Array(2 * 2 * n);
  18911. for (let i = 0, j = 0, k = 2 * n; i < n; i++, j += 2, k += 2) {
  18912. const src = pairs[i][0];
  18913. const dest = pairs[i][1];
  18914. data[j] = src.x * scale - 1; // convert from NIS to NDC
  18915. data[j + 1] = 1 - src.y * scale; // flip y-axis
  18916. data[k] = dest.x * scale - 1;
  18917. data[k + 1] = 1 - dest.y * scale;
  18918. }
  18919. return speedy_vision_default().Matrix(2, 2 * n, data);
  18920. }
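/*
   A small sketch of the conversion above: with NIS coordinates in
   [0, NIS_SIZE], the corners map as (0, 0) -> (-1, +1) and
   (NIS_SIZE, NIS_SIZE) -> (+1, -1). For n == 2 pairs, the resulting matrix
   (column-major storage) reads:

   [ src_0.x  src_1.x  dest_0.x  dest_1.x ]
   [ src_0.y  src_1.y  dest_0.y  dest_1.y ]   (all entries in NDC)
*/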
  18921. /**
  18922. * Given n > 0 pairs of keypoints in NDC as a 2 x 2n [ src | dest ] matrix,
  18923. * find a perspective warp (homography) from src to dest in NDC
  18924. * @param points compiled pairs of keypoints in NDC
  18925. * @param options to be passed to speedy-vision
  18926. * @returns a pair [ 3x3 transformation matrix, quality score ]
  18927. */
  18928. static findPerspectiveWarpNDC(points, options) {
  18929. // too few data points?
  18930. const n = points.columns / 2;
  18931. if (n < 4) {
  18932. return speedy_vision_default().Promise.reject(new IllegalArgumentError(`Too few data points to compute a perspective warp`));
  18933. }
  18934. // compute a homography
  18935. const src = points.block(0, 1, 0, n - 1);
  18936. const dest = points.block(0, 1, n, 2 * n - 1);
  18937. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  18938. return speedy_vision_default().Matrix.findHomography(speedy_vision_default().Matrix.Zeros(3), src, dest, Object.assign({ mask }, options)).then(homography => {
  18939. // check if this is a valid warp
  18940. const a00 = homography.at(0, 0);
  18941. if (Number.isNaN(a00))
  18942. throw new NumericalError(`Can't compute a perspective warp: bad keypoints`);
  18943. // count the number of inliers
  18944. const inliers = mask.read();
  18945. let inlierCount = 0;
  18946. for (let i = inliers.length - 1; i >= 0; i--)
  18947. inlierCount += inliers[i];
  18948. const score = inlierCount / inliers.length;
  18949. // done!
  18950. return [homography, score];
  18951. });
  18952. }
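/*
   Note on the quality score: it is the fraction of inliers reported by the
   robust estimator, e.g., 30 inliers out of 40 data points gives a score of
   0.75. The options object is forwarded to speedy-vision's findHomography();
   the scanning state further below calls this method with a 'pransac'
   configuration.
*/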
  18953. /**
  18954. * Given n > 0 pairs of keypoints in NDC as a 2 x 2n [ src | dest ] matrix,
  18955. * find an affine warp from src to dest in NDC. The affine warp is given as
  18956. * a 3x3 matrix whose last row is [0 0 1]
  18957. * @param points compiled pairs of keypoints in NDC
  18958. * @param options to be passed to speedy-vision
  18959. * @returns a pair [ 3x3 transformation matrix, quality score ]
  18960. */
  18961. static findAffineWarpNDC(points, options) {
  18962. // too few data points?
  18963. const n = points.columns / 2;
  18964. if (n < 3) {
  18965. return speedy_vision_default().Promise.reject(new IllegalArgumentError(`Too few data points to compute an affine warp`));
  18966. }
  18967. // compute an affine transformation
  18968. const model = speedy_vision_default().Matrix.Eye(3);
  18969. const src = points.block(0, 1, 0, n - 1);
  18970. const dest = points.block(0, 1, n, 2 * n - 1);
  18971. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  18972. return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), // 2x3 submatrix
  18973. src, dest, Object.assign({ mask }, options)).then(_ => {
  18974. // check if this is a valid warp
  18975. const a00 = model.at(0, 0);
  18976. if (Number.isNaN(a00))
  18977. throw new NumericalError(`Can't compute an affine warp: bad keypoints`);
  18978. // count the number of inliers
  18979. const inliers = mask.read();
  18980. let inlierCount = 0;
  18981. for (let i = inliers.length - 1; i >= 0; i--)
  18982. inlierCount += inliers[i];
  18983. const score = inlierCount / inliers.length;
  18984. // done!
  18985. return [model, score];
  18986. });
  18987. }
  18988. /**
  18989. * Find a polyline in Normalized Device Coordinates (NDC)
  18990. * @param homography maps the corners of NDC to a quadrilateral in NDC
  18991. * @returns 4 points in NDC
  18992. */
  18993. static findPolylineNDC(homography) {
  18994. const h = homography.read();
  18995. const uv = [-1, +1, -1, -1, +1, -1, +1, +1]; // the corners of a reference image in NDC
  18996. const polyline = new Array(4);
  18997. for (let i = 0, j = 0; i < 4; i++, j += 2) {
  18998. const u = uv[j], v = uv[j + 1];
  18999. const x = h[0] * u + h[3] * v + h[6];
  19000. const y = h[1] * u + h[4] * v + h[7];
  19001. const w = h[2] * u + h[5] * v + h[8];
  19002. polyline[i] = speedy_vision_default().Point2(x / w, y / w);
  19003. }
  19004. return polyline;
  19005. }
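/*
   Quick check: if the homography is the identity, the polyline is simply the
   corners of NDC, visited as (-1, +1), (-1, -1), (+1, -1), (+1, +1).
   Each corner is transformed homogeneously and then divided by w.
*/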
  19006. /**
  19007. * Find a better spatial distribution of the input matches
  19008. * @param pairs in the [src, dest] format
  19009. * @returns refined pairs of quality matches
  19010. */
  19011. static refineMatchingPairs(pairs) {
  19012. // collect all keypoints obtained in this frame
  19013. const m = pairs.length;
  19014. const destKeypoints = new Array(m);
  19015. for (let j = 0; j < m; j++)
  19016. destKeypoints[j] = pairs[j][1];
  19017. // find a better spatial distribution of the keypoints
  19018. const indices = this._distributeKeypoints(destKeypoints);
  19019. // assemble output
  19020. const n = indices.length; // number of refined matches
  19021. const result = new Array(n);
  19022. for (let i = 0; i < n; i++)
  19023. result[i] = pairs[indices[i]];
  19024. // done!
  19025. return result;
  19026. }
  19027. /**
  19028. * Spatially distribute keypoints over a grid
  19029. * @param keypoints keypoints to be distributed
  19030. * @returns a list of indices of keypoints[]
  19031. */
  19032. static _distributeKeypoints(keypoints) {
  19033. // create a grid
  19034. const gridCells = TRACK_GRID_GRANULARITY; // number of grid elements in each axis
  19035. const numberOfCells = gridCells * gridCells;
  19036. const n = keypoints.length;
  19037. // get the coordinates of the keypoints
  19038. const points = new Array(2 * n);
  19039. for (let i = 0, j = 0; i < n; i++, j += 2) {
  19040. points[j] = keypoints[i].x;
  19041. points[j + 1] = keypoints[i].y;
  19042. }
  19043. // normalize the coordinates to [0,1) x [0,1)
  19044. this._normalizePoints(points);
  19045. // distribute the keypoints over the grid
  19046. const grid = new Array(numberOfCells).fill(-1);
  19047. for (let i = 0, j = 0; i < n; i++, j += 2) {
  19048. // find the grid location of the i-th point
  19049. const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
  19050. const yg = Math.floor(points[j + 1] * gridCells);
  19051. // store the index of the i-th point in the grid
  19052. const k = yg * gridCells + xg;
  19053. if (grid[k] < 0)
  19054. grid[k] = i;
  19055. }
  19056. // retrieve points of the grid
  19057. let m = 0;
  19058. const indices = new Array(numberOfCells);
  19059. for (let g = 0; g < numberOfCells; g++) {
  19060. if (grid[g] >= 0)
  19061. indices[m++] = grid[g];
  19062. }
  19063. indices.length = m;
  19064. // done!
  19065. return indices;
  19066. }
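/*
   Behavior sketch: at most one keypoint is kept per grid cell (the first one
   that lands on an empty cell), so the output has at most
   TRACK_GRID_GRANULARITY^2 indices. For example, with a granularity of 2
   there are 4 cells and therefore at most 4 surviving keypoints.
*/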
  19067. /**
  19068. * Normalize points to [0,1)^2
19069. * @param points flat array of n points in the form [x0, y0, x1, y1, ...] (a 2 x n matrix in column-major layout)
  19070. * @returns points
  19071. */
  19072. static _normalizePoints(points) {
  19073. Utils.assert(points.length % 2 == 0);
  19074. const n = points.length / 2;
  19075. if (n == 0)
  19076. return points;
  19077. let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
  19078. let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
  19079. for (let i = 0, j = 0; i < n; i++, j += 2) {
  19080. const x = points[j], y = points[j + 1];
  19081. xmin = x < xmin ? x : xmin;
  19082. ymin = y < ymin ? y : ymin;
  19083. xmax = x > xmax ? x : xmax;
  19084. ymax = y > ymax ? y : ymax;
  19085. }
  19086. const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
  19087. const ylen = ymax - ymin + 1;
  19088. for (let i = 0, j = 0; i < n; i++, j += 2) {
  19089. points[j] = (points[j] - xmin) / xlen;
  19090. points[j + 1] = (points[j + 1] - ymin) / ylen;
  19091. }
  19092. return points;
  19093. }
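/*
   Worked example (in-place normalization): points == [10, 20, 30, 60] gives
   xmin = 10, xlen = 21, ymin = 20, ylen = 41, so the array becomes
   [0, 0, 20/21, 40/41] ≈ [0, 0, 0.952, 0.976] -- all values in [0, 1).
*/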
  19094. }
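/*
   How these utilities fit together -- a condensed sketch of what the scanning
   state below does (ransacOptions stands in for its 'pransac' settings):

   const pairs = this._findMatchingPairs(matchedKeypoints);              // [src, dest] in NIS
   const points = ImageTrackerUtils.compilePairsOfKeypointsNDC(pairs);   // 2 x 2n matrix in NDC
   ImageTrackerUtils.findPerspectiveWarpNDC(points, ransacOptions)
       .then(([homography, score]) => ImageTrackerUtils.findPolylineNDC(homography));
*/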
  19095. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
  19096. /*
  19097. * encantar.js
  19098. * GPU-accelerated Augmented Reality for the web
  19099. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19100. *
  19101. * This program is free software: you can redistribute it and/or modify
  19102. * it under the terms of the GNU Lesser General Public License as published
  19103. * by the Free Software Foundation, either version 3 of the License, or
  19104. * (at your option) any later version.
  19105. *
  19106. * This program is distributed in the hope that it will be useful,
  19107. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19108. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19109. * GNU Lesser General Public License for more details.
  19110. *
  19111. * You should have received a copy of the GNU Lesser General Public License
  19112. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19113. *
  19114. * training.ts
  19115. * Training state of the Image Tracker
  19116. */
  19117. /**
  19118. * Training state of the Image Tracker
  19119. */
  19120. class ImageTrackerTrainingState extends ImageTrackerState {
  19121. /**
  19122. * Constructor
  19123. * @param imageTracker
  19124. */
  19125. constructor(imageTracker) {
  19126. super('training', imageTracker);
  19127. /** index of the image being used to train the tracker */
  19128. this._currentImageIndex = 0;
  19129. // initialize the training map
  19130. this._trainingMap = {
  19131. keypoints: [],
  19132. referenceImageIndex: [],
  19133. referenceImages: [],
  19134. };
  19135. }
  19136. /**
  19137. * Called as soon as this becomes the active state, just before update() runs for the first time
  19138. * @param settings
  19139. */
  19140. onEnterState(settings) {
  19141. const database = this._imageTracker.database;
  19142. // validate
  19143. if (database.count == 0)
  19144. throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
  19145. // prepare to train...
  19146. this._currentImageIndex = 0;
  19147. this._trainingMap.keypoints.length = 0;
  19148. this._trainingMap.referenceImageIndex.length = 0;
  19149. this._trainingMap.referenceImages.length = 0;
  19150. // lock the database
  19151. Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
  19152. database._lock();
  19153. // collect all images
  19154. for (const referenceImage of database)
  19155. this._trainingMap.referenceImages.push(referenceImage);
  19156. }
  19157. /**
  19158. * Called when leaving the state, after update()
  19159. */
  19160. onLeaveState() {
  19161. // we don't return to this state, so we can release the pipeline early
  19162. this._pipeline.release();
  19163. this._pipelineReleased = true;
  19164. }
  19165. /**
  19166. * Called just before the GPU processing
  19167. * @returns promise
  19168. */
  19169. _beforeUpdate() {
  19170. const source = this._pipeline.node('source');
  19171. const screen = this._pipeline.node('screen');
  19172. const keypointScaler = this._pipeline.node('keypointScaler');
  19173. // set the appropriate training media
  19174. const referenceImage = this._trainingMap.referenceImages[this._currentImageIndex];
  19175. source.media = referenceImage.media;
  19176. // compute the appropriate size of the training image space
  19177. const resolution = this._imageTracker.resolution;
  19178. const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
  19179. const aspectRatioOfTrainingImage = referenceImage.aspectRatio;
  19180. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  19181. screen.size.width = Math.round(screen.size.width * scale);
  19182. screen.size.height = Math.round(screen.size.height * scale);
  19183. // convert keypoints to NIS
  19184. keypointScaler.transform = ImageTrackerUtils.rasterToNIS(screen.size);
  19185. // log
  19186. Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
  19187. // done!
  19188. return speedy_vision_default().Promise.resolve();
  19189. }
  19190. /**
  19191. * Post processing that takes place just after the GPU processing
  19192. * @param result pipeline results
  19193. * @returns state output
  19194. */
  19195. _afterUpdate(result) {
  19196. const referenceImage = this._trainingMap.referenceImages[this._currentImageIndex];
  19197. const keypoints = result.keypoints;
  19198. const image = result.image;
  19199. // log
  19200. Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
  19201. // tracker output
  19202. const trackerOutput = {
  19203. keypointsNIS: image !== undefined ? keypoints : undefined,
  19204. image: image,
  19205. };
19206. // update the training map: associate each keypoint of the current image with its reference image index
  19207. for (let i = 0; i < keypoints.length; i++) {
  19208. this._trainingMap.keypoints.push(keypoints[i]);
  19209. this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
  19210. }
  19211. // the current image has been processed!
  19212. ++this._currentImageIndex;
  19213. // we're not done yet
  19214. if (this._currentImageIndex < this._trainingMap.referenceImages.length) {
  19215. return speedy_vision_default().Promise.resolve({
  19216. nextState: 'training',
  19217. trackerOutput: trackerOutput
  19218. });
  19219. }
  19220. // finished training!
  19221. return speedy_vision_default().Promise.resolve({
  19222. nextState: 'scanning',
  19223. trackerOutput: trackerOutput,
  19224. nextStateSettings: {
  19225. database: this._trainingMap.keypoints,
  19226. }
  19227. });
  19228. }
  19229. /**
  19230. * Create & setup the pipeline
  19231. * @returns pipeline
  19232. */
  19233. _createPipeline() {
  19234. const pipeline = speedy_vision_default().Pipeline();
  19235. const source = speedy_vision_default().Image.Source('source');
  19236. const screen = speedy_vision_default().Transform.Resize('screen');
  19237. const greyscale = speedy_vision_default().Filter.Greyscale();
  19238. const blur = speedy_vision_default().Filter.GaussianBlur();
  19239. const nightvision = speedy_vision_default().Filter.Nightvision();
  19240. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  19241. const pyramid = speedy_vision_default().Image.Pyramid();
  19242. const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
  19243. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19244. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19245. const blurredPyramid = speedy_vision_default().Image.Pyramid();
  19246. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19247. const clipper = speedy_vision_default().Keypoint.Clipper();
  19248. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  19249. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  19250. //const imageSink = Speedy.Image.Sink('image');
  19251. source.media = null;
  19252. screen.size = speedy_vision_default().Size(0, 0);
  19253. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19254. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19255. nightvision.gain = NIGHTVISION_GAIN;
  19256. nightvision.offset = NIGHTVISION_OFFSET;
  19257. nightvision.decay = NIGHTVISION_DECAY;
  19258. nightvision.quality = NIGHTVISION_QUALITY;
  19259. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19260. detector.levels = SCAN_PYRAMID_LEVELS;
  19261. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  19262. detector.threshold = SCAN_FAST_THRESHOLD;
  19263. detector.capacity = 8192;
  19264. subpixel.method = SUBPIXEL_METHOD;
  19265. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  19266. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  19267. clipper.size = TRAIN_MAX_KEYPOINTS;
  19268. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  19269. keypointSink.turbo = false;
  19270. // prepare input
  19271. source.output().connectTo(screen.input());
  19272. screen.output().connectTo(greyscale.input());
  19273. // preprocess image
  19274. greyscale.output().connectTo(nightvisionMux.input('in0'));
  19275. greyscale.output().connectTo(nightvision.input());
  19276. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19277. nightvisionMux.output().connectTo(pyramid.input());
  19278. // keypoint detection
  19279. pyramid.output().connectTo(detector.input());
  19280. detector.output().connectTo(clipper.input());
  19281. // keypoint refinement
  19282. greyscale.output().connectTo(denoiser.input()); // reduce noise
  19283. denoiser.output().connectTo(blurredPyramid.input());
  19284. clipper.output().connectTo(subpixel.input('keypoints'));
  19285. blurredPyramid.output().connectTo(subpixel.input('image'));
  19286. // keypoint description
  19287. greyscale.output().connectTo(blur.input());
  19288. blur.output().connectTo(descriptor.input('image'));
  19289. subpixel.output().connectTo(descriptor.input('keypoints'));
  19290. // prepare output
  19291. descriptor.output().connectTo(keypointScaler.input());
  19292. keypointScaler.output().connectTo(keypointSink.input());
  19293. //nightvisionMux.output().connectTo(imageSink.input());
  19294. // done!
  19295. pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink);
  19296. return pipeline;
  19297. }
  19298. /**
  19299. * Get the reference image associated with a keypoint index in the training map
19300. * @param keypointIndex index of the keypoint in the training map
19301. * @returns the associated reference image, or null if not found
  19302. */
  19303. referenceImageOfKeypoint(keypointIndex) {
  19304. const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
  19305. if (imageIndex < 0)
  19306. return null;
  19307. return this._trainingMap.referenceImages[imageIndex];
  19308. }
  19309. /**
  19310. * Get the reference image index associated with a keypoint index in the training map
19311. * @param keypointIndex index of the keypoint in the training map
  19312. * @returns reference image index, or -1 if not found
  19313. */
  19314. referenceImageIndexOfKeypoint(keypointIndex) {
  19315. const n = this._trainingMap.referenceImageIndex.length;
  19316. if (keypointIndex < 0 || keypointIndex >= n)
  19317. return -1;
  19318. const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
  19319. if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImages.length)
  19320. return -1;
  19321. return imageIndex;
  19322. }
  19323. /**
  19324. * Get a keypoint of the trained set
19325. * @param keypointIndex index of the keypoint in the training map
19326. * @returns the keypoint, or null if not found
  19327. */
  19328. referenceKeypoint(keypointIndex) {
  19329. if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
  19330. return null;
  19331. return this._trainingMap.keypoints[keypointIndex];
  19332. }
  19333. }
  19334. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
  19335. /*
  19336. * encantar.js
  19337. * GPU-accelerated Augmented Reality for the web
  19338. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19339. *
  19340. * This program is free software: you can redistribute it and/or modify
  19341. * it under the terms of the GNU Lesser General Public License as published
  19342. * by the Free Software Foundation, either version 3 of the License, or
  19343. * (at your option) any later version.
  19344. *
  19345. * This program is distributed in the hope that it will be useful,
  19346. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19347. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19348. * GNU Lesser General Public License for more details.
  19349. *
  19350. * You should have received a copy of the GNU Lesser General Public License
  19351. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19352. *
  19353. * scanning.ts
  19354. * Scanning state of the Image Tracker
  19355. */
  19356. /** Port of the portal multiplexer: get new data from the camera */
  19357. const PORT_CAMERA = 0;
  19358. /** Port of the portal multiplexer: get previously memorized data */
  19359. const PORT_MEMORY = 1;
  19360. /**
  19361. * In the scanning state we look for a reference image in the video
  19362. */
  19363. class ImageTrackerScanningState extends ImageTrackerState {
  19364. /**
  19365. * Constructor
  19366. * @param imageTracker
  19367. */
  19368. constructor(imageTracker) {
  19369. super('scanning', imageTracker);
  19370. this._counter = 0;
  19371. this._bestScore = 0;
  19372. this._bestHomography = speedy_vision_default().Matrix.Eye(3);
  19373. }
  19374. /**
  19375. * Called as soon as this becomes the active state, just before update() runs for the first time
  19376. * @param settings
  19377. */
  19378. onEnterState(settings) {
  19379. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19380. const lshTables = this._pipeline.node('lshTables');
  19381. const database = settings.database;
  19382. // set attributes
  19383. this._counter = 0;
  19384. this._bestScore = 0;
  19385. // reset the image memorization circuit
  19386. imagePortalMux.port = PORT_CAMERA;
  19387. // prepare the keypoint matcher
  19388. if (database !== undefined)
  19389. lshTables.keypoints = database;
  19390. }
  19391. /**
  19392. * Called just before the GPU processing
  19393. * @returns promise
  19394. */
  19395. _beforeUpdate() {
  19396. const keypointScaler = this._pipeline.node('keypointScaler');
  19397. const screenSize = this.screenSize;
  19398. // convert keypoints to NIS
  19399. keypointScaler.transform = ImageTrackerUtils.rasterToNIS(screenSize);
  19400. // done!
  19401. return speedy_vision_default().Promise.resolve();
  19402. }
  19403. /**
  19404. * Post processing that takes place just after the GPU processing
  19405. * @param result pipeline results
  19406. * @returns state output
  19407. */
  19408. _afterUpdate(result) {
  19409. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19410. const keypoints = result.keypoints;
  19411. const image = result.image;
  19412. // tracker output
  19413. const trackerOutput = {
  19414. keypointsNIS: keypoints,
  19415. polylineNDC: [],
  19416. image: image,
  19417. };
  19418. // keep the last memorized image
  19419. imagePortalMux.port = PORT_MEMORY;
  19420. // find high quality matches
  19421. const matchedKeypoints = this._selectGoodMatches(keypoints);
  19422. if (matchedKeypoints.length < SCAN_MIN_MATCHES) {
  19423. // not enough high quality matches?
  19424. // we'll continue to scan the scene
  19425. this._counter = 0;
  19426. this._bestScore = 0;
  19427. return speedy_vision_default().Promise.resolve({
  19428. nextState: 'scanning',
  19429. trackerOutput: trackerOutput,
  19430. });
  19431. }
  19432. // we have enough high quality matches!
  19433. const pairs = this._findMatchingPairs(matchedKeypoints);
  19434. const points = ImageTrackerUtils.compilePairsOfKeypointsNDC(pairs);
  19435. // find a homography
  19436. return this._findHomographyNDC(points).then(([homography, score]) => {
  19437. // have we found the best homography so far?
  19438. if (score >= this._bestScore) {
  19439. // store it only if we'll be running the pipeline again
  19440. if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
  19441. this._bestScore = score;
  19442. this._bestHomography = homography;
  19443. // memorize the last image, corresponding to the best homography(*)
  19444. imagePortalMux.port = PORT_CAMERA;
  19445. /*
  19446. (*) technically speaking, this is not exactly the case. Since we're
  19447. using turbo to download the keypoints, there's a slight difference
  19448. between the data used to compute the homography and the last image.
  19449. Still, assuming continuity of the video stream, this logic is
  19450. good enough.
  19451. */
  19452. }
  19453. }
  19454. // find a polyline surrounding the target
  19455. const polylineNDC = ImageTrackerUtils.findPolylineNDC(homography);
  19456. trackerOutput.polylineNDC.push(...polylineNDC);
  19457. // continue a little longer in the scanning state
  19458. if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
  19459. return {
  19460. nextState: 'scanning',
  19461. trackerOutput: trackerOutput
  19462. };
  19463. }
  19464. // this image should correspond to the best homography
  19465. const snapshot = this._pipeline.node('imagePortalSink');
  19466. // the reference image that we'll track
  19467. const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
  19468. // this shouldn't happen
  19469. if (!referenceImage)
  19470. throw new DetectionError(`Can't track an unknown reference image`);
  19471. // let's track the target!
  19472. return {
  19473. nextState: 'pre-tracking-a',
  19474. nextStateSettings: {
  19475. homography: this._bestHomography,
  19476. snapshot: snapshot,
  19477. referenceImage: referenceImage,
  19478. },
  19479. trackerOutput: trackerOutput
  19480. };
  19481. })
  19482. .catch(err => {
  19483. // continue in the scanning state
  19484. Utils.warning(`Error when scanning: ${err.toString()}`);
  19485. return {
  19486. nextState: 'scanning',
  19487. trackerOutput: trackerOutput,
  19488. };
  19489. });
  19490. }
  19491. /**
  19492. * Select high quality matches of a single reference image
  19493. * @param keypoints matched keypoints of any quality, to any reference image
  19494. * @returns high quality matches of a single reference image
  19495. */
  19496. _selectGoodMatches(keypoints) {
  19497. const matchedKeypointsPerImageIndex = Object.create(null);
  19498. // find high quality matches, regardless of reference image
  19499. for (let j = keypoints.length - 1; j >= 0; j--) {
  19500. const keypoint = keypoints[j];
  19501. if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
  19502. const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
  19503. // the best match should be "much better" than the second best match,
  19504. // which means that they are "distinct enough"
  19505. if (d1 <= SCAN_MATCH_RATIO * d2) {
  19506. const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
  19507. //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
  19508. //if(idx1 == idx2 && idx1 >= 0) {
  19509. if (idx1 >= 0) {
  19510. if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
  19511. matchedKeypointsPerImageIndex[idx1] = [];
  19512. matchedKeypointsPerImageIndex[idx1].push(keypoint);
  19513. }
  19514. }
  19515. }
  19516. }
  19517. // find the reference image with the most high quality matches
  19518. let matchedKeypoints = [];
  19519. for (const imageIndex in matchedKeypointsPerImageIndex) {
  19520. if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
  19521. matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
  19522. }
  19523. // done!
  19524. return matchedKeypoints;
  19525. }
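/*
   Ratio test, numerically: assuming, for illustration only, that
   SCAN_MATCH_RATIO == 0.7, a keypoint with distances d1 = 30 and d2 = 60
   passes (30 <= 0.7 * 60 = 42), whereas d1 = 50 and d2 = 60 is rejected:
   the best match is not distinct enough from the second best.
*/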
  19526. /**
  19527. * Find a homography matrix using matched keypoints in NDC
  19528. * @param points compiled pairs of keypoints in NDC
  19529. * @returns homography (from reference to matched, NDC) & "quality" score
  19530. */
  19531. _findHomographyNDC(points) {
  19532. return ImageTrackerUtils.findPerspectiveWarpNDC(points, {
  19533. method: 'pransac',
  19534. reprojectionError: SCAN_RANSAC_REPROJECTIONERROR_NDC,
  19535. numberOfHypotheses: 512,
  19536. bundleSize: 128,
  19537. });
  19538. }
  19539. /**
  19540. * Find matching pairs of keypoints from reference image (src) to matched image (dest)
  19541. * @param matchedKeypoints
  19542. * @returns an array of matching pairs [src, dest]
  19543. */
  19544. _findMatchingPairs(matchedKeypoints) {
  19545. const pairs = new Array(matchedKeypoints.length);
  19546. for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
  19547. const matchedKeypoint = matchedKeypoints[i];
  19548. const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
  19549. // this shouldn't happen
  19550. if (referenceKeypoint == null)
  19551. throw new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`);
  19552. pairs[i] = [referenceKeypoint, matchedKeypoint];
  19553. }
  19554. return pairs;
  19555. }
  19556. /**
  19557. * Create & setup the pipeline
  19558. * @returns pipeline
  19559. */
  19560. _createPipeline() {
  19561. const pipeline = speedy_vision_default().Pipeline();
  19562. const source = speedy_vision_default().Image.Source('source');
  19563. const screen = speedy_vision_default().Transform.Resize('screen');
  19564. const greyscale = speedy_vision_default().Filter.Greyscale();
  19565. const blur = speedy_vision_default().Filter.GaussianBlur();
  19566. const nightvision = speedy_vision_default().Filter.Nightvision();
  19567. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  19568. const pyramid = speedy_vision_default().Image.Pyramid();
  19569. const detector = speedy_vision_default().Keypoint.Detector.FAST();
  19570. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19571. const clipper = speedy_vision_default().Keypoint.Clipper();
  19572. const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
  19573. const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
  19574. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  19575. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  19576. const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
  19577. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  19578. const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
  19579. const imagePortalBuffer = speedy_vision_default().Image.Buffer();
  19580. const imagePortalCopy = speedy_vision_default().Transform.Resize();
  19581. //const imageSink = Speedy.Image.Sink('image');
  19582. source.media = null;
  19583. screen.size = speedy_vision_default().Size(0, 0);
  19584. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19585. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19586. nightvision.gain = NIGHTVISION_GAIN;
  19587. nightvision.offset = NIGHTVISION_OFFSET;
  19588. nightvision.decay = NIGHTVISION_DECAY;
  19589. nightvision.quality = NIGHTVISION_QUALITY;
  19590. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19591. detector.levels = SCAN_PYRAMID_LEVELS;
  19592. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  19593. detector.threshold = SCAN_FAST_THRESHOLD;
  19594. detector.capacity = 2048;
  19595. clipper.size = SCAN_MAX_KEYPOINTS;
  19596. lshTables.keypoints = [];
  19597. lshTables.numberOfTables = SCAN_LSH_TABLES;
  19598. lshTables.hashSize = SCAN_LSH_HASHSIZE;
  19599. knn.k = 2;
  19600. knn.quality = 'default';
  19601. //knn.quality = 'fastest';
  19602. imagePortalSource.source = imagePortalSink;
  19603. imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
  19604. imagePortalCopy.size = speedy_vision_default().Size(0, 0);
  19605. imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
  19606. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  19607. keypointSink.turbo = true;
  19608. // prepare input
  19609. source.output().connectTo(screen.input());
  19610. screen.output().connectTo(greyscale.input());
  19611. // preprocess image
  19612. greyscale.output().connectTo(blur.input());
  19613. greyscale.output().connectTo(nightvisionMux.input('in0'));
  19614. greyscale.output().connectTo(nightvision.input());
  19615. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19616. nightvisionMux.output().connectTo(pyramid.input());
  19617. // keypoint detection
  19618. pyramid.output().connectTo(detector.input());
  19619. detector.output().connectTo(clipper.input());
  19620. // keypoint description
  19621. blur.output().connectTo(descriptor.input('image'));
  19622. clipper.output().connectTo(descriptor.input('keypoints'));
  19623. // keypoint matching
  19624. descriptor.output().connectTo(knn.input('keypoints'));
  19625. lshTables.output().connectTo(knn.input('lsh'));
  19626. // prepare output
  19627. clipper.output().connectTo(keypointScaler.input());
  19628. keypointScaler.output().connectTo(keypointSink.input());
  19629. knn.output().connectTo(keypointSink.input('matches'));
  19630. //pyramid.output().connectTo(imageSink.input());
  19631. // memorize image
  19632. source.output().connectTo(imagePortalBuffer.input());
  19633. imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
  19634. imagePortalSource.output().connectTo(imagePortalCopy.input());
  19635. imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
  19636. imagePortalMux.output().connectTo(imagePortalSink.input());
  19637. // done!
  19638. pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointScaler, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
  19639. return pipeline;
  19640. }
  19641. }
  19642. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking-a.ts
  19643. /*
  19644. * encantar.js
  19645. * GPU-accelerated Augmented Reality for the web
  19646. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19647. *
  19648. * This program is free software: you can redistribute it and/or modify
  19649. * it under the terms of the GNU Lesser General Public License as published
  19650. * by the Free Software Foundation, either version 3 of the License, or
  19651. * (at your option) any later version.
  19652. *
  19653. * This program is distributed in the hope that it will be useful,
  19654. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19655. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19656. * GNU Lesser General Public License for more details.
  19657. *
  19658. * You should have received a copy of the GNU Lesser General Public License
  19659. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19660. *
  19661. * pre-tracking-a.ts
  19662. * Image tracker: Pre-Tracking A state
  19663. */
  19664. /**
  19665. * Pre-Tracking A is a new training phase. The reference image that was found
  19666. * in the scanning state is transported to AR screen space, and a new training
  19667. * takes place there, with new keypoints and in a suitable warp.
  19668. */
  19669. class ImageTrackerPreTrackingAState extends ImageTrackerState {
  19670. /**
  19671. * Constructor
  19672. * @param imageTracker
  19673. */
  19674. constructor(imageTracker) {
  19675. super('pre-tracking-a', imageTracker);
  19676. this._homography = speedy_vision_default().Matrix.Eye(3);
  19677. this._referenceImage = null;
  19678. this._snapshot = null;
  19679. }
  19680. /**
  19681. * Called as soon as this becomes the active state, just before update() runs for the first time
  19682. * @param settings
  19683. */
  19684. onEnterState(settings) {
  19685. const homography = settings.homography;
  19686. const referenceImage = settings.referenceImage;
  19687. const snapshot = settings.snapshot;
  19688. // set attributes
  19689. this._homography = homography;
  19690. this._referenceImage = referenceImage;
  19691. this._snapshot = snapshot;
  19692. }
  19693. /**
  19694. * Called just before the GPU processing
  19695. * @returns promise
  19696. */
  19697. _beforeUpdate() {
  19698. const screenSize = this.screenSize;
  19699. const source = this._pipeline.node('source');
  19700. const imageRectifier = this._pipeline.node('imageRectifier');
  19701. const keypointScaler = this._pipeline.node('keypointScaler');
  19702. const borderClipper = this._pipeline.node('borderClipper');
  19703. // set the reference image as the source image
  19704. source.media = this._referenceImage.media;
  19705. // clip keypoints from the borders of the target image
  19706. borderClipper.imageSize = screenSize;
  19707. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  19708. // convert keypoints to NIS
  19709. keypointScaler.transform = ImageTrackerUtils.rasterToNIS(screenSize);
  19710. // rectify the image
  19711. const scale = TRACK_RECTIFIED_SCALE;
  19712. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(screenSize, this._referenceImage);
  19713. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  19714. const toScreen = ImageTrackerUtils.NDCToRaster(screenSize);
  19715. const toNDC = ImageTrackerUtils.rasterToNDC(screenSize);
  19716. return imageRectifier.transform.setTo(toScreen.times(shrink).times(toNDC)).then(() => void 0);
  19717. }
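/*
   Reading the rectification matrix right-to-left: rasterToNDC converts screen
   pixels to NDC, the best-fit shrink rectifies the image to the reference
   image's aspect ratio (scaled by TRACK_RECTIFIED_SCALE), and NDCToRaster
   converts back to screen pixels for the perspective warp node. Pre-Tracking B
   builds the same product with an extra inverse-homography factor, to undo the
   perspective found in the scanning state.
*/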
  19718. /**
  19719. * Post processing that takes place just after the GPU processing
  19720. * @param result pipeline results
  19721. * @returns state output
  19722. */
  19723. _afterUpdate(result) {
  19724. const referenceImage = this._referenceImage;
  19725. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  19726. const keypoints = result.keypoints;
  19727. const image = result.image;
  19728. // tracker output
  19729. const trackerOutput = {
  19730. keypointsNIS: image !== undefined ? keypoints : undefined,
  19731. image: image,
  19732. };
  19733. // not enough keypoints? something went wrong!
  19734. if (keypoints.length < PRE_TRACK_MIN_MATCHES) {
  19735. Utils.warning(`Can't pre-track "${referenceImage.name}" in ${this.name}!`);
  19736. return speedy_vision_default().Promise.resolve({
  19737. nextState: 'scanning',
  19738. trackerOutput: trackerOutput,
  19739. });
  19740. }
  19741. // done!
  19742. return speedy_vision_default().Promise.resolve({
  19743. nextState: 'pre-tracking-b',
  19744. trackerOutput: trackerOutput,
  19745. nextStateSettings: {
  19746. referenceKeypointPortalSink: keypointPortalSink,
  19747. referenceImage: this._referenceImage,
  19748. snapshot: this._snapshot,
  19749. homography: this._homography,
  19750. }
  19751. });
  19752. }
  19753. /**
  19754. * Create & setup the pipeline
  19755. * @returns pipeline
  19756. */
  19757. _createPipeline() {
  19758. const pipeline = speedy_vision_default().Pipeline();
  19759. const source = speedy_vision_default().Image.Source('source');
  19760. const screen = speedy_vision_default().Transform.Resize('screen');
  19761. const greyscale = speedy_vision_default().Filter.Greyscale();
  19762. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  19763. const nightvision = speedy_vision_default().Filter.Nightvision();
  19764. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  19765. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  19766. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19767. const blur = speedy_vision_default().Filter.GaussianBlur();
  19768. const clipper = speedy_vision_default().Keypoint.Clipper();
  19769. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  19770. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19771. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19772. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  19773. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  19774. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  19775. //const imageSink = Speedy.Image.Sink('image');
  19776. source.media = null;
  19777. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19778. screen.size = speedy_vision_default().Size(0, 0);
  19779. nightvision.gain = NIGHTVISION_GAIN;
  19780. nightvision.offset = NIGHTVISION_OFFSET;
  19781. nightvision.decay = NIGHTVISION_DECAY;
  19782. nightvision.quality = NIGHTVISION_QUALITY;
  19783. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19784. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19785. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19786. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  19787. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  19788. detector.quality = TRACK_HARRIS_QUALITY;
  19789. detector.capacity = TRACK_DETECTOR_CAPACITY;
  19790. subpixel.method = SUBPIXEL_METHOD;
  19791. clipper.size = TRACK_MAX_KEYPOINTS;
  19792. borderClipper.imageSize = screen.size;
  19793. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  19794. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  19795. keypointSink.turbo = false;
  19796. // prepare input
  19797. source.output().connectTo(screen.input());
  19798. screen.output().connectTo(greyscale.input());
  19799. // preprocess images
  19800. greyscale.output().connectTo(imageRectifier.input());
  19801. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  19802. imageRectifier.output().connectTo(nightvision.input());
  19803. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19804. // keypoint detection & clipping
  19805. nightvisionMux.output().connectTo(detector.input());
  19806. detector.output().connectTo(borderClipper.input());
  19807. borderClipper.output().connectTo(clipper.input());
  19808. // keypoint refinement
  19809. imageRectifier.output().connectTo(denoiser.input());
  19810. denoiser.output().connectTo(subpixel.input('image'));
  19811. clipper.output().connectTo(subpixel.input('keypoints'));
  19812. // keypoint description
  19813. nightvisionMux.output().connectTo(blur.input());
  19814. blur.output().connectTo(descriptor.input('image'));
  19815. subpixel.output().connectTo(descriptor.input('keypoints'));
  19816. // prepare output
  19817. descriptor.output().connectTo(keypointScaler.input());
  19818. keypointScaler.output().connectTo(keypointSink.input());
  19819. keypointScaler.output().connectTo(keypointPortalSink.input());
  19820. //imageRectifier.output().connectTo(imageSink.input());
  19821. // done!
  19822. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, detector, borderClipper, clipper, denoiser, subpixel, blur, descriptor, keypointScaler, keypointSink, keypointPortalSink);
  19823. return pipeline;
  19824. }
  19825. }
  19826. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking-b.ts
  19827. /*
  19828. * encantar.js
  19829. * GPU-accelerated Augmented Reality for the web
  19830. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19831. *
  19832. * This program is free software: you can redistribute it and/or modify
  19833. * it under the terms of the GNU Lesser General Public License as published
  19834. * by the Free Software Foundation, either version 3 of the License, or
  19835. * (at your option) any later version.
  19836. *
  19837. * This program is distributed in the hope that it will be useful,
  19838. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19839. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19840. * GNU Lesser General Public License for more details.
  19841. *
  19842. * You should have received a copy of the GNU Lesser General Public License
  19843. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19844. *
  19845. * pre-tracking-b.ts
  19846. * Image tracker: Pre-Tracking B state
  19847. */
  19848. /**
  19849. * In Pre-Tracking B, we refine the homography obtained at the scanning state.
  19850. * We find a transformation that warps the snapshot obtained from the scanning
  19851. * state to an image that closely resembles the output of Pre-Tracking A.
  19852. */
  19853. class ImageTrackerPreTrackingBState extends ImageTrackerState {
  19854. /**
  19855. * Constructor
  19856. * @param imageTracker
  19857. */
  19858. constructor(imageTracker) {
  19859. super('pre-tracking-b', imageTracker);
  19860. this._homography = speedy_vision_default().Matrix.Eye(3);
  19861. this._referenceImage = null;
  19862. this._snapshot = null;
  19863. this._referenceKeypointPortalSink = null;
  19864. }
  19865. /**
  19866. * Called as soon as this becomes the active state, just before update() runs for the first time
  19867. * @param settings
  19868. */
  19869. onEnterState(settings) {
  19870. const homography = settings.homography;
  19871. const referenceImage = settings.referenceImage;
  19872. const snapshot = settings.snapshot;
  19873. const referenceKeypointPortalSink = settings.referenceKeypointPortalSink;
  19874. // set attributes
  19875. this._homography = homography;
  19876. this._referenceImage = referenceImage;
  19877. this._snapshot = snapshot;
  19878. this._referenceKeypointPortalSink = referenceKeypointPortalSink;
  19879. }
  19880. /**
  19881. * Called just before the GPU processing
  19882. * @returns promise
  19883. */
  19884. _beforeUpdate() {
  19885. const screenSize = this.screenSize;
  19886. const imageRectifier = this._pipeline.node('imageRectifier');
  19887. const keypointScaler = this._pipeline.node('keypointScaler');
  19888. const borderClipper = this._pipeline.node('borderClipper');
  19889. const imagePortalSource = this._pipeline.node('imagePortalSource');
  19890. const referenceKeypointPortalSource = this._pipeline.node('referenceKeypointPortalSource');
  19891. // get the snapshot from the scanning state
  19892. imagePortalSource.source = this._snapshot;
  19893. // get the reference keypoints from Pre-Tracking A
  19894. referenceKeypointPortalSource.source = this._referenceKeypointPortalSink;
  19895. // clip keypoints from the borders of the target image
  19896. borderClipper.imageSize = screenSize;
  19897. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  19898. // convert keypoints to NIS
  19899. keypointScaler.transform = ImageTrackerUtils.rasterToNIS(screenSize);
  19900. // rectify the image
  19901. const scale = TRACK_RECTIFIED_SCALE;
  19902. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(screenSize, this._referenceImage);
  19903. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  19904. const undistort = this._homography.inverse();
  19905. const toScreen = ImageTrackerUtils.NDCToRaster(screenSize);
  19906. const toNDC = ImageTrackerUtils.rasterToNDC(screenSize);
  19907. return imageRectifier.transform.setTo(toScreen.times(shrink.times(undistort)).times(toNDC)).then(() => void 0);
  19908. }
  19909. /**
  19910. * Post processing that takes place just after the GPU processing
  19911. * @param result pipeline results
  19912. * @returns state output
  19913. */
  19914. _afterUpdate(result) {
  19915. const referenceImage = this._referenceImage;
  19916. const referenceKeypoints = result.referenceKeypoints; // from Pre-Tracking A
  19917. const keypoints = result.keypoints; // from Pre-Tracking B
  19918. const image = result.image;
  19919. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  19920. // tracker output
  19921. const trackerOutput = {
  19922. keypointsNIS: image !== undefined ? keypoints : undefined,
  19923. image: image,
  19924. };
  19925. return speedy_vision_default().Promise.resolve()
  19926. .then(() => {
  19927. // find matching pairs of keypoints
  19928. const pairs = this._findMatchingPairs(referenceKeypoints, keypoints);
  19929. //const pairs = ImageTrackerUtils.refineMatchingPairs(allPairs);
  19930. if (pairs.length < PRE_TRACK_MIN_MATCHES)
  19931. throw new TrackingError('Not enough data points');
  19932. // find a warp
  19933. const points = ImageTrackerUtils.compilePairsOfKeypointsNDC(pairs);
  19934. return this._findAffineMotionNDC(points);
  19935. })
  19936. .then(warp => {
  19937. // refine the homography
  19938. return this._homography.setTo(warp.times(this._homography));
  19939. })
  19940. .then(_ => ({
  19941. nextState: 'tracking',
  19942. //nextState: 'pre-tracking-b',
  19943. trackerOutput: trackerOutput,
  19944. nextStateSettings: {
  19945. // we export keypoints obtained in Pre-Tracking B, not in A.
  19946. // lighting conditions match, but what if the snapshot is too blurry?
  19947. templateKeypoints: keypoints,
  19948. templateKeypointPortalSink: keypointPortalSink,
  19949. referenceImage: this._referenceImage,
  19950. homography: this._homography,
  19951. initialScreenSize: this.screenSize,
  19952. }
  19953. }))
  19954. .catch(err => {
  19955. Utils.warning(`Can't pre-track "${referenceImage.name}" in ${this.name}! ${err.toString()}`);
  19956. return {
  19957. nextState: 'scanning',
  19958. trackerOutput: trackerOutput,
  19959. };
  19960. });
  19961. }
  19962. /**
  19963. * Find an affine motion model in NDC between pairs of keypoints in NDC
  19964. * given as a 2 x 2n [ src | dest ] matrix
  19965. * @param points compiled pairs of keypoints in NDC
  19966. * @returns a promise that resolves to a 3x3 warp in NDC that maps source to destination
  19967. */
  19968. _findAffineMotionNDC(points) {
  19969. return ImageTrackerUtils.findAffineWarpNDC(points, {
  19970. method: 'pransac',
  19971. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR_NDC,
  19972. numberOfHypotheses: 512 * 4,
  19973. bundleSize: 128,
  19974. mask: undefined // score is not needed
  19975. }).then(([warp, score]) => {
  19976. const scale = TRACK_RECTIFIED_SCALE;
  19977. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(this.screenSize, this._referenceImage);
  19978. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  19979. const grow = ImageTrackerUtils.inverseBestFitScaleNDC(aspectRatio, scale);
  19980. const scaledWarp = grow.times(warp).times(shrink);
  19981. const distort = this._homography;
  19982. const undistort = distort.inverse();
  19983. const correctedWarp = distort.times(scaledWarp).times(undistort);
  19984. //console.log(Speedy.Matrix(warp).toString());
  19985. //console.log(Speedy.Matrix(scaledWarp).toString());
  19986. //console.log(Speedy.Matrix(correctedWarp).toString());
  19987. return correctedWarp;
  19988. });
  19989. }
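/*
   The composition above, spelled out: correctedWarp = H * (grow * warp * shrink) * H^-1,
   where H is the current homography, warp is the affine motion estimated in
   the rectified space, and grow/shrink undo/apply the best-fit scale. One way
   to read it: the motion measured in the rectified space is transported back
   to the space in which the homography operates before being composed with it
   in _afterUpdate().
*/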
  19990. /**
  19991. * Find matching pairs of two sets of keypoints matched via brute force
  19992. * @param srcKeypoints source (database)
  19993. * @param destKeypoints destination
  19994. * @returns an array of matching pairs [src, dest]
  19995. */
  19996. _findMatchingPairs(srcKeypoints, destKeypoints) {
  19997. const pairs = [];
  19998. for (let i = 0; i < destKeypoints.length; i++) {
  19999. const destKeypoint = destKeypoints[i];
  20000. if (destKeypoint.matches[0].index >= 0 && destKeypoint.matches[1].index >= 0) {
  20001. const d1 = destKeypoint.matches[0].distance;
  20002. const d2 = destKeypoint.matches[1].distance;
  20003. // the best match should be "much better" than the second best match,
  20004. // which means that they are "distinct enough"
  20005. if (d1 <= TRACK_MATCH_RATIO * d2) {
  20006. const srcKeypoint = srcKeypoints[destKeypoint.matches[0].index];
  20007. pairs.push([srcKeypoint, destKeypoint]);
  20008. }
  20009. }
  20010. }
  20011. return pairs;
  20012. }
  20013. /**
  20014. * Create & setup the pipeline
  20015. * @returns pipeline
  20016. */
  20017. _createPipeline() {
  20018. const pipeline = speedy_vision_default().Pipeline();
  20019. const source = speedy_vision_default().Image.Source('source');
  20020. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  20021. const referenceKeypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('referenceKeypointPortalSource');
  20022. const screen = speedy_vision_default().Transform.Resize('screen');
  20023. const greyscale = speedy_vision_default().Filter.Greyscale();
  20024. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  20025. const nightvision = speedy_vision_default().Filter.Nightvision();
  20026. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  20027. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  20028. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20029. const blur = speedy_vision_default().Filter.GaussianBlur();
  20030. const clipper = speedy_vision_default().Keypoint.Clipper();
  20031. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  20032. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  20033. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  20034. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  20035. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  20036. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  20037. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  20038. const referenceKeypointSink = speedy_vision_default().Keypoint.Sink('referenceKeypoints');
  20039. //const imageSink = Speedy.Image.Sink('image');
  20040. source.media = null;
  20041. imagePortalSource.source = null;
  20042. referenceKeypointPortalSource.source = null;
  20043. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20044. screen.size = speedy_vision_default().Size(0, 0);
  20045. nightvision.gain = NIGHTVISION_GAIN;
  20046. nightvision.offset = NIGHTVISION_OFFSET;
  20047. nightvision.decay = NIGHTVISION_DECAY;
  20048. nightvision.quality = NIGHTVISION_QUALITY;
  20049. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  20050. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  20051. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  20052. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  20053. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  20054. detector.quality = TRACK_HARRIS_QUALITY;
  20055. detector.capacity = TRACK_DETECTOR_CAPACITY;
  20056. subpixel.method = SUBPIXEL_METHOD;
  20057. clipper.size = TRACK_MAX_KEYPOINTS;
  20058. borderClipper.imageSize = screen.size;
  20059. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  20060. matcher.k = 2;
  20061. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  20062. keypointSink.turbo = false;
  20063. // prepare input
  20064. //source.output(); // ignore, but keep it in the pipeline
  20065. imagePortalSource.output().connectTo(screen.input());
  20066. screen.output().connectTo(greyscale.input());
  20067. // preprocess images
  20068. greyscale.output().connectTo(imageRectifier.input());
  20069. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  20070. imageRectifier.output().connectTo(nightvision.input());
  20071. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20072. // keypoint detection & clipping
  20073. nightvisionMux.output().connectTo(detector.input());
  20074. detector.output().connectTo(borderClipper.input());
  20075. borderClipper.output().connectTo(clipper.input());
  20076. // keypoint refinement
  20077. imageRectifier.output().connectTo(denoiser.input());
  20078. denoiser.output().connectTo(subpixel.input('image'));
  20079. clipper.output().connectTo(subpixel.input('keypoints'));
  20080. // keypoint description
  20081. nightvisionMux.output().connectTo(blur.input());
  20082. blur.output().connectTo(descriptor.input('image'));
  20083. subpixel.output().connectTo(descriptor.input('keypoints'));
  20084. // keypoint matching
  20085. descriptor.output().connectTo(matcher.input('keypoints'));
  20086. referenceKeypointPortalSource.output().connectTo(matcher.input('database'));
  20087. // prepare output
  20088. descriptor.output().connectTo(keypointScaler.input());
  20089. keypointScaler.output().connectTo(keypointPortalSink.input());
  20090. keypointScaler.output().connectTo(keypointSink.input());
  20091. matcher.output().connectTo(keypointSink.input('matches'));
  20092. referenceKeypointPortalSource.output().connectTo(referenceKeypointSink.input());
  20093. //imageRectifier.output().connectTo(imageSink.input());
  20094. // done!
  20095. pipeline.init(source, screen, imagePortalSource, referenceKeypointPortalSource, greyscale, imageRectifier, nightvision, nightvisionMux, detector, borderClipper, clipper, denoiser, subpixel, blur, descriptor, matcher, keypointScaler, keypointSink, keypointPortalSink, referenceKeypointSink);
  20096. return pipeline;
  20097. }
  20098. }
  20099. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
  20100. /*
  20101. * encantar.js
  20102. * GPU-accelerated Augmented Reality for the web
  20103. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20104. *
  20105. * This program is free software: you can redistribute it and/or modify
  20106. * it under the terms of the GNU Lesser General Public License as published
  20107. * by the Free Software Foundation, either version 3 of the License, or
  20108. * (at your option) any later version.
  20109. *
  20110. * This program is distributed in the hope that it will be useful,
  20111. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20112. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20113. * GNU Lesser General Public License for more details.
  20114. *
  20115. * You should have received a copy of the GNU Lesser General Public License
  20116. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20117. *
  20118. * image-tracker-event.ts
  20119. * Events emitted by an Image Tracker
  20120. */
  20121. /**
  20122. * An event emitted by an Image Tracker
  20123. */
  20124. class ImageTrackerEvent extends AREvent {
  20125. /**
  20126. * Constructor
  20127. * @param type event type
  20128. * @param referenceImage optional reference image
  20129. */
  20130. constructor(type, referenceImage) {
  20131. super(type);
  20132. this._referenceImage = referenceImage;
  20133. }
  20134. /**
  20135. * Reference image
  20136. */
  20137. get referenceImage() {
  20138. return this._referenceImage;
  20139. }
  20140. }
  20141. ;// CONCATENATED MODULE: ./src/geometry/quaternion.ts
  20142. /*
  20143. * encantar.js
  20144. * GPU-accelerated Augmented Reality for the web
  20145. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20146. *
  20147. * This program is free software: you can redistribute it and/or modify
  20148. * it under the terms of the GNU Lesser General Public License as published
  20149. * by the Free Software Foundation, either version 3 of the License, or
  20150. * (at your option) any later version.
  20151. *
  20152. * This program is distributed in the hope that it will be useful,
  20153. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20154. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20155. * GNU Lesser General Public License for more details.
  20156. *
  20157. * You should have received a copy of the GNU Lesser General Public License
  20158. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20159. *
  20160. * quaternion.ts
  20161. * Quaternions
  20162. */
  20163. /** Small number */
  20164. const EPSILON = 1e-6;
  20165. // public / non-internal methods do not change the contents of the quaternion
  20166. /**
  20167. * Quaternion q = x i + y j + z k + w
  20168. */
  20169. class Quaternion {
  20170. /**
  20171. * Constructor
  20172. * @param x x coordinate (imaginary)
  20173. * @param y y coordinate (imaginary)
  20174. * @param z z coordinate (imaginary)
  20175. * @param w w coordinate (real)
  20176. */
  20177. constructor(x = 0, y = 0, z = 0, w = 1) {
  20178. this._x = +x;
  20179. this._y = +y;
  20180. this._z = +z;
  20181. this._w = +w;
  20182. }
  20183. /**
  20184. * Instantiate an identity quaternion q = 1
  20185. * @returns a new identity quaternion
  20186. */
  20187. static Identity() {
  20188. return new Quaternion(0, 0, 0, 1);
  20189. }
  20190. /**
  20191. * The x coordinate of the quaternion (imaginary)
  20192. */
  20193. get x() {
  20194. return this._x;
  20195. }
  20196. /**
  20197. * The y coordinate of the quaternion (imaginary)
  20198. */
  20199. get y() {
  20200. return this._y;
  20201. }
  20202. /**
  20203. * The z coordinate of the quaternion (imaginary)
  20204. */
  20205. get z() {
  20206. return this._z;
  20207. }
  20208. /**
  20209. * The w coordinate of the quaternion (real)
  20210. */
  20211. get w() {
  20212. return this._w;
  20213. }
  20214. /**
  20215. * The length of this quaternion
  20216. * @returns sqrt(x^2 + y^2 + z^2 + w^2)
  20217. */
  20218. length() {
  20219. const x = this._x;
  20220. const y = this._y;
  20221. const z = this._z;
  20222. const w = this._w;
  20223. return Math.sqrt(x * x + y * y + z * z + w * w);
  20224. }
  20225. /**
  20226. * Check if this and q have the same coordinates
  20227. * @param q a quaternion
  20228. * @returns true if this and q have the same coordinates
  20229. */
  20230. equals(q) {
  20231. return this._w === q._w && this._x === q._x && this._y === q._y && this._z === q._z;
  20232. }
  20233. /**
  20234. * Convert to string
  20235. * @returns a string
  20236. */
  20237. toString() {
  20238. const x = this._x.toFixed(4);
  20239. const y = this._y.toFixed(4);
  20240. const z = this._z.toFixed(4);
  20241. const w = this._w.toFixed(4);
  20242. return `Quaternion(${x},${y},${z},${w})`;
  20243. }
  20244. /**
  20245. * Normalize this quaternion
  20246. * @returns this quaternion, normalized
  20247. * @internal
  20248. */
  20249. _normalize() {
  20250. const length = this.length();
  20251. if (length < EPSILON) // zero?
  20252. return this;
  20253. this._x /= length;
  20254. this._y /= length;
  20255. this._z /= length;
  20256. this._w /= length;
  20257. return this;
  20258. }
  20259. /**
  20260. * Conjugate this quaternion
  20261. * @returns this quaternion, conjugated
  20262. * @internal
  20263. */
  20264. _conjugate() {
  20265. this._x = -this._x;
  20266. this._y = -this._y;
  20267. this._z = -this._z;
  20268. return this;
  20269. }
  20270. /**
  20271. * Set the coordinates of this quaternion
  20272. * @param x x-coordinate
  20273. * @param y y-coordinate
  20274. * @param z z-coordinate
  20275. * @param w w-coordinate
  20276. * @returns this quaternion
  20277. * @internal
  20278. */
  20279. _set(x, y, z, w) {
  20280. this._x = +x;
  20281. this._y = +y;
  20282. this._z = +z;
  20283. this._w = +w;
  20284. return this;
  20285. }
  20286. /**
  20287. * Copy q to this
  20288. * @param q a quaternion
  20289. * @returns this quaternion
  20290. * @internal
  20291. */
  20292. _copyFrom(q) {
  20293. this._x = q._x;
  20294. this._y = q._y;
  20295. this._z = q._z;
  20296. this._w = q._w;
  20297. return this;
  20298. }
  20299. /**
  20300. * Convert a quaternion to a 3x3 rotation matrix
  20301. * @returns a 3x3 rotation matrix
  20302. * @internal
  20303. */
  20304. _toRotationMatrix() {
  20305. const length = this.length(); // should be ~ 1
  20306. // sanity check
  20307. if (length < EPSILON)
  20308. return speedy_vision_default().Matrix.Eye(3);
  20309. // let q = (x,y,z,w) be a unit quaternion
  20310. const x = this._x / length;
  20311. const y = this._y / length;
  20312. const z = this._z / length;
  20313. const w = this._w / length;
  20314. /*
  20315. Let q = x i + y j + z k + w be a unit quaternion and
  20316. p = x_p i + y_p j + z_p k be a purely imaginary quaternion (w_p = 0)
  20317. representing a vector or point P = (x_p, y_p, z_p) in 3D space.
  20318. Let's rewrite q as q = v + w, where v = x i + y j + z k, and then
  20319. substitute v by the unit vector u = v / |v|, so that q = |v| u + w.
  20320. Since q is a unit quaternion, it follows that:
  20321. 1 = |q|^2 = x^2 + y^2 + z^2 + w^2 = |v|^2 + w^2.
  20322. Given that cos(t~)^2 + sin(t~)^2 = 1 for all real t~, there is a real t
  20323. such that cos(t) = w and sin(t) = |v|. Let's rewrite q as:
  20324. q = cos(t) + u sin(t)
  20325. (since 0 <= |v| = sin(t), it follows that 0 <= t <= pi)
  20326. A rotation of P, of 2t radians around axis u, can be computed as:
  20327. r_q(p) = q p q*
  20328. where q* is the conjugate of q. (note: since |q| = 1, q q* = q* q = 1)
  20329. ---
20330. Let h = x_h i + y_h j + z_h k + w_h be a quaternion. The multiplication
  20331. q h can be performed by pre-multiplying h, written as a column vector,
  20332. by the following L_q matrix:
20333.                [  w  -z   y   x ] [ x_h ]
20334. q h = L_q h =  [  z   w  -x   y ] [ y_h ]
20335.                [ -y   x   w   z ] [ z_h ]
20336.                [ -x  -y  -z   w ] [ w_h ]
  20337. (expand q h = (x i + y j + z k + w) (x_h i + y_h j + z_h k + w_h) to see)
  20338. Similarly, the product h q* can be expressed by pre-multiplying h by the
  20339. following R_q* matrix:
20340.                  [  w  -z   y  -x ] [ x_h ]
20341. h q* = R_q* h =  [  z   w  -x  -y ] [ y_h ]
20342.                  [ -y   x   w  -z ] [ z_h ]
20343.                  [  x   y   z   w ] [ w_h ]
  20344. (expand h q* = (x_h i + y_h j + z_h k + w_h) (-x i - y j - z k + w) to see)
  20345. ---
  20346. Although quaternion multiplication is not commutative, it is associative,
  20347. i.e., r_q(p) = (q p)q* = q(p q*). From the matrix equations above, it
  20348. follows that r_q(p) can be expressed as R_q* L_q p = L_q R_q* p. If we
  20349. define M_q = L_q R_q* = R_q* L_q, we can write r_q(p) = M_q p. Matrix M_q
  20350. has the following form:
20351.       [ w^2 + x^2 - y^2 - z^2   2xy - 2wz               2xz + 2wy               0 ]
20352. M_q = [ 2xy + 2wz               w^2 - x^2 + y^2 - z^2   2yz - 2wx               0 ]
20353.       [ 2xz - 2wy               2yz + 2wx               w^2 - x^2 - y^2 + z^2   0 ]
20354.       [ 0                       0                       0                       1 ]
  20355. Note: the bottom-right entry is x^2 + y^2 + z^2 + w^2 = |q|^2 = 1.
  20356. Let M be the top-left 3x3 submatrix of M_q. A direct, but boring,
  20357. computation shows that M'M = M M' = I, where M' is the transpose of M.
  20358. In addition, det M = |q|^6 = +1. Therefore, M is a 3x3 rotation matrix.
  20359. */
  20360. const x2 = x * x, y2 = y * y, z2 = z * z; //, w2 = w*w;
  20361. const xy = 2 * x * y, xz = 2 * x * z, yz = 2 * y * z;
  20362. const wx = 2 * w * x, wy = 2 * w * y, wz = 2 * w * z;
  20363. return speedy_vision_default().Matrix(3, 3, [
  20364. 1 - 2 * (y2 + z2), xy + wz, xz - wy,
  20365. xy - wz, 1 - 2 * (x2 + z2), yz + wx,
  20366. xz + wy, yz - wx, 1 - 2 * (x2 + y2)
  20367. ]);
  20368. }
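/*
Illustrative numerical check of the matrix above: a rotation of 90 degrees
about the z-axis corresponds to q = (0, 0, sqrt(2)/2, sqrt(2)/2), and its
matrix maps (1,0,0) to (0,1,0):

    // const s = Math.SQRT1_2;
    // const M = new Quaternion(0, 0, s, s)._toRotationMatrix().read(); // column-major
    // first column of M ~ [ 0, 1, 0 ]; second ~ [ -1, 0, 0 ]; third ~ [ 0, 0, 1 ]
*/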
  20369. /**
  20370. * Convert a 3x3 rotation matrix to a unit quaternion
  20371. * @param m a 3x3 rotation matrix. You should ensure that it is a rotation matrix
  20372. * @returns this quaternion
  20373. * @internal
  20374. */
  20375. _fromRotationMatrix(m) {
  20376. if (m.rows != 3 || m.columns != 3)
  20377. throw new IllegalArgumentError();
  20378. /*
  20379. Let M be the rotation matrix defined above. We're going to find a
  20380. unit quaternion q associated with M.
  20381. Before we begin, note that q and (-q) encode the same rotation, for
  20382. r_(-q)(p) = (-q)p(-q)* = (-1)q p (-1)q* = (-1)(-1)q p q* = q p q* = r_q(p).
  20383. Quaternion multiplication is commutative when a factor is a scalar, i.e.,
  20384. d p = p d for a real d and a quaternion p (check: distributive operation).
  20385. The trace of M, denoted as tr M, is 3w^2 - x^2 - y^2 - z^2. Since |q| = 1,
  20386. it follows that tr M = 3w^2 - (1 - w^2), which means that 4w^2 = 1 + tr M.
  20387. That being the case, we can write:
  20388. |w| = sqrt(1 + tr M) / 2
  20389. We'll arbitrarily pick w >= 0, for q and (-q) encode the same rotation.
  20390. Let mij denote the element at the i-th row and at the j-th column of M.
  20391. A direct verification shows that m21 - m12 = 4wz. Since w >= 0, it follows
  20392. that sign(z) = sign(m21 - m12). Similarly, sign(y) = sign(m13 - m31) and
  20393. sign(x) = sign(m32 - m23).
  20394. The quantity m11 + m22 is equal to 2w^2 - 2z^2, which means that 4z^2 =
  20395. 4w^2 - 2(m11 + m22) = (1 + tr M) - 2(m11 + m22). Therefore, let's write:
  20396. |z| = sqrt((1 + tr M) - 2(m11 + m22)) / 2
  20397. Of course, z = |z| sign(z). Similarly,
  20398. |y| = sqrt((1 + tr M) - 2(m11 + m33)) / 2
  20399. |x| = sqrt((1 + tr M) - 2(m22 + m33)) / 2
  20400. This gives (x, y, z, w).
  20401. ---
  20402. We quickly verify that (1 + tr M) - 2(m11 + m22) >= 0 if M is (really)
  20403. a rotation matrix: (1 + tr M) - 2(m11 + m22) = 1 + tr M - 2(tr M - m33) =
  20404. 1 - tr M + 2 m33 = 1 - (3w^2 - x^2 - y^2 - z^2) + 2(w^2 - x^2 - y^2 + z^2) =
  20405. 1 - w^2 + z^2 = (x^2 + y^2 + z^2 + w^2) - w^2 + z^2 = x^2 + y^2 + 2 z^2 >= 0.
  20406. Similarly, (1 + tr M) - 2(m11 + m33) >= 0 and (1 + tr M) - 2(m22 + m33) >= 0.
  20407. */
  20408. const data = m.read();
  20409. const m11 = data[0], m21 = data[1], m31 = data[2], m12 = data[3], m22 = data[4], m32 = data[5], m13 = data[6], m23 = data[7], m33 = data[8];
  20410. const tr = 1 + m11 + m22 + m33; // 1 + tr M
  20411. const sx = +(m32 >= m23) - +(m32 < m23); // sign(x)
  20412. const sy = +(m13 >= m31) - +(m13 < m31); // sign(y)
  20413. const sz = +(m21 >= m12) - +(m21 < m12); // sign(z)
  20414. const w = 0.5 * Math.sqrt(Math.max(0, tr)); // |w| = w
  20415. const x = 0.5 * Math.sqrt(Math.max(0, tr - 2 * (m22 + m33))); // |x|
  20416. const y = 0.5 * Math.sqrt(Math.max(0, tr - 2 * (m11 + m33))); // |y|
  20417. const z = 0.5 * Math.sqrt(Math.max(0, tr - 2 * (m11 + m22))); // |z|
  20418. const length = Math.sqrt(x * x + y * y + z * z + w * w); // should be ~ 1
  20419. this._x = (x * sx) / length;
  20420. this._y = (y * sy) / length;
  20421. this._z = (z * sz) / length;
  20422. this._w = w / length;
  20423. return this;
  20424. }
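/*
Illustrative round-trip check: converting a unit quaternion to a rotation
matrix and back should recover q or -q (both encode the same rotation);
since w >= 0 is picked here, a quaternion with positive w is recovered as-is:

    // const q0 = new Quaternion(0, 0, Math.SQRT1_2, Math.SQRT1_2);
    // const q1 = new Quaternion()._fromRotationMatrix(q0._toRotationMatrix());
    // q1 equals q0 up to floating-point error
*/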
  20425. /**
  20426. * Clone this quaternion
  20427. * @returns a clone of this quaternion
  20428. * @internal
  20429. */
  20430. _clone() {
  20431. return new Quaternion(this._x, this._y, this._z, this._w);
  20432. }
  20433. }
  20434. ;// CONCATENATED MODULE: ./src/geometry/vector3.ts
  20435. /*
  20436. * encantar.js
  20437. * GPU-accelerated Augmented Reality for the web
  20438. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20439. *
  20440. * This program is free software: you can redistribute it and/or modify
  20441. * it under the terms of the GNU Lesser General Public License as published
  20442. * by the Free Software Foundation, either version 3 of the License, or
  20443. * (at your option) any later version.
  20444. *
  20445. * This program is distributed in the hope that it will be useful,
  20446. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20447. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20448. * GNU Lesser General Public License for more details.
  20449. *
  20450. * You should have received a copy of the GNU Lesser General Public License
  20451. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20452. *
  20453. * vector3.ts
  20454. * 3D vectors
  20455. */
  20456. /** Small number */
  20457. const vector3_EPSILON = 1e-6;
  20458. /** Immutable zero vector */
  20459. let ZERO = null;
  20460. // public / non-internal methods do not change the contents of the vector
  20461. /**
  20462. * A vector in 3D space
  20463. */
  20464. class Vector3 {
  20465. /**
  20466. * Constructor
  20467. */
  20468. constructor(x = 0, y = 0, z = 0) {
  20469. this._x = +x;
  20470. this._y = +y;
  20471. this._z = +z;
  20472. }
  20473. /**
  20474. * Instantiate a zero vector
  20475. * @returns a new zero vector
  20476. */
  20477. static Zero() {
  20478. return new Vector3(0, 0, 0);
  20479. }
  20480. /**
  20481. * Immutable zero vector
  20482. * @returns an immutable zero vector
  20483. */
  20484. static get ZERO() {
  20485. return ZERO || (ZERO = Object.freeze(Vector3.Zero()));
  20486. }
  20487. /**
  20488. * The x coordinate of the vector
  20489. */
  20490. get x() {
  20491. return this._x;
  20492. }
  20493. /**
  20494. * The y coordinate of the vector
  20495. */
  20496. get y() {
  20497. return this._y;
  20498. }
  20499. /**
  20500. * The z coordinate of the vector
  20501. */
  20502. get z() {
  20503. return this._z;
  20504. }
  20505. /**
  20506. * The length of this vector
  20507. * @returns sqrt(x^2 + y^2 + z^2)
  20508. */
  20509. length() {
  20510. const x = this._x;
  20511. const y = this._y;
  20512. const z = this._z;
  20513. return Math.sqrt(x * x + y * y + z * z);
  20514. }
  20515. /**
  20516. * Compute the dot product of this and v
  20517. * @param v a vector
  20518. * @returns the dot product of the vectors
  20519. */
  20520. dot(v) {
  20521. return this._x * v._x + this._y * v._y + this._z * v._z;
  20522. }
  20523. /**
  20524. * Compute the distance between points this and v
  20525. * @param v a vector / point
  20526. * @returns the distance between the points
  20527. */
  20528. distanceTo(v) {
  20529. const dx = this._x - v._x;
  20530. const dy = this._y - v._y;
  20531. const dz = this._z - v._z;
  20532. return Math.sqrt(dx * dx + dy * dy + dz * dz);
  20533. }
  20534. /**
  20535. * Compute the direction from this to v
  20536. * @param v a vector
  20537. * @returns a new unit vector pointing to v from this
  20538. */
  20539. directionTo(v) {
  20540. return v._clone()._subtract(this)._normalize();
  20541. }
  20542. /**
  20543. * The cross product of this and v
  20544. * @param v a vector
  20545. * @returns the cross product this x v
  20546. */
  20547. cross(v) {
  20548. const x = this._y * v._z - this._z * v._y;
  20549. const y = this._z * v._x - this._x * v._z;
  20550. const z = this._x * v._y - this._y * v._x;
  20551. return new Vector3(x, y, z);
  20552. }
  20553. /**
  20554. * Check if this and v have the same coordinates
  20555. * @param v a vector
  20556. * @returns true if this and v have the same coordinates
  20557. */
  20558. equals(v) {
  20559. return this._x === v._x && this._y === v._y && this._z === v._z;
  20560. }
  20561. /**
  20562. * Convert to string
  20563. * @returns a string
  20564. */
  20565. toString() {
  20566. const x = this._x.toFixed(5);
  20567. const y = this._y.toFixed(5);
  20568. const z = this._z.toFixed(5);
  20569. return `Vector3(${x},${y},${z})`;
  20570. }
  20571. /**
  20572. * Set the coordinates of this vector
  20573. * @param x x-coordinate
  20574. * @param y y-coordinate
  20575. * @param z z-coordinate
  20576. * @returns this vector
  20577. * @internal
  20578. */
  20579. _set(x, y, z) {
  20580. this._x = +x;
  20581. this._y = +y;
  20582. this._z = +z;
  20583. return this;
  20584. }
  20585. /**
  20586. * Copy v to this
  20587. * @param v a vector
  20588. * @returns this vector
  20589. * @internal
  20590. */
  20591. _copyFrom(v) {
  20592. this._x = v._x;
  20593. this._y = v._y;
  20594. this._z = v._z;
  20595. return this;
  20596. }
  20597. /**
  20598. * Normalize this vector
  20599. * @returns this vector, normalized
  20600. * @internal
  20601. */
  20602. _normalize() {
  20603. const length = this.length();
  20604. if (length < vector3_EPSILON) // zero?
  20605. return this;
  20606. this._x /= length;
  20607. this._y /= length;
  20608. this._z /= length;
  20609. return this;
  20610. }
  20611. /**
  20612. * Add v to this vector
  20613. * @param v a vector
  20614. * @returns this vector
  20615. * @internal
  20616. */
  20617. _add(v) {
  20618. this._x += v._x;
  20619. this._y += v._y;
  20620. this._z += v._z;
  20621. return this;
  20622. }
  20623. /**
  20624. * Subtract v from this vector
  20625. * @param v a vector
  20626. * @returns this vector
  20627. * @internal
  20628. */
  20629. _subtract(v) {
  20630. this._x -= v._x;
  20631. this._y -= v._y;
  20632. this._z -= v._z;
  20633. return this;
  20634. }
  20635. /**
  20636. * Scale this vector by a scalar
  20637. * @param s scalar
  20638. * @returns this vector
  20639. * @internal
  20640. */
  20641. _scale(s) {
  20642. this._x *= s;
  20643. this._y *= s;
  20644. this._z *= s;
  20645. return this;
  20646. }
  20647. /**
  20648. * Compute the rotation q p q* in place, where q is a unit quaternion,
  20649. * q* is its conjugate and multiplicative inverse, and p is this vector
  20650. * @param q unit quaternion
  20651. * @returns this vector
  20652. * @internal
  20653. */
  20654. _applyRotationQuaternion(q) {
  20655. // based on Quaternion._toRotationMatrix()
  20656. const x = q.x, y = q.y, z = q.z, w = q.w;
  20657. const vx = this._x, vy = this._y, vz = this._z;
  20658. const x2 = x * x, y2 = y * y, z2 = z * z;
  20659. const xy = 2 * x * y, xz = 2 * x * z, yz = 2 * y * z;
  20660. const wx = 2 * w * x, wy = 2 * w * y, wz = 2 * w * z;
  20661. this._x = (1 - 2 * (y2 + z2)) * vx + (xy - wz) * vy + (xz + wy) * vz;
  20662. this._y = (xy + wz) * vx + (1 - 2 * (x2 + z2)) * vy + (yz - wx) * vz;
  20663. this._z = (xz - wy) * vx + (yz + wx) * vy + (1 - 2 * (x2 + y2)) * vz;
  20664. return this;
  20665. }
  20666. /**
  20667. * Clone this vector
  20668. * @returns a clone of this vector
  20669. * @internal
  20670. */
  20671. _clone() {
  20672. return new Vector3(this._x, this._y, this._z);
  20673. }
  20674. }
  20675. ;// CONCATENATED MODULE: ./src/geometry/pose-filter.ts
  20676. /*
  20677. * encantar.js
  20678. * GPU-accelerated Augmented Reality for the web
  20679. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20680. *
  20681. * This program is free software: you can redistribute it and/or modify
  20682. * it under the terms of the GNU Lesser General Public License as published
  20683. * by the Free Software Foundation, either version 3 of the License, or
  20684. * (at your option) any later version.
  20685. *
  20686. * This program is distributed in the hope that it will be useful,
  20687. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20688. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20689. * GNU Lesser General Public License for more details.
  20690. *
  20691. * You should have received a copy of the GNU Lesser General Public License
  20692. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20693. *
  20694. * pose-filter.ts
  20695. * Smoothing filter for a pose
  20696. */
  20697. /** Number of translation samples */
  20698. const TRANSLATION_SAMPLES = 5;
  20699. /** Number of rotation samples */
  20700. const ROTATION_SAMPLES = 12;
  20701. /** A vector representing "no translation" */
  20702. const NO_TRANSLATION = Vector3.Zero();
  20703. /** A quaternion representing "no rotation" */
  20704. const NO_ROTATION = Quaternion.Identity();
  20705. /** The zero quaternion */
  20706. const ZERO_QUATERNION = new Quaternion(0, 0, 0, 0);
  20707. /**
  20708. * Smoothing filter for a pose
  20709. */
  20710. class PoseFilter {
  20711. /**
  20712. * Constructor
  20713. */
  20714. constructor() {
  20715. this._smoothRotation = Quaternion.Identity();
  20716. this._smoothTranslation = Vector3.Zero();
  20717. this._rotationSample = Array.from({ length: ROTATION_SAMPLES }, () => Quaternion.Identity());
  20718. this._translationSample = Array.from({ length: TRANSLATION_SAMPLES }, () => Vector3.Zero());
  20719. this._isEmpty = true;
  20720. }
  20721. /**
  20722. * Reset the filter
  20723. */
  20724. reset() {
  20725. this._rotationSample.forEach(q => q._copyFrom(NO_ROTATION));
  20726. this._translationSample.forEach(t => t._copyFrom(NO_TRANSLATION));
  20727. this._isEmpty = true;
  20728. }
  20729. /**
  20730. * Feed the filter with a sample
  20731. * @param sample 3x4 [ R | t ] matrix
  20732. * @returns true on success
  20733. */
  20734. feed(sample) {
  20735. const data = sample.read();
  20736. // sanity check
  20737. if (sample.rows != 3 || sample.columns != 4)
  20738. throw new IllegalArgumentError();
  20739. // discard invalid samples
  20740. if (Number.isNaN(data[0] * data[9])) // rotation, translation
  20741. return false;
  20742. // store sample
  20743. const q = this._rotationSample[ROTATION_SAMPLES - 1];
  20744. for (let i = ROTATION_SAMPLES - 1; i > 0; i--)
  20745. this._rotationSample[i] = this._rotationSample[i - 1];
  20746. this._rotationSample[0] = q._fromRotationMatrix(sample.block(0, 2, 0, 2));
  20747. const t = this._translationSample[TRANSLATION_SAMPLES - 1];
  20748. for (let i = TRANSLATION_SAMPLES - 1; i > 0; i--)
  20749. this._translationSample[i] = this._translationSample[i - 1];
  20750. this._translationSample[0] = t._set(data[9], data[10], data[11]);
  20751. // empty buffers?
  20752. if (this._isEmpty) {
  20753. this._rotationSample.forEach((q, i) => i > 0 && q._copyFrom(this._rotationSample[0]));
  20754. this._translationSample.forEach((t, i) => i > 0 && t._copyFrom(this._translationSample[0]));
  20755. this._isEmpty = false;
  20756. }
  20757. // done!
  20758. return true;
  20759. }
  20760. /**
  20761. * Run the filter
  20762. * @returns a 3x4 [ R | t ] matrix
  20763. */
  20764. output() {
  20765. // how many samples should we use, at most?
  20766. const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
  20767. const T = Math.ceil(TRANSLATION_SAMPLES / div);
  20768. const R = Math.ceil(ROTATION_SAMPLES / div);
  20769. // clear the output of the filter
  20770. const t = this._smoothTranslation._copyFrom(NO_TRANSLATION);
  20771. const q = this._smoothRotation._copyFrom(ZERO_QUATERNION);
  20772. // average translations
  20773. for (let i = 0, d = 2 / (T * T + T); i < T; i++) {
  20774. const ti = this._translationSample[i];
  20775. const w = (T - i) * d;
  20776. // weighted avg: sum from i=0 to T-1 { (T-i) * t[i] } * (2/(T^2+T))
  20777. t._set(t.x + ti.x * w, t.y + ti.y * w, t.z + ti.z * w);
  20778. }
  20779. // average *nearby* rotations
  20780. // based on https://web.archive.org/web/20130514122622/http://wiki.unity3d.com/index.php/Averaging_Quaternions_and_Vectors
  20781. // reminder: a unit quaternion q may be expressed as
  20782. // cos t + u sin t, where 2t is a rotation angle and u is a rotation axis
  20783. for (let i = 0; i < R; i++) {
  20784. const qi = this._rotationSample[i];
  20785. const w = 1 / R; //(R - (i - i%2)) / R;
  20786. // since unit quaternions qi and -qi encode the same rotation
  20787. // (see quaternion.ts), let's enforce dot(qi, 1) = qi.w >= 0
  20788. if (qi.w < 0) {
  20789. // XXX since Quaternion._fromRotationMatrix() computes w >= 0,
  20790. // this will never happen. Leave this here for extra safety
  20791. // in case anything changes?
  20792. qi._set(-qi.x, -qi.y, -qi.z, -qi.w);
  20793. }
  20794. q._set(q.x + qi.x * w, q.y + qi.y * w, q.z + qi.z * w, q.w + qi.w * w);
  20795. }
  20796. //q._normalize();
  20797. // convert to matrix form and return
  20798. const entries = q._toRotationMatrix().read();
  20799. entries.push(t.x, t.y, t.z);
  20800. return speedy_vision_default().Matrix(3, 4, entries);
  20801. }
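/*
Sanity check of the translation weights used above (illustrative sketch):
with w_i = (T - i) * 2 / (T^2 + T), the weights sum to 1, because
sum of (T - i) for i = 0..T-1 equals T (T + 1) / 2, and newer samples
(smaller i) weigh more.

    // const T = 5, d = 2 / (T * T + T);
    // const weights = Array.from({ length: T }, (_, i) => (T - i) * d);
    // weights = [ 1/3, 4/15, 1/5, 2/15, 1/15 ], sum = 1
*/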
  20802. }
  20803. ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
  20804. /*
  20805. * encantar.js
  20806. * GPU-accelerated Augmented Reality for the web
  20807. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20808. *
  20809. * This program is free software: you can redistribute it and/or modify
  20810. * it under the terms of the GNU Lesser General Public License as published
  20811. * by the Free Software Foundation, either version 3 of the License, or
  20812. * (at your option) any later version.
  20813. *
  20814. * This program is distributed in the hope that it will be useful,
  20815. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20816. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20817. * GNU Lesser General Public License for more details.
  20818. *
  20819. * You should have received a copy of the GNU Lesser General Public License
  20820. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20821. *
  20822. * camera-model.ts
  20823. * Camera model
  20824. */
  20825. /** A guess of the horizontal field-of-view of a typical camera, in degrees */
  20826. const HFOV_GUESS = 60; // https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html
  20827. /** The default scale of the image plane. The scale affects the focal length */
  20828. const DEFAULT_SCALE = 2; // the length of the [-1,+1] interval
  20829. /** Convert degrees to radians */
  20830. const DEG2RAD = 0.017453292519943295; // pi / 180
  20831. /** Convert radians to degrees */
  20832. const RAD2DEG = 57.29577951308232; // 180 / pi
  20833. /** Numerical tolerance */
  20834. const camera_model_EPSILON = 1e-6;
  20835. /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
  20836. const FX = 0;
  20837. /** Index of the vertical focal length in the camera intrinsics matrix */
  20838. const FY = 4;
  20839. /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
  20840. const U0 = 6;
  20841. /** Index of the vertical position of the principal point in the camera intrinsics matrix */
  20842. const V0 = 7;
  20843. /** Number of iterations used to refine the estimated pose */
  20844. const POSE_REFINEMENT_ITERATIONS = 30;
  20845. /** Maximum number of iterations used when refining the translation vector */
  20846. const TRANSLATION_REFINEMENT_ITERATIONS = 15;
  20847. /** Tolerance used to exit early when refining the translation vector */
  20848. const TRANSLATION_REFINEMENT_TOLERANCE = DEFAULT_SCALE * 0.01;
  20849. /** Size of the grid used to refine the translation vector */
  20850. const TRANSLATION_REFINEMENT_GRIDSIZE = 5; //3;
  20851. /**
  20852. * Camera model
  20853. */
  20854. class CameraModel {
  20855. /**
  20856. * Constructor
  20857. */
  20858. constructor() {
  20859. this._imageSize = speedy_vision_default().Size(0, 0);
  20860. this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
  20861. this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // 3x3 identity matrix
  20862. this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // 3x4 matrix [ R | t ] = [ I | 0 ] no rotation & no translation
  20863. this._filter = new PoseFilter();
  20864. this._flipZ = speedy_vision_default().Matrix(4, 4, [
  20865. 1, 0, 0, 0,
  20866. 0, 1, 0, 0,
  20867. 0, 0, -1, 0,
  20868. 0, 0, 0, 1
  20869. ]);
  20870. }
  20871. /**
  20872. * Initialize the model
  20873. * @param aspectRatio aspect ratio of the image plane
  20874. * @param scale optional scale factor of the image plane
  20875. */
  20876. init(aspectRatio, scale = DEFAULT_SCALE) {
  20877. // log
  20878. Utils.log(`Initializing the camera model...`);
  20879. Utils.assert(aspectRatio > 0 && scale > 1e-5);
  20880. // set the size of the image plane
  20881. // this rule is conceived so that min(w,h) = s and w/h = a
  20882. if (aspectRatio >= 1) {
  20883. this._imageSize.width = aspectRatio * scale;
  20884. this._imageSize.height = scale;
  20885. }
  20886. else {
  20887. this._imageSize.width = scale;
  20888. this._imageSize.height = scale / aspectRatio;
  20889. }
  20890. // reset the model
  20891. this.reset();
  20892. }
  20893. /**
  20894. * Release the model
  20895. */
  20896. release() {
  20897. this.reset();
  20898. return null;
  20899. }
  20900. /**
  20901. * Update the camera model
  20902. * @param homographyNDC 3x3 perspective transform
  20903. * @returns a promise that resolves to a camera matrix
  20904. */
  20905. update(homographyNDC) {
  20906. Utils.assert(homographyNDC.rows == 3 && homographyNDC.columns == 3);
  20907. // convert to image space
  20908. const homography = this._convertToImageSpace(homographyNDC);
  20909. // read the entries of the homography
  20910. const h = homography.read();
  20911. const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
  20912. // validate the homography (homography matrices aren't singular)
  20913. const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
  20914. if (Math.abs(det) < camera_model_EPSILON || Number.isNaN(det))
  20915. return speedy_vision_default().Promise.reject(new NumericalError(`Can't update the camera model using an invalid homography matrix`));
  20916. // estimate the pose
  20917. const pose = this._estimatePose(homography);
  20918. if (this._filter.feed(pose))
  20919. this._extrinsics = this._filter.output().read();
  20920. // compute the camera matrix
  20921. const Z = this._flipZ; // switch to a right handed system
  20922. const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
  20923. const E = speedy_vision_default().Matrix(3, 4, this._extrinsics);
  20924. this._matrix.setToSync(K.times(E).times(Z));
  20925. /*
  20926. // test
  20927. console.log("homography ------------", homography.toString());
  20928. console.log("intrinsics ------------", K.toString());
  20929. console.log("extrinsics ------------", E.toString());
  20930. console.log("extrinsicsINV ---------", Speedy.Matrix(this.computeViewMatrix().inverse()).toString());
  20931. console.log("matrix ----------------", this._matrix.toString());
  20932. console.log("projectionMatrix ----- ", this.computeProjectionMatrix(0.1,100).toString());
  20933. */
  20934. // done!
  20935. return speedy_vision_default().Promise.resolve(this._matrix);
  20936. }
  20937. /**
  20938. * Reset the camera model
  20939. */
  20940. reset() {
  20941. this._resetIntrinsics();
  20942. this._resetExtrinsics();
  20943. }
  20944. /**
  20945. * The 3x4 camera matrix
  20946. */
  20947. get matrix() {
  20948. return this._matrix;
  20949. }
  20950. /**
  20951. * The size of the image plane
  20952. */
  20953. get imageSize() {
  20954. return this._imageSize;
  20955. }
  20956. /**
  20957. * The aspect ratio of the image
  20958. */
  20959. get aspectRatio() {
  20960. return this._imageSize.width / this._imageSize.height;
  20961. }
  20962. /**
  20963. * Focal length in "pixels" (projection distance in the pinhole camera model)
  20964. * same as (focal length in mm) * (number of "pixels" per world unit in "pixels"/mm)
  20965. * "pixels" means image plane units
  20966. */
  20967. get focalLength() {
  20968. return this._intrinsics[FX]; // fx == fy
  20969. }
  20970. /**
  20971. * Horizontal field-of-view, given in radians
  20972. */
  20973. get fovx() {
  20974. const halfWidth = this._imageSize.width / 2;
  20975. return 2 * Math.atan(halfWidth / this._intrinsics[FX]);
  20976. }
  20977. /**
  20978. * Vertical field-of-view, given in radians
  20979. */
  20980. get fovy() {
  20981. const halfHeight = this._imageSize.height / 2;
  20982. return 2 * Math.atan(halfHeight / this._intrinsics[FY]);
  20983. }
  20984. /**
  20985. * Compute the view matrix. This 4x4 matrix moves 3D points from
  20986. * world space to view space. We want the camera looking in the
  20987. * direction of the negative z-axis (WebGL-friendly)
  20988. * @returns a view matrix
  20989. */
  20990. computeViewMatrix() {
  20991. const E = this._extrinsics;
  20992. // We augment the 3x4 extrinsics matrix E with the [ 0 0 0 1 ] row
  20993. // and get E+. Let Z be 4x4 flipZ, the identity matrix with the third
  20994. // column negated. The following matrix is View = Z * E+ * Z. We get
  20995. // the camera looking in the direction of the negative z-axis in a
  20996. // right handed system!
  20997. return speedy_vision_default().Matrix(4, 4, [
  20998. E[0], E[1], -E[2], 0,
  20999. E[3], E[4], -E[5], 0,
  21000. -E[6], -E[7], +E[8], 0,
  21001. E[9], E[10], -E[11], 1 // t
  21002. ]);
  21003. }
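/*
Equivalent (heavier, illustrative) construction of the view matrix via the
flip-Z conjugation described above, using the Speedy API already employed
in this class:

    // const Eplus = Speedy.Matrix(4, 4, [
    //     E[0], E[1], E[2], 0,
    //     E[3], E[4], E[5], 0,
    //     E[6], E[7], E[8], 0,
    //     E[9], E[10], E[11], 1
    // ]);
    // const view = Speedy.Matrix(this._flipZ.times(Eplus).times(this._flipZ));
*/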
  21004. /**
  21005. * Compute a perspective projection matrix for WebGL
  21006. * @param near distance of the near plane
  21007. * @param far distance of the far plane
  21008. */
  21009. computeProjectionMatrix(near, far) {
  21010. const fx = this._intrinsics[FX];
  21011. const fy = this._intrinsics[FY];
  21012. const halfWidth = this._imageSize.width / 2;
  21013. const halfHeight = this._imageSize.height / 2;
  21014. // we assume that the principal point is at the center of the image plane
  21015. const right = near * (halfWidth / fx);
  21016. const top = near * (halfHeight / fy);
  21017. //const top = right * (halfHeight / halfWidth); // same thing
  21018. const bottom = -top, left = -right; // symmetric frustum
  21019. // a derivation of this projection matrix can be found at
  21020. // https://www.songho.ca/opengl/gl_projectionmatrix.html
  21021. // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
  21022. return speedy_vision_default().Matrix(4, 4, [
  21023. 2 * near / (right - left), 0, 0, 0,
  21024. 0, 2 * near / (top - bottom), 0, 0,
  21025. (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
  21026. 0, 0, -2 * far * near / (far - near), 0
  21027. ]);
  21028. }
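/*
Illustrative note on the frustum bounds above: halfWidth / fx == tan(fovx / 2),
so the near-plane half extents grow with the field of view. For example, with
the default intrinsics guess (HFOV_GUESS = 60 degrees, landscape image plane)
and near = 0.1:

    // const right = near * Math.tan(30 * DEG2RAD);   // ~ 0.0577
*/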
  21029. /**
  21030. * Reset camera extrinsics
  21031. */
  21032. _resetExtrinsics() {
  21033. // set the rotation matrix to the identity
  21034. this._extrinsics.fill(0);
  21035. this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
  21036. // reset filter
  21037. this._filter.reset();
  21038. }
  21039. /**
  21040. * Reset camera intrinsics
  21041. */
  21042. _resetIntrinsics() {
  21043. const cameraWidth = Math.max(this._imageSize.width, this._imageSize.height); // portrait or landscape?
  21044. const u0 = 0; // principal point at the center of the image plane
  21045. const v0 = 0;
  21046. const fx = (cameraWidth / 2) / Math.tan(DEG2RAD * HFOV_GUESS / 2);
  21047. const fy = fx;
  21048. this._intrinsics[FX] = fx;
  21049. this._intrinsics[FY] = fy;
  21050. this._intrinsics[U0] = u0;
  21051. this._intrinsics[V0] = v0;
  21052. }
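/*
Sketch of the pinhole relation used above: fx is chosen so that the guessed
horizontal field of view is reproduced, since tan(fovx / 2) = (width / 2) / fx.
For a landscape image plane (cameraWidth == width), for example:

    // const fx = (cameraWidth / 2) / Math.tan(DEG2RAD * 60 / 2);
    // 2 * Math.atan((cameraWidth / 2) / fx) == 60 * DEG2RAD   (the HFOV_GUESS)
*/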
  21053. /**
  21054. * Convert a homography from NDC to image space
  21055. * @param homographyNDC
  21056. * @returns a new homography
  21057. */
  21058. _convertToImageSpace(homographyNDC) {
  21059. const w = this._imageSize.width / 2;
  21060. const h = this._imageSize.height / 2;
  21061. // fromNDC converts points from NDC to image space
  21062. const fromNDC = speedy_vision_default().Matrix(3, 3, [
  21063. w, 0, 0,
  21064. 0, h, 0,
  21065. 0, 0, 1
  21066. ]);
  21067. /*
  21068. // make h33 = 1 (wanted?)
  21069. const data = homographyNDC.read();
  21070. const h33 = data[8];
  21071. const hom = homographyNDC.times(1/h33);
  21072. */
  21073. // convert homography
  21074. return speedy_vision_default().Matrix(fromNDC.times(homographyNDC));
  21075. }
  21076. /**
  21077. * Compute a normalized homography H^ = K^(-1) * H for an
  21078. * ideal pinhole with f = 1 and principal point = (0,0)
  21079. * @param homography homography H to be normalized
  21080. * @returns normalized homography H^
  21081. */
  21082. _normalizeHomography(homography) {
  21083. const u0 = this._intrinsics[U0];
  21084. const v0 = this._intrinsics[V0];
  21085. const fx = this._intrinsics[FX];
  21086. const fy = this._intrinsics[FY];
  21087. const u0fx = u0 / fx;
  21088. const v0fy = v0 / fy;
  21089. const h = homography.read();
  21090. const h11 = h[0] / fx - u0fx * h[2], h12 = h[3] / fx - u0fx * h[5], h13 = h[6] / fx - u0fx * h[8];
  21091. const h21 = h[1] / fy - v0fy * h[2], h22 = h[4] / fy - v0fy * h[5], h23 = h[7] / fy - v0fy * h[8];
  21092. const h31 = h[2], h32 = h[5], h33 = h[8];
  21093. /*console.log([
  21094. h11, h21, h31,
  21095. h12, h22, h32,
  21096. h13, h23, h33,
  21097. ]);*/
  21098. return speedy_vision_default().Matrix(3, 3, [
  21099. h11, h21, h31,
  21100. h12, h22, h32,
  21101. h13, h23, h33,
  21102. ]);
  21103. }
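/*
The element-wise expressions above are K^(-1) * H spelled out for the
intrinsics K = [ fx 0 u0 ; 0 fy v0 ; 0 0 1 ], whose inverse is
[ 1/fx 0 -u0/fx ; 0 1/fy -v0/fy ; 0 0 1 ]. An equivalent (heavier,
illustrative) construction with the Speedy API used in this class:

    // const K = Speedy.Matrix(3, 3, [ fx, 0, 0, 0, fy, 0, u0, v0, 1 ]); // column-major
    // const normalized = Speedy.Matrix(K.inverse().times(homography));
*/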
  21104. /**
  21105. * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
  21106. * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
  21107. * @returns a 3x3 matrix
  21108. */
  21109. _estimatePartialPose(normalizedHomography) {
  21110. const h = normalizedHomography.read();
  21111. const h11 = h[0], h12 = h[3], h13 = h[6];
  21112. const h21 = h[1], h22 = h[4], h23 = h[7];
  21113. const h31 = h[2], h32 = h[5], h33 = h[8];
  21114. const h1norm2 = h11 * h11 + h21 * h21 + h31 * h31;
  21115. const h2norm2 = h12 * h12 + h22 * h22 + h32 * h32;
  21116. const h1norm = Math.sqrt(h1norm2);
  21117. const h2norm = Math.sqrt(h2norm2);
  21118. //const hnorm = (h1norm + h2norm) / 2;
  21119. //const hnorm = Math.sqrt(h1norm * h2norm);
  21120. const hnorm = Math.max(h1norm, h2norm); // this seems to work. why?
  21121. // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
  21122. // if h1norm is not approximately h2norm, it means that the first two columns of
  21123. // the normalized homography are not really encoding a rotation (up to a scale)
  21124. //console.log("h1,h2",h1norm,h2norm);
  21125. //console.log(normalizedHomography.toString());
  21126. // compute a rough estimate for the scale factor
  21127. // select the sign so that t3 = tz > 0
  21128. const sign = h33 >= 0 ? 1 : -1;
  21129. let scale = sign / hnorm;
  21130. // sanity check
  21131. if (Number.isNaN(scale))
  21132. return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
  21133. // recover the rotation
  21134. let r = new Array(6);
  21135. r[0] = scale * h11;
  21136. r[1] = scale * h21;
  21137. r[2] = scale * h31;
  21138. r[3] = scale * h12;
  21139. r[4] = scale * h22;
  21140. r[5] = scale * h32;
  21141. // refine the rotation (r is initially noisy)
  21142. r = this._refineRotation(r);
  21143. /*
  21144. After refining the rotation vectors, let's adjust the scale factor as
  21145. follows:
  21146. We know that [ r1 | r2 | t ] is equal to the normalized homography H up
  21147. to a non-zero scale factor s, i.e., [ r1 | r2 | t ] = s H. Let's call M
  21148. the first two columns of H, i.e., M = [ h1 | h2 ], and R = [ r1 | r2 ].
  21149. It follows that R = s M, meaning that M'R = s M'M. The trace of 2x2 M'R
  21150. is such that tr(M'R) = tr(s M'M) = s tr(M'M), which means:
  21151. s = tr(M'R) / tr(M'M) = (r1'h1 + r2'h2) / (h1'h1 + h2'h2)
  21152. (also: s^2 = det(M'R) / det(M'M))
  21153. */
  21154. // adjust the scale factor
  21155. scale = r[0] * h11 + r[1] * h21 + r[2] * h31;
  21156. scale += r[3] * h12 + r[4] * h22 + r[5] * h32;
  21157. scale /= h1norm2 + h2norm2;
  21158. // recover the translation
  21159. let t = new Array(3);
  21160. t[0] = scale * h13;
  21161. t[1] = scale * h23;
  21162. t[2] = scale * h33;
  21163. // done!
  21164. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  21165. }
  21166. /**
  21167. * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
  21168. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  21169. * @returns a 3x2 matrix R such that R'R = I (column-major format)
  21170. */
  21171. _refineRotation(rot) {
  21172. const [r11, r21, r31, r12, r22, r32] = rot;
  21173. /*
  21174. A little technique I figured out to correct the rotation vectors
  21175. ----------------------------------------------------------------
  21176. We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
  21177. R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
  21178. because vectors r1 and r2 are not perfectly orthonormal due to noise.
  21179. Let's first notice that R'R is symmetric. You can easily check that its
  21180. two eigenvalues are both real and positive (as long as r1, r2 != 0 and
  21181. r1 is not parallel to r2, but we never take such vectors as input).
21182. R'R = [ r1'r1   r1'r2 ]   is of rank 2, positive-definite
21183.       [ r1'r2   r2'r2 ]
  21184. We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
  21185. chosen to be orthogonal and D is a diagonal matrix whose entries are
  21186. the eigenvalues of R'R.
  21187. Let LL' be the Cholesky decomposition of D. Such decomposition exists
  21188. and is trivially computed: just take the square roots of the entries of
  21189. D. Since L is diagonal, we have L = L'. Its inverse is also trivially
  21190. computed - call it Linv.
  21191. Now, define a 2x2 correction matrix C as follows:
  21192. C = Q * Linv * Q'
  21193. This matrix rotates the input vector, scales it by some amount, and
  21194. then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
  21195. We compute RC in order to correct the rotation vectors. We take its
  21196. two columns as the corrected vectors.
  21197. In order to show that the two columns of RC are orthonormal, we can
  21198. show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
  21199. expand the expression:
  21200. (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
  21201. Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
  21202. Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
  21203. I have provided below a closed formula to correct the rotation vectors.
  21204. What C does to R is very interesting: it makes the singular values
  21205. become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
  21206. values of R are the square roots of the eigenvalues of R'R. Letting
  21207. S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
  21208. means that RC is equivalent to the correction "trick" using the SVD
  21209. found in the computer vision literature (i.e., compute the SVD and
  21210. return U V'). That "trick" is known to return the rotation matrix that
  21211. minimizes the Frobenius norm of the difference between the input and
  21212. the output. Consequently, the technique I have just presented is also
  21213. optimal in that sense!
  21214. By the way, the input matrix R does not need to be 3x2.
  21215. */
  21216. // compute the entries of R'R
  21217. const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
  21218. const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
  21219. const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
  21220. // compute the two real eigenvalues of R'R
  21221. const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
  21222. const sqrt = Math.sqrt(delta); // delta >= 0 always
  21223. const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
  21224. const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
  21225. // compute two unit eigenvectors qi = (xi,yi) of R'R
  21226. const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
  21227. const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
  21228. const y1 = x1 / alpha1;
  21229. const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
  21230. const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
  21231. const y2 = x2 / alpha2;
  21232. // compute the Cholesky decomposition LL' of the diagonal matrix D
  21233. // whose entries are the two eigenvalues of R'R and then invert L
  21234. const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
  21235. /*
  21236. const Linv = Speedy.Matrix(2, 2, [1/s1, 0, 0, 1/s2]); // L inverse
  21237. // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
  21238. // is orthogonal and Linv is computed as above
  21239. const Q = Speedy.Matrix(2, 2, [x1, y1, x2, y2]);
  21240. const Qt = Speedy.Matrix(2, 2, [x1, x2, y1, y2]);
  21241. const C = Q.times(Linv).times(Qt);
  21242. // correct the rotation vectors r1 and r2 using C
  21243. const R = Speedy.Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
  21244. return Speedy.Matrix(R.times(C)).read();
  21245. */
  21246. // find C = Q * Linv * Q' manually
  21247. // [ a b ] is symmetric
  21248. // [ b c ]
  21249. const a = x1 * x1 / s1 + x2 * x2 / s2;
  21250. const b = x1 * y1 / s1 + x2 * y2 / s2;
  21251. const c = y1 * y1 / s1 + y2 * y2 / s2;
  21252. // find RC manually
  21253. return [
  21254. a * r11 + b * r12,
  21255. a * r21 + b * r22,
  21256. a * r31 + b * r32,
  21257. b * r11 + c * r12,
  21258. b * r21 + c * r22,
  21259. b * r31 + c * r32
  21260. ];
  21261. }
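/*
Illustrative check of the correction above: the two output columns of RC
should be orthonormal, i.e.

    // const [c11, c21, c31, c12, c22, c32] = this._refineRotation(rot);
    // c11*c11 + c21*c21 + c31*c31 ~ 1   (|r1| = 1)
    // c12*c12 + c22*c22 + c32*c32 ~ 1   (|r2| = 1)
    // c11*c12 + c21*c22 + c31*c32 ~ 0   (r1 . r2 = 0)
*/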
  21262. /**
  21263. * Compute a refined translation vector
  21264. * @param normalizedHomography ideal pinhole K = I
  21265. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  21266. * @param t0 initial estimate for the translation vector
  21267. * @returns 3x1 translation vector in column-major format
  21268. */
  21269. _refineTranslation(normalizedHomography, rot, t0) {
  21270. /*
  21271. Given a normalized homography H, the rotation vectors r1, r2, and a
  21272. translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
  21273. scale factor s.
  21274. If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
  21275. [ r1 | r2 | t ] u is parallel to H u, which means that their cross
  21276. product is zero:
  21277. [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
  21278. The following code finds an optimal translation vector t based on the
  21279. above observation. H, r1, r2 are known.
  21280. */
  21281. const h = normalizedHomography.read();
  21282. const h11 = h[0], h12 = h[3], h13 = h[6];
  21283. const h21 = h[1], h22 = h[4], h23 = h[7];
  21284. const h31 = h[2], h32 = h[5], h33 = h[8];
  21285. const r11 = rot[0], r12 = rot[3];
  21286. const r21 = rot[1], r22 = rot[4];
  21287. const r31 = rot[2], r32 = rot[5];
  21288. // generate a grid of sample points [ xi yi ]' in the image
  21289. //const x = [ 0, -1, +1, +1, -1 ];
  21290. //const y = [ 0, -1, -1, +1, +1 ];
  21291. const g = TRANSLATION_REFINEMENT_GRIDSIZE;
  21292. const x = new Array(g * g);
  21293. const y = new Array(g * g);
  21294. const halfWidth = this._imageSize.width / 2;
  21295. const halfHeight = this._imageSize.height / 2;
  21296. for (let k = 0, i = 0; i < g; i++) {
  21297. for (let j = 0; j < g; j++, k++) {
  21298. // in [-1,+1]
  21299. x[k] = (i / (g - 1)) * 2 - 1;
  21300. y[k] = (j / (g - 1)) * 2 - 1;
  21301. // in [-s/2,+s/2], where s = w,h
  21302. x[k] *= halfWidth;
  21303. y[k] *= halfHeight;
  21304. }
  21305. }
  21306. //console.log(x.toString(), y.toString());
  21307. // set auxiliary values: ai = H [ xi yi 1 ]'
  21308. const n = x.length;
  21309. const a1 = new Array(n);
  21310. const a2 = new Array(n);
  21311. const a3 = new Array(n);
  21312. for (let i = 0; i < n; i++) {
  21313. a1[i] = x[i] * h11 + y[i] * h12 + h13;
  21314. a2[i] = x[i] * h21 + y[i] * h22 + h23;
  21315. a3[i] = x[i] * h31 + y[i] * h32 + h33;
  21316. }
  21317. // we'll solve M t = v for t with linear least squares
  21318. // M: 3n x 3, v: 3n x 1, t: 3 x 1
  21319. const n3 = 3 * n;
  21320. const m = new Array(n3 * 3);
  21321. const v = new Array(n3);
  21322. for (let i = 0, k = 0; k < n; i += 3, k++) {
  21323. m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
  21324. m[i + n3] = -(m[i + 1] = a3[k]);
  21325. m[i + 2] = -(m[i + n3 + n3] = a2[k]);
  21326. m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
  21327. v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
  21328. v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
  21329. v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
  21330. }
  21331. /*
  21332. // this works, but I want more lightweight
  21333. const M = Speedy.Matrix(n3, 3, m);
  21334. const v_ = Speedy.Matrix(n3, 1, v);
  21335. return Speedy.Matrix(M.ldiv(v_)).read();
  21336. */
  21337. /*
  21338. Gradient descent with optimal step size / learning rate
  21339. -------------------------------------------------------
  21340. Let's find the column-vector x that minimizes the error function
  21341. E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
  21342. least squares. We want to find x easily, QUICKLY and iteratively.
  21343. The update rule of gradient descent is set to:
  21344. x := x - w * grad(E)
  21345. where w is the learning rate and grad(E) is the gradient of E(x):
  21346. grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
  21347. Let's adjust w to make x "converge quickly". Define function S(w) as:
  21348. S(w) = x - w * grad(E) (step)
  21349. and another function F(w) as:
  21350. F(w) = E(S(w))
  21351. which is the error of the step. We minimize F by setting its derivative
  21352. to zero:
21353. 0 = dF/dw = (dF/dS) (dS/dw)
  21355. What follows is a fair amount of algebra. Do the math and you'll find
  21356. the following optimal update rule:
21357. x := x - [ (c'c) / ((Ac)'(Ac)) ] c
  21360. where c = A'r = A'(Ax - b)
  21361. */
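/*
A sketch filling in that algebra: write g = grad(E) = 2c, so that S(w) = x - w g and

    F(w) = || A (x - w g) - b ||^2 = || r - w (A g) ||^2
    dF/dw = -2 (A g)' ( r - w (A g) ) = 0
    =>  w* = (A g)'r / ( (A g)'(A g) ) = ( 2 (A c)'r ) / ( 4 (A c)'(A c) )

Since (A c)'r = c'(A'r) = c'c, the step x := x - w* g = x - 2 w* c reduces to
the update rule above: x := x - [ (c'c) / ((A c)'(A c)) ] c.
*/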
  21362. // gradient descent: super lightweight implementation
  21363. const r = new Array(3 * n);
  21364. const c = new Array(3);
  21365. const Mc = new Array(3 * n);
  21366. // initial guess
  21367. const t = new Array(3);
  21368. t[0] = t0[0];
  21369. t[1] = t0[1];
  21370. t[2] = t0[2];
  21371. // iterate
  21372. for (let it = 0; it < TRANSLATION_REFINEMENT_ITERATIONS; it++) {
  21373. //console.log("it",it+1);
  21374. // compute residual r = Mt - v
  21375. for (let i = 0; i < n3; i++) {
  21376. r[i] = 0;
  21377. for (let j = 0; j < 3; j++)
  21378. r[i] += m[j * n3 + i] * t[j];
  21379. r[i] -= v[i];
  21380. }
  21381. // compute c = M'r
  21382. for (let i = 0; i < 3; i++) {
  21383. c[i] = 0;
  21384. for (let j = 0; j < n3; j++)
  21385. c[i] += m[i * n3 + j] * r[j];
  21386. }
  21387. // compute Mc
  21388. for (let i = 0; i < n3; i++) {
  21389. Mc[i] = 0;
  21390. for (let j = 0; j < 3; j++)
  21391. Mc[i] += m[j * n3 + i] * c[j];
  21392. }
  21393. // compute c'c
  21394. let num = 0;
  21395. for (let i = 0; i < 3; i++)
  21396. num += c[i] * c[i];
  21397. //console.log("c'c=",num," at #",it+1);
  21398. if (num < TRANSLATION_REFINEMENT_TOLERANCE)
  21399. break;
  21400. // compute (Mc)'(Mc)
  21401. let den = 0;
  21402. for (let i = 0; i < n3; i++)
  21403. den += Mc[i] * Mc[i];
  21404. // compute frc = c'c / (Mc)'(Mc)
  21405. const frc = num / den;
  21406. if (Number.isNaN(frc)) // this shouldn't happen
  21407. break;
  21408. // iterate: t = t - frc * c
  21409. for (let i = 0; i < 3; i++)
  21410. t[i] -= frc * c[i];
  21411. }
  21412. //console.log("OLD t:\n\n",t0.join('\n'));
  21413. //console.log("new t:\n\n",t.join('\n'));
  21414. // done!
  21415. return t;
  21416. }
  21417. /**
  21418. * Find a 3x3 rotation matrix R given two orthonormal vectors [ r1 | r2 ]
  21419. * @param partialRotation partial rotation matrix [ r1 | r2 ] in column-major format
  21420. * @returns a rotation matrix R in column-major format
  21421. */
  21422. _computeFullRotation(partialRotation) {
  21423. const r11 = partialRotation[0], r12 = partialRotation[3];
  21424. const r21 = partialRotation[1], r22 = partialRotation[4];
  21425. const r31 = partialRotation[2], r32 = partialRotation[5];
  21426. // r3 = +- ( r1 x r2 )
  21427. let r13 = r21 * r32 - r31 * r22;
  21428. let r23 = r31 * r12 - r11 * r32;
  21429. let r33 = r11 * r22 - r21 * r12;
  21430. // let's make sure that det R = +1 (keep the orientation)
  21431. const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
  21432. if (det < 0) {
  21433. r13 = -r13;
  21434. r23 = -r23;
  21435. r33 = -r33;
  21436. }
  21437. // done!
  21438. return [
  21439. r11, r21, r31,
  21440. r12, r22, r32,
  21441. r13, r23, r33
  21442. ];
  21443. }
  21444. /**
  21445. * Estimate the pose [ R | t ] given a homography in sensor space
  21446. * @param homography must be valid
  21447. * @returns 3x4 matrix
  21448. */
  21449. _estimatePose(homography) {
  21450. const normalizedHomography = this._normalizeHomography(homography);
  21451. const partialPose = speedy_vision_default().Matrix.Eye(3);
  21452. // we want the estimated partial pose [ r1 | r2 | t ] to be as close
  21453. // as possible to the normalized homography, up to a scale factor;
  21454. // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
  21455. // it won't be a perfect equality due to noise in the homography.
  21456. // remark: composition of homographies
  21457. const residual = speedy_vision_default().Matrix(normalizedHomography);
  21458. for (let k = 0; k < POSE_REFINEMENT_ITERATIONS; k++) {
  21459. // incrementally improve the partial pose
  21460. const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
  21461. partialPose.setToSync(rt.times(partialPose));
  21462. residual.setToSync(residual.times(rt.inverse()));
  21463. //console.log("rt",rt.toString());
  21464. //console.log("residual",residual.toString());
  21465. }
  21466. //console.log('-----------');
  21467. // read the partial pose
  21468. const mat = partialPose.read();
  21469. const r0 = mat.slice(0, 6);
  21470. const t0 = mat.slice(6, 9);
  21471. // refine the translation vector and compute the full rotation matrix
  21472. const t = this._refineTranslation(normalizedHomography, r0, t0);
  21473. const r = this._computeFullRotation(r0);
  21474. // done!
  21475. return speedy_vision_default().Matrix(3, 4, r.concat(t));
  21476. }
  21477. }
  21478. ;// CONCATENATED MODULE: ./src/geometry/pose.ts
  21479. /*
  21480. * encantar.js
  21481. * GPU-accelerated Augmented Reality for the web
  21482. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21483. *
  21484. * This program is free software: you can redistribute it and/or modify
  21485. * it under the terms of the GNU Lesser General Public License as published
  21486. * by the Free Software Foundation, either version 3 of the License, or
  21487. * (at your option) any later version.
  21488. *
  21489. * This program is distributed in the hope that it will be useful,
  21490. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21491. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21492. * GNU Lesser General Public License for more details.
  21493. *
  21494. * You should have received a copy of the GNU Lesser General Public License
  21495. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21496. *
  21497. * pose.ts
  21498. * A pose represents a position and an orientation in 3D space
  21499. */
  21500. /**
  21501. * A pose represents a position and an orientation in 3D space
  21502. */
  21503. class Pose {
  21504. /**
  21505. * Constructor
  21506. * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
  21507. */
  21508. constructor(transform) {
  21509. this._transform = transform;
  21510. }
  21511. /**
  21512. * A transform describing the position and the orientation
  21513. * of the pose relative to the 3D space to which it belongs
  21514. */
  21515. get transform() {
  21516. return this._transform;
  21517. }
  21518. }
  21519. ;// CONCATENATED MODULE: ./src/geometry/transform.ts
  21520. /*
  21521. * encantar.js
  21522. * GPU-accelerated Augmented Reality for the web
  21523. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21524. *
  21525. * This program is free software: you can redistribute it and/or modify
  21526. * it under the terms of the GNU Lesser General Public License as published
  21527. * by the Free Software Foundation, either version 3 of the License, or
  21528. * (at your option) any later version.
  21529. *
  21530. * This program is distributed in the hope that it will be useful,
  21531. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21532. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21533. * GNU Lesser General Public License for more details.
  21534. *
  21535. * You should have received a copy of the GNU Lesser General Public License
  21536. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21537. *
  21538. * transform.ts
  21539. * 3D transforms
  21540. */
  21541. /** Small number */
  21542. const transform_EPSILON = 1e-6;
  21543. /**
  21544. * A Transform represents a position, a rotation and a scale in 3D space
  21545. */
  21546. class Transform {
  21547. /**
  21548. * Constructor
  21549. * @param matrix a 4x4 transformation matrix. You should ensure that its form is T * R * S (translation * rotation * scale).
  21550. */
  21551. constructor(matrix) {
  21552. if (matrix.rows != 4 || matrix.columns != 4)
  21553. throw new IllegalArgumentError('A Transform expects a 4x4 transformation matrix');
  21554. this._matrix = matrix;
  21555. this._inverse = null;
  21556. this._position = Vector3.Zero();
  21557. this._orientation = Quaternion.Identity();
  21558. this._scale = new Vector3(1, 1, 1);
  21559. this._isDecomposed = false;
  21560. this._isPositionComputed = false;
  21561. this._right = Vector3.ZERO;
  21562. this._up = Vector3.ZERO;
  21563. this._forward = Vector3.ZERO;
  21564. }
  21565. /**
  21566. * The 4x4 transformation matrix
  21567. * This matrix is not meant to be changed. Changing it will not update the
  21568. * previously computed components of the transform!
  21569. */
  21570. get matrix() {
  21571. return this._matrix;
  21572. }
  21573. /**
  21574. * The inverse transform
  21575. */
  21576. get inverse() {
  21577. if (this._inverse === null)
  21578. this._inverse = new Transform(this._inverseMatrix());
  21579. return this._inverse;
  21580. }
  21581. /**
  21582. * The 3D position encoded by the transform
  21583. */
  21584. get position() {
  21585. if (!this._isPositionComputed)
  21586. this._computePosition();
  21587. return this._position;
  21588. }
  21589. /**
  21590. * A unit quaternion describing the rotational component of the transform
  21591. */
  21592. get orientation() {
  21593. if (!this._isDecomposed)
  21594. this._decompose();
  21595. return this._orientation;
  21596. }
  21597. /**
  21598. * The scale encoded by the transform
  21599. */
  21600. get scale() {
  21601. if (!this._isDecomposed)
  21602. this._decompose();
  21603. return this._scale;
  21604. }
  21605. /**
  21606. * Unit right vector of the local space
  21607. */
  21608. get right() {
  21609. if (this._right === Vector3.ZERO)
  21610. this._right = this._scaleAndRotate(new Vector3(1, 0, 0))._normalize();
  21611. return this._right;
  21612. }
  21613. /**
  21614. * Unit up vector of the local space
  21615. */
  21616. get up() {
  21617. if (this._up === Vector3.ZERO)
  21618. this._up = this._scaleAndRotate(new Vector3(0, 1, 0))._normalize();
  21619. return this._up;
  21620. }
  21621. /**
  21622. * Unit forward vector of the local space
  21623. */
  21624. get forward() {
  21625. if (this._forward === Vector3.ZERO) {
  21626. // in a right-handed system, the unit forward vector is (0, 0, -1)
  21627. // in a left-handed system, it is (0, 0, 1)
  21628. this._forward = this._scaleAndRotate(new Vector3(0, 0, -1))._normalize();
  21629. }
  21630. return this._forward;
  21631. }
  21632. /**
  21633. * Use this transform to scale and rotate a vector
  21634. * The translation component of the transform is ignored
  21635. * @param v a vector
  21636. * @returns input vector v
  21637. */
  21638. _scaleAndRotate(v) {
  21639. const m = this._matrix.read();
  21640. const h = Math.abs(m[15]) < transform_EPSILON ? Number.NaN : 1 / m[15]; // usually h = 1
  21641. const vx = v.x, vy = v.y, vz = v.z;
  21642. const x = m[0] * vx + m[4] * vy + m[8] * vz;
  21643. const y = m[1] * vx + m[5] * vy + m[9] * vz;
  21644. const z = m[2] * vx + m[6] * vy + m[10] * vz;
  21645. return v._set(x * h, y * h, z * h);
  21646. }
  21647. /**
  21648. * Decompose this transform
  21649. */
  21650. _decompose() {
  21651. /*
  21652. The shape of a 4x4 transform T * R * S is
  21653. [ RS t ]
  21654. [ 0' 1 ]
  21655. where S is a 3x3 diagonal matrix, R is a 3x3 rotation matrix, t is a
  21656. 3x1 translation vector and 0' is a 1x3 zero vector.
  21657. How do we decompose it?
  21658. 1) Decomposing the translation vector t is trivial
  21659. 2) Decomposing matrices R (rotation) and S (scale) can be done by
  21660. noticing that (RS)'(RS) = (S'R')(RS) = S'(R'R) S = S'S is diagonal
  21661. 3) Since R is a rotation matrix, we have det R = +1. This means that
  21662. det RS = det R * det S = det S. If det RS < 0, then we have a change
  21663. of handedness (i.e., a negative scale). We may flip the forward axis
  21664. (Z) and let the rotation matrix encode the rest of the transformation
  21665. 4) Use 2) and 3) to find a suitable S
  21666. 5) Compute R = (RS) * S^(-1)
  21667. */
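/*
A quick sanity check of step 2 (illustrative): if R is a rotation and
S = diag(sx, sy, sz), then

    (RS)'(RS) = S'(R'R)S = S'S = diag(sx^2, sy^2, sz^2)

so the squared scales appear on the diagonal, i.e., each scale is the norm of
the corresponding column of RS. For a uniform scale s this is simply s^2 * I.
*/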
  21668. const m = this._matrix.read();
  21669. const h = Math.abs(m[15]) < transform_EPSILON ? Number.NaN : 1 / m[15]; // usually h = 1
  21670. // find t
  21671. const tx = m[12] * h;
  21672. const ty = m[13] * h;
  21673. const tz = m[14] * h;
  21674. // find RS
  21675. const rs11 = m[0] * h;
  21676. const rs21 = m[1] * h;
  21677. const rs31 = m[2] * h;
  21678. const rs12 = m[4] * h;
  21679. const rs22 = m[5] * h;
  21680. const rs32 = m[6] * h;
  21681. const rs13 = m[8] * h;
  21682. const rs23 = m[9] * h;
  21683. const rs33 = m[10] * h;
  21684. // do we have a change of handedness?
  21685. const det = rs13 * (rs21 * rs32 - rs22 * rs31) + rs33 * (rs11 * rs22 - rs12 * rs21) - rs23 * (rs11 * rs32 - rs12 * rs31);
  21686. const sign = +(det >= 0) - +(det < 0);
  21687. // if det = 0, RS is not invertible!
21688. // find S: since (RS)'(RS) = S'S, each scale is the norm of the corresponding column of RS
21689. const sx = Math.sqrt(rs11 * rs11 + rs21 * rs21 + rs31 * rs31);
21690. const sy = Math.sqrt(rs12 * rs12 + rs22 * rs22 + rs32 * rs32);
21691. const sz = Math.sqrt(rs13 * rs13 + rs23 * rs23 + rs33 * rs33) * sign;
  21692. // zero scale?
  21693. if (sx < transform_EPSILON || sy < transform_EPSILON || sz * sign < transform_EPSILON) {
  21694. this._position._set(tx, ty, tz);
  21695. this._scale._set(sx, sy, sz);
  21696. this._orientation._copyFrom(Quaternion.Identity());
  21697. this._isDecomposed = true;
  21698. this._isPositionComputed = true;
  21699. return;
  21700. }
  21701. // find S^(-1)
  21702. const zx = 1 / sx;
  21703. const zy = 1 / sy;
  21704. const zz = 1 / sz;
  21705. // find R
  21706. const r11 = rs11 * zx;
  21707. const r21 = rs21 * zx;
  21708. const r31 = rs31 * zx;
  21709. const r12 = rs12 * zy;
  21710. const r22 = rs22 * zy;
  21711. const r32 = rs32 * zy;
  21712. const r13 = rs13 * zz;
  21713. const r23 = rs23 * zz;
  21714. const r33 = rs33 * zz;
  21715. // set the components
  21716. this._position._set(tx, ty, tz);
  21717. this._scale._set(sx, sy, sz);
  21718. this._orientation._fromRotationMatrix(speedy_vision_default().Matrix(3, 3, [
  21719. r11, r21, r31,
  21720. r12, r22, r32,
  21721. r13, r23, r33
  21722. ]));
  21723. // done!
  21724. this._isDecomposed = true;
  21725. this._isPositionComputed = true;
  21726. }
  21727. /**
  21728. * A simpler decomposition routine.
  21729. * Sometimes we just need the position.
  21730. */
  21731. _computePosition() {
  21732. const m = this._matrix.read();
  21733. const h = Math.abs(m[15]) < transform_EPSILON ? Number.NaN : 1 / m[15]; // usually h = 1
  21734. // find t
  21735. this._position._set(m[12] * h, m[13] * h, m[14] * h);
  21736. // done!
  21737. this._isPositionComputed = true;
  21738. }
  21739. /**
  21740. * Compute the inverse matrix of this transform
  21741. * @returns the inverse matrix
  21742. */
  21743. _inverseMatrix() {
  21744. // test
  21745. //console.log(Speedy.Matrix(this._matrix.inverse().times(this._matrix)).toString());
21746. // this general-purpose inverse works and is straightforward; an analytic alternative is sketched below
  21747. return speedy_vision_default().Matrix(this._matrix.inverse());
  21748. /*
  21749. Simple analytic method
  21750. ----------------------
21751. The inverse of a 4x4 transform T * R * S of the form
21752. [ RS t ]       [ ZR'  -ZR't ]
21753. [ 0' 1 ]   is  [ 0'     1   ]
  21754. where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
  21755. R is a rotation matrix; S is a diagonal matrix
  21756. */
  21757. /*
  21758. // decompose the transform
  21759. if(!this._isDecomposed)
  21760. this._decompose();
  21761. // find t
  21762. const tx = this._position.x;
  21763. const ty = this._position.y;
  21764. const tz = this._position.z;
  21765. // find S (typically 1, but not very accurate)
  21766. const sx = this._scale.x;
  21767. const sy = this._scale.y;
  21768. const sz = this._scale.z;
  21769. // sanity check
  21770. if(Math.abs(sx) < EPSILON || Math.abs(sy) < EPSILON || Math.abs(sz) < EPSILON) {
  21771. //throw new IllegalOperationError('Not an invertible transform: ' + this._matrix.toString());
  21772. return Speedy.Matrix(4, 4, new Array(16).fill(Number.NaN)); // more friendly behavior
  21773. }
  21774. // find R
  21775. const r = this._rotation.read();
  21776. const r11 = r[0];
  21777. const r21 = r[1];
  21778. const r31 = r[2];
  21779. const r12 = r[3];
  21780. const r22 = r[4];
  21781. const r32 = r[5];
  21782. const r13 = r[6];
  21783. const r23 = r[7];
  21784. const r33 = r[8];
  21785. // find Z = S^(-1)
  21786. const zx = 1 / sx;
  21787. const zy = 1 / sy;
  21788. const zz = 1 / sz;
  21789. // compute Z R'
  21790. const zr11 = zx * r11;
  21791. const zr21 = zy * r12;
  21792. const zr31 = zz * r13;
  21793. const zr12 = zx * r21;
  21794. const zr22 = zy * r22;
  21795. const zr32 = zz * r23;
  21796. const zr13 = zx * r31;
  21797. const zr23 = zy * r32;
  21798. const zr33 = zz * r33;
  21799. // compute -Z R't
  21800. const zrt1 = -(tx * zr11 + ty * zr12 + tz * zr13);
  21801. const zrt2 = -(tx * zr21 + ty * zr22 + tz * zr23);
  21802. const zrt3 = -(tx * zr31 + ty * zr32 + tz * zr33);
  21803. // test
  21804. console.log('inverse', Speedy.Matrix(Speedy.Matrix(4, 4, [
  21805. zr11, zr21, zr31, 0,
  21806. zr12, zr22, zr32, 0,
  21807. zr13, zr23, zr33, 0,
  21808. zrt1, zrt2, zrt3, 1
  21809. ]).times(this._matrix)).toString());
  21810. console.log('rotation', Speedy.Matrix(
  21811. this._rotation.transpose().times(this._rotation)
  21812. ).toString());
  21813. console.log('scale', this._scale);
  21814. // done!
  21815. return Speedy.Matrix(4, 4, [
  21816. zr11, zr21, zr31, 0,
  21817. zr12, zr22, zr32, 0,
  21818. zr13, zr23, zr33, 0,
  21819. zrt1, zrt2, zrt3, 1
  21820. ]);
  21821. */
  21822. }
  21823. }
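/*
Usage sketch (illustrative, not part of the library): decomposing a simple
translation-only transform. Speedy matrices take their entries in column-major
order, as elsewhere in this bundle; `Speedy` stands for the bundled
speedy-vision module.

    const matrix = Speedy.Matrix(4, 4, [
        1, 0, 0, 0,   // column 1
        0, 1, 0, 0,   // column 2
        0, 0, 1, 0,   // column 3
        2, 3, 4, 1    // column 4: translation (2, 3, 4)
    ]);
    const transform = new Transform(matrix);
    transform.position;    // Vector3 (2, 3, 4)
    transform.orientation; // identity quaternion
    transform.scale;       // Vector3 (1, 1, 1)
*/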
  21824. ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
  21825. /*
  21826. * encantar.js
  21827. * GPU-accelerated Augmented Reality for the web
  21828. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21829. *
  21830. * This program is free software: you can redistribute it and/or modify
  21831. * it under the terms of the GNU Lesser General Public License as published
  21832. * by the Free Software Foundation, either version 3 of the License, or
  21833. * (at your option) any later version.
  21834. *
  21835. * This program is distributed in the hope that it will be useful,
  21836. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21837. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21838. * GNU Lesser General Public License for more details.
  21839. *
  21840. * You should have received a copy of the GNU Lesser General Public License
  21841. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21842. *
  21843. * viewer-pose.ts
  21844. * The pose of a virtual camera in 3D world space at a moment in time
  21845. */
  21846. /**
  21847. * The pose of a virtual camera in 3D world space at a moment in time
  21848. */
  21849. class ViewerPose extends Pose {
  21850. /**
  21851. * Constructor
  21852. * @param camera camera model
  21853. */
  21854. constructor(camera) {
  21855. const viewMatrix = camera.computeViewMatrix();
  21856. const modelMatrix = speedy_vision_default().Matrix(viewMatrix.inverse());
  21857. const transform = new Transform(modelMatrix);
  21858. super(transform);
  21859. this._viewMatrix = viewMatrix;
  21860. }
  21861. /**
  21862. * This 4x4 matrix moves 3D points from world space to view space.
  21863. * We assume that the camera is looking in the direction of the
  21864. * negative z-axis (WebGL-friendly)
  21865. */
  21866. get viewMatrix() {
  21867. return this._viewMatrix;
  21868. }
  21869. }
  21870. ;// CONCATENATED MODULE: ./src/geometry/view.ts
  21871. /*
  21872. * encantar.js
  21873. * GPU-accelerated Augmented Reality for the web
  21874. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21875. *
  21876. * This program is free software: you can redistribute it and/or modify
  21877. * it under the terms of the GNU Lesser General Public License as published
  21878. * by the Free Software Foundation, either version 3 of the License, or
  21879. * (at your option) any later version.
  21880. *
  21881. * This program is distributed in the hope that it will be useful,
  21882. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21883. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21884. * GNU Lesser General Public License for more details.
  21885. *
  21886. * You should have received a copy of the GNU Lesser General Public License
  21887. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21888. *
  21889. * view.ts
  21890. * A view of the 3D world at a moment in time,
  21891. * featuring the means to project points into clip space
  21892. */
  21893. /** Default distance of the near plane to the optical center of the camera */
  21894. const DEFAULT_NEAR = 0.1;
  21895. /** Default distance of the far plane to the optical center of the camera */
  21896. const DEFAULT_FAR = 10000 * DEFAULT_NEAR;
  21897. /**
  21898. * A PerspectiveView is a View defining a symmetric frustum around the z-axis
  21899. * (perspective projection)
  21900. */
  21901. class PerspectiveView {
  21902. /**
  21903. * Constructor
  21904. * @param camera camera model
  21905. * @param near distance of the near plane
  21906. * @param far distance of the far plane
  21907. */
  21908. constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  21909. this._near = +near;
  21910. this._far = +far;
  21911. if (this._near >= this._far)
  21912. throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
  21913. else if (this._near <= 0)
  21914. throw new IllegalArgumentError(`View expects a positive near (found ${this._near})`);
  21915. this._camera = camera;
  21916. this._projectionMatrix = camera.computeProjectionMatrix(this._near, this._far);
  21917. this._inverseProjection = null;
  21918. }
  21919. /**
  21920. * A 4x4 projection matrix for WebGL
  21921. */
  21922. get projectionMatrix() {
  21923. return this._projectionMatrix;
  21924. }
  21925. /**
  21926. * The inverse of the projection matrix
  21927. * @internal
  21928. */
  21929. get _projectionMatrixInverse() {
  21930. if (this._inverseProjection === null)
  21931. this._inverseProjection = speedy_vision_default().Matrix(this._projectionMatrix.inverse());
  21932. return this._inverseProjection;
  21933. }
  21934. /**
  21935. * Aspect ratio of the frustum
  21936. */
  21937. get aspect() {
  21938. return this._camera.aspectRatio;
  21939. }
  21940. /**
  21941. * Horizontal field-of-view of the frustum, measured in radians
  21942. */
  21943. get fovx() {
  21944. return this._camera.fovx;
  21945. }
  21946. /**
  21947. * Vertical field-of-view of the frustum, measured in radians
  21948. */
  21949. get fovy() {
  21950. return this._camera.fovy;
  21951. }
  21952. /**
  21953. * Distance of the near plane
  21954. */
  21955. get near() {
  21956. return this._near;
  21957. }
  21958. /**
  21959. * Distance of the far plane
  21960. */
  21961. get far() {
  21962. return this._far;
  21963. }
  21964. }
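/*
For reference, a symmetric perspective frustum in WebGL conventions is usually
encoded by the following 4x4 matrix (rows shown), with f = 1 / tan(fovy / 2).
This is only the standard textbook form; the exact matrix returned by
camera.computeProjectionMatrix() is defined by the camera model elsewhere in
this bundle.

    [ f/aspect   0    0                         0                      ]
    [ 0          f    0                         0                      ]
    [ 0          0    (far+near)/(near-far)     2*far*near/(near-far)  ]
    [ 0          0   -1                         0                      ]
*/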
  21965. ;// CONCATENATED MODULE: ./src/geometry/ray.ts
  21966. /*
  21967. * encantar.js
  21968. * GPU-accelerated Augmented Reality for the web
  21969. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21970. *
  21971. * This program is free software: you can redistribute it and/or modify
  21972. * it under the terms of the GNU Lesser General Public License as published
  21973. * by the Free Software Foundation, either version 3 of the License, or
  21974. * (at your option) any later version.
  21975. *
  21976. * This program is distributed in the hope that it will be useful,
  21977. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21978. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21979. * GNU Lesser General Public License for more details.
  21980. *
  21981. * You should have received a copy of the GNU Lesser General Public License
  21982. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21983. *
  21984. * ray.ts
  21985. * Rays
  21986. */
  21987. /**
  21988. * A ray with origin and direction
  21989. */
  21990. class Ray {
  21991. /**
  21992. * Constructor
  21993. * @param origin a point
  21994. * @param direction a unit vector
  21995. */
  21996. constructor(origin, direction) {
  21997. this._origin = origin;
  21998. this._direction = direction;
  21999. }
  22000. /**
  22001. * The origin point of the ray
  22002. */
  22003. get origin() {
  22004. return this._origin;
  22005. }
  22006. /**
  22007. * The direction of the ray, a unit vector
  22008. */
  22009. get direction() {
  22010. return this._direction;
  22011. }
  22012. }
  22013. ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
  22014. /*
  22015. * encantar.js
  22016. * GPU-accelerated Augmented Reality for the web
  22017. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22018. *
  22019. * This program is free software: you can redistribute it and/or modify
  22020. * it under the terms of the GNU Lesser General Public License as published
  22021. * by the Free Software Foundation, either version 3 of the License, or
  22022. * (at your option) any later version.
  22023. *
  22024. * This program is distributed in the hope that it will be useful,
  22025. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22026. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22027. * GNU Lesser General Public License for more details.
  22028. *
  22029. * You should have received a copy of the GNU Lesser General Public License
  22030. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22031. *
22032. * viewer.ts
  22033. * A viewer represents a virtual camera in 3D world space
  22034. */
  22035. /**
  22036. * A viewer represents a virtual camera in 3D world space
  22037. */
  22038. class Viewer {
  22039. /**
  22040. * Constructor
  22041. * @param camera camera model
  22042. */
  22043. constructor(camera) {
  22044. this._pose = new ViewerPose(camera);
  22045. this._views = [new PerspectiveView(camera)];
  22046. }
  22047. /**
  22048. * The pose of this viewer
  22049. */
  22050. get pose() {
  22051. return this._pose;
  22052. }
  22053. /**
  22054. * The view of this viewer (only for monoscopic rendering)
  22055. */
  22056. get view() {
  22057. /*
  22058. if(this._views.length > 1)
  22059. throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
  22060. */
  22061. return this._views[0];
  22062. }
  22063. /**
  22064. * The views of this viewer
  22065. */
  22066. /*
  22067. get views(): View[]
  22068. {
  22069. return this._views.concat([]);
  22070. }
  22071. */
  22072. /**
  22073. * Convert a pose from world space to viewer space
  22074. * @param pose a pose in world space
  22075. * @returns a pose in viewer space
  22076. */
  22077. convertToViewerSpace(pose) {
  22078. const modelMatrix = pose.transform.matrix;
  22079. const viewMatrix = this._pose.viewMatrix;
  22080. const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
  22081. const transform = new Transform(modelViewMatrix);
  22082. return new Pose(transform);
  22083. }
  22084. /**
  22085. * Cast a ray from a point in the image space associated with this Viewer
  22086. * @param position a point in image space, given in normalized units [-1,1]x[-1,1]
  22087. * @returns a ray in world space that corresponds to the given point
  22088. */
  22089. raycast(position) {
  22090. const projectionMatrixInverse = this.view._projectionMatrixInverse;
  22091. const viewMatrixInverse = this._pose.transform.matrix;
  22092. const pointInClipSpace = speedy_vision_default().Matrix(4, 1, [
  22093. // Normalized Device Coordinates (NDC)
  22094. position.x,
  22095. position.y,
22096. 0, // (*) see the note below
  22097. 1 // homogeneous coordinates
  22098. ]);
  22099. const pointInViewSpace = projectionMatrixInverse.times(pointInClipSpace);
  22100. const pointInWorldSpace = viewMatrixInverse.times(pointInViewSpace);
  22101. const p = speedy_vision_default().Matrix(pointInWorldSpace).read();
  22102. /*
  22103. (*) since we're just interested in the direction, any z coordinate in
  22104. clip space [-1,1] will give us a suitable point p in world space.
  22105. */
  22106. const origin = this._pose.transform.position;
  22107. const direction = new Vector3(p[0] / p[3], p[1] / p[3], p[2] / p[3])
  22108. ._subtract(origin)._normalize();
  22109. return new Ray(origin, direction);
  22110. }
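/*
Usage sketch (illustrative): picking a ray from a pointer event. The names
`viewer`, `canvas` and `event` are assumptions of this sketch; raycast() only
needs a point whose x and y lie in [-1,1] x [-1,1] (shown here as a plain
object; the library's vector type also works if available).

    const rect = canvas.getBoundingClientRect();
    const x = 2 * (event.clientX - rect.left) / rect.width - 1;
    const y = 1 - 2 * (event.clientY - rect.top) / rect.height; // flip y: NDC grows upwards
    const ray = viewer.raycast({ x, y });
    // ray.origin and ray.direction are given in world space
*/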
  22111. /**
  22112. * Compute a ray in the forward direction from the viewer
  22113. * @returns a new ray in world space
  22114. */
  22115. forwardRay() {
  22116. const origin = this._pose.transform.position;
  22117. const direction = this._pose.transform.forward;
  22118. return new Ray(origin, direction);
  22119. }
  22120. }
  22121. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
  22122. /*
  22123. * encantar.js
  22124. * GPU-accelerated Augmented Reality for the web
  22125. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22126. *
  22127. * This program is free software: you can redistribute it and/or modify
  22128. * it under the terms of the GNU Lesser General Public License as published
  22129. * by the Free Software Foundation, either version 3 of the License, or
  22130. * (at your option) any later version.
  22131. *
  22132. * This program is distributed in the hope that it will be useful,
  22133. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22134. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22135. * GNU Lesser General Public License for more details.
  22136. *
  22137. * You should have received a copy of the GNU Lesser General Public License
  22138. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22139. *
  22140. * tracking.ts
  22141. * Tracking state of the Image Tracker
  22142. */
22143. /** Whether or not we want to accelerate GPU-CPU transfers. Using turbo adds a slight delay to the tracking */
  22144. const USE_TURBO = true;
  22145. /** Number of PBOs; meaningful only when using turbo */
  22146. const NUMBER_OF_PBOS = 2;
  22147. /**
  22148. * The tracking state of the Image Tracker tracks
  22149. * keypoints of the image target and updates the
  22150. * rectification matrix
  22151. */
  22152. class ImageTrackerTrackingState extends ImageTrackerState {
  22153. /**
  22154. * Constructor
  22155. * @param imageTracker
  22156. */
  22157. constructor(imageTracker) {
  22158. super('tracking', imageTracker);
  22159. this._referenceImage = null;
  22160. this._warpHomography = speedy_vision_default().Matrix.Eye(3);
  22161. this._poseHomography = speedy_vision_default().Matrix.Eye(3);
  22162. this._templateKeypoints = [];
  22163. this._initialScreenSize = speedy_vision_default().Size(1, 1);
  22164. this._lastOutput = {};
  22165. this._lastPipelineOutput = { keypoints: [] };
  22166. this._skipCounter = 0;
  22167. this._counter = 0;
  22168. this._lostCounter = 0;
  22169. this._camera = new CameraModel();
  22170. this._fixedCamera = new CameraModel();
  22171. }
  22172. /**
  22173. * Called as soon as this becomes the active state, just before update() runs for the first time
  22174. * @param settings
  22175. */
  22176. onEnterState(settings) {
  22177. const homography = settings.homography; // NDC, from reference image to video
  22178. const referenceImage = settings.referenceImage;
  22179. const templateKeypoints = settings.templateKeypoints;
  22180. const templateKeypointPortalSink = settings.templateKeypointPortalSink;
  22181. const initialScreenSize = settings.initialScreenSize; // this.screenSize is not yet set
  22182. const keypointPortalSource = this._pipeline.node('keypointPortalSource');
  22183. // this shouldn't happen
  22184. if (!referenceImage)
  22185. throw new IllegalOperationError(`Can't track a null reference image`);
  22186. // set attributes
  22187. this._referenceImage = referenceImage;
  22188. this._warpHomography = speedy_vision_default().Matrix(homography);
  22189. this._poseHomography = speedy_vision_default().Matrix(homography);
  22190. this._templateKeypoints = templateKeypoints;
  22191. this._initialScreenSize = speedy_vision_default().Size(initialScreenSize.width, initialScreenSize.height);
  22192. this._lastOutput = {};
  22193. this._lastPipelineOutput = { keypoints: [] };
  22194. this._skipCounter = 0;
  22195. this._counter = 0;
  22196. this._lostCounter = 0;
  22197. // setup portals
  22198. keypointPortalSource.source = templateKeypointPortalSink;
  22199. // setup the cameras
  22200. const aspectRatio = initialScreenSize.width / initialScreenSize.height;
  22201. this._camera.init(aspectRatio);
  22202. this._fixedCamera.init(aspectRatio);
  22203. // emit event
  22204. const ev = new ImageTrackerEvent('targetfound', referenceImage);
  22205. this._imageTracker.dispatchEvent(ev);
  22206. // log
  22207. Utils.log(`Tracking image "${referenceImage.name}"...`);
  22208. }
  22209. /**
  22210. * Called when leaving the state
  22211. */
  22212. onLeaveState() {
  22213. // log
  22214. Utils.log(`No longer tracking image "${this._referenceImage.name}"!`);
  22215. // release the cameras
  22216. this._fixedCamera.release();
  22217. this._camera.release();
  22218. // emit event
  22219. const ev = new ImageTrackerEvent('targetlost', this._referenceImage);
  22220. this._imageTracker.dispatchEvent(ev);
  22221. }
  22222. /**
  22223. * Called just before the GPU processing
  22224. * @returns promise
  22225. */
  22226. _beforeUpdate() {
  22227. const imageRectifier = this._pipeline.node('imageRectifier');
  22228. const borderClipper = this._pipeline.node('borderClipper');
  22229. const keypointScaler = this._pipeline.node('keypointScaler');
  22230. const screenSize = this.screenSize;
  22231. /*
  22232. // pause media (test)
  22233. const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
  22234. const media = source.media as SpeedyMedia;
  22235. (media.source as HTMLVideoElement).pause();
  22236. */
  22237. // clip keypoints from the borders of the target image
  22238. borderClipper.imageSize = screenSize;
  22239. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  22240. // convert keypoints to NIS
  22241. keypointScaler.transform = ImageTrackerUtils.rasterToNIS(screenSize);
  22242. // rectify the image
  22243. const scale = TRACK_RECTIFIED_SCALE;
  22244. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(screenSize, this._referenceImage);
  22245. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  22246. const undistort = this._warpHomography.inverse();
  22247. const toScreen = ImageTrackerUtils.NDCToRaster(screenSize);
  22248. const toNDC = ImageTrackerUtils.rasterToNDC(screenSize);
  22249. return imageRectifier.transform.setTo(toScreen.times(shrink.times(undistort)).times(toNDC)).then(() => void 0);
  22250. }
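/*
A note on the rectification transform set above, reading the composition
toScreen * shrink * undistort * toNDC from right to left: raster coordinates
are first mapped to NDC; the current warp homography is then undone, so that
the tracked target appears roughly front-facing; the result is scaled by the
best-fit factor; and finally mapped back to raster coordinates for the
perspective warp node.
*/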
  22251. /**
  22252. * GPU processing
  22253. * @returns promise with the pipeline results
  22254. */
  22255. _gpuUpdate() {
  22256. // Run the pipeline as usual
  22257. if (!USE_TURBO || Settings.powerPreference == 'low-power') // || Settings.powerPreference == 'high-performance')
  22258. return super._gpuUpdate();
  22259. // When using turbo, we reduce the GPU usage by skipping every other frame
  22260. if (0 == (this._skipCounter = 1 - this._skipCounter)) {
  22261. const templateKeypoints = this._templateKeypoints;
  22262. const previousKeypoints = this._lastPipelineOutput.keypoints;
  22263. //const currentKeypoints = this._predictKeypoints(previousKeypoints, templateKeypoints);
  22264. const currentKeypoints = previousKeypoints; // this actually works
  22265. this._lastPipelineOutput.keypoints = currentKeypoints;
  22266. return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
  22267. }
  22268. // Run the pipeline and store the results
  22269. return super._gpuUpdate().then(result => {
  22270. this._lastPipelineOutput = result;
  22271. return result;
  22272. });
  22273. }
  22274. /**
  22275. * Post processing that takes place just after the GPU processing
  22276. * @param result pipeline results
  22277. * @returns state output
  22278. */
  22279. _afterUpdate(result) {
  22280. const keypoints = result.keypoints;
  22281. const image = result.image;
  22282. const referenceImage = this._referenceImage;
  22283. const screenSize = this.screenSize;
  22284. // track the target
  22285. return speedy_vision_default().Promise.resolve()
  22286. .then(() => {
  22287. // if a change in screen size occurs, we need to recalibrate
  22288. // (perform a new pre-training)
  22289. if (!screenSize.equals(this._initialScreenSize))
  22290. throw new TrackingError('Detected a change in screen size');
  22291. // find matching pairs of keypoints
  22292. const allPairs = this._findMatchingPairs(this._templateKeypoints, keypoints);
  22293. const pairs = ImageTrackerUtils.refineMatchingPairs(allPairs);
  22294. if (pairs.length < TRACK_MIN_MATCHES)
  22295. throw new TrackingError('Not enough data points to continue the tracking');
  22296. // find motion models
  22297. const points = ImageTrackerUtils.compilePairsOfKeypointsNDC(pairs);
  22298. return speedy_vision_default().Promise.all([
  22299. this._findAffineMotionNDC(points),
  22300. this._findPerspectiveMotionNDC(points)
  22301. ]);
  22302. })
  22303. .then(([affineMotion, perspectiveMotion]) => {
  22304. const lowPower = (Settings.powerPreference == 'low-power');
  22305. const delay = NUMBER_OF_PBOS * (!lowPower ? 2 : 1);
  22306. // update warp homography
  22307. if (!USE_TURBO || this._counter % delay == 1) // skip the first frame (PBOs)
  22308. this._warpHomography.setToSync(affineMotion.times(this._warpHomography));
  22309. // update pose homography
  22310. this._poseHomography.setToSync(perspectiveMotion.times(this._warpHomography));
  22311. if (Number.isNaN(this._poseHomography.at(0, 0)))
  22312. throw new NumericalError('Bad homography'); // normalize? 1 / h33
  22313. // update counter
  22314. this._counter = (this._counter + 1) % delay;
  22315. /*
  22316. // test
  22317. console.log("POSE ", this._poseHomography.toString());
  22318. console.log("WARP ", this._warpHomography.toString());
  22319. console.log("AMOT ", Speedy.Matrix(affineMotion).toString());
  22320. console.log("PMOT ", Speedy.Matrix(perspectiveMotion).toString());
  22321. */
  22322. // We transform the keypoints of the reference image to NDC as a
  22323. // convenience. However, doing so distorts the aspect ratio. Here
  22324. // we undo the distortion.
  22325. //const scale = ImageTrackerUtils.inverseBestFitScaleNDC(referenceImage.aspectRatio); // not preferred; extrapolates the bounds of NDC
  22326. const scale = ImageTrackerUtils.bestFitScaleNDC(1 / referenceImage.aspectRatio); // preferred
  22327. const homography = speedy_vision_default().Matrix(this._poseHomography.times(scale));
  22328. //this._poseHomography = homography; // visualize the polyline becoming a square
  22329. // update camera model
  22330. return this._camera.update(homography);
  22331. })
  22332. .then(() => {
  22333. /*
  22334. Q: should the camera move relative to the target image, or should
  22335. the target image move relative to the camera?
  22336. A: the target image should move and the camera should stay fixed.
  22337. Movements of the target image in the video should not affect the
  22338. rendering of all virtual elements in world space. They should
  22339. only affect the rendering of virtual elements positioned at the
  22340. local space linked to the target ("ar.root").
  22341. */
  22342. // the target moves and the camera stays fixed at the origin
  22343. const modelMatrix = this._camera.computeViewMatrix(); // p_view = V M p_model
  22344. const transform = new Transform(modelMatrix);
  22345. const pose = new Pose(transform);
  22346. const viewer = new Viewer(this._fixedCamera); // view matrix = I
  22347. /*
  22348. // this is the opposite reasoning: the camera moves and the target
  22349. // image stays fixed at the origin of world space
  22350. const modelMatrix = Speedy.Matrix.Eye(4);
  22351. const transform = new Transform(modelMatrix);
  22352. const pose = new Pose(transform);
  22353. const viewer = new Viewer(this._camera);
  22354. */
  22355. // the trackable object
  22356. const trackable = {
  22357. pose: pose,
  22358. referenceImage: referenceImage
  22359. };
  22360. // the result generated by the image tracker
  22361. const result = {
  22362. tracker: this._imageTracker,
  22363. trackables: [trackable],
  22364. viewer: viewer
  22365. };
  22366. // tracker output
  22367. const trackerOutput = {
  22368. exports: result,
  22369. keypoints: keypoints,
  22370. //keypointsNIS: image !== undefined ? keypoints : undefined, // debug only
  22371. image: image,
  22372. polylineNDC: ImageTrackerUtils.findPolylineNDC(this._poseHomography),
  22373. camera: this._camera,
  22374. };
  22375. // save the last output
  22376. this._lastOutput = trackerOutput;
  22377. // we have successfully tracked the target in this frame
  22378. this._lostCounter = 0;
  22379. // done!
  22380. return {
  22381. nextState: 'tracking',
  22382. trackerOutput: trackerOutput
  22383. };
  22384. })
  22385. .catch(err => {
  22386. // give some tolerance to tracking errors
  22387. if (err instanceof TrackingError) {
  22388. if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
  22389. return {
  22390. nextState: 'tracking',
  22391. trackerOutput: this._lastOutput
  22392. };
  22393. }
  22394. }
  22395. // log
  22396. Utils.warning(`The target has been lost! ${err.toString()}`);
  22397. // go back to the scanning state
  22398. return {
  22399. nextState: 'scanning',
  22400. trackerOutput: {}
  22401. };
  22402. });
  22403. }
  22404. /**
  22405. * Find an affine motion model in NDC between pairs of keypoints in NDC
  22406. * given as a 2 x 2n [ src | dest ] matrix
  22407. * @param points compiled pairs of keypoints in NDC
  22408. * @returns a promise that resolves to a 3x3 warp in NDC that maps source to destination
  22409. */
  22410. _findAffineMotionNDC(points) {
  22411. /*
  22412. We can probably get more accurate motion estimates if we
  22413. work in 3D rather than in 2D. We're currently estimating an
  22414. affine motion in 2D NDC space, which does not account for
  22415. perspective distortions. What if we projected the keypoints
  22416. into 3D NDC space, estimated the camera motion (rotation and
22417. translation) that best describes the observed motion
  22418. of the keypoints, and then projected things back to 2D NDC
  22419. space? Need to figure this out; we'll get a homography matrix.
  22420. Note: work with a 6 DoF perspective transform instead of 8.
  22421. */
  22422. return ImageTrackerUtils.findAffineWarpNDC(points, {
  22423. method: 'pransac',
  22424. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR_NDC,
  22425. numberOfHypotheses: 512 * 4,
  22426. bundleSize: 128,
  22427. mask: undefined // score is not needed
  22428. }).then(([warp, score]) => {
  22429. const scale = TRACK_RECTIFIED_SCALE;
  22430. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(this.screenSize, this._referenceImage);
  22431. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  22432. const grow = ImageTrackerUtils.inverseBestFitScaleNDC(aspectRatio, scale);
  22433. const scaledWarp = grow.times(warp).times(shrink);
  22434. const distort = this._warpHomography;
  22435. const undistort = distort.inverse();
  22436. const correctedWarp = distort.times(scaledWarp).times(undistort);
  22437. return correctedWarp;
  22438. }).catch(err => {
  22439. throw new TrackingError(`Can't find an affine motion`, err);
  22440. });
  22441. }
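/*
A note on the composition above (the same pattern appears in
_findPerspectiveMotionNDC below): the warp is estimated between keypoints that
live in the rectified, best-fit-scaled space. grow * warp * shrink expresses it
at the original NDC scale, and distort * (...) * undistort conjugates it by the
current homography (the warp homography here, the pose homography in the
perspective variant), i.e., it changes coordinates so that the estimated motion
can be composed directly with that homography in screen NDC.
*/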
  22442. /**
  22443. * Find a perspective motion model in NDC between pairs of keypoints in NDC
  22444. * given as a 2 x 2n [ src | dest ] matrix
  22445. * @param points compiled pairs of keypoints in NDC
  22446. * @returns a promise that resolves to a 3x3 warp in NDC that maps source to destination
  22447. */
  22448. _findPerspectiveMotionNDC(points) {
  22449. return ImageTrackerUtils.findPerspectiveWarpNDC(points, {
  22450. method: 'pransac',
  22451. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR_NDC,
  22452. numberOfHypotheses: 512 * 2,
  22453. bundleSize: 128,
  22454. mask: undefined // score is not needed
  22455. }).then(([warp, score]) => {
  22456. const scale = TRACK_RECTIFIED_SCALE;
  22457. const aspectRatio = ImageTrackerUtils.bestFitAspectRatioNDC(this.screenSize, this._referenceImage);
  22458. const shrink = ImageTrackerUtils.bestFitScaleNDC(aspectRatio, scale);
  22459. const grow = ImageTrackerUtils.inverseBestFitScaleNDC(aspectRatio, scale);
  22460. const scaledWarp = grow.times(warp).times(shrink);
  22461. const distort = this._poseHomography;
  22462. const undistort = distort.inverse();
  22463. const correctedWarp = distort.times(scaledWarp).times(undistort);
  22464. return correctedWarp;
  22465. }).catch(err => {
  22466. throw new TrackingError(`Can't find a perspective motion`, err);
  22467. });
  22468. }
  22469. /**
  22470. * Find matching pairs of two sets of keypoints matched via brute force
  22471. * @param srcKeypoints source (database)
  22472. * @param destKeypoints destination
  22473. * @returns an array of matching pairs [src, dest]
  22474. */
  22475. _findMatchingPairs(srcKeypoints, destKeypoints) {
  22476. const pairs = [];
  22477. for (let i = 0; i < destKeypoints.length; i++) {
  22478. const destKeypoint = destKeypoints[i];
  22479. if (destKeypoint.matches[0].index >= 0 && destKeypoint.matches[1].index >= 0) {
  22480. const d1 = destKeypoint.matches[0].distance;
  22481. const d2 = destKeypoint.matches[1].distance;
  22482. // the best match should be "much better" than the second best match,
  22483. // which means that they are "distinct enough"
  22484. if (d1 <= TRACK_MATCH_RATIO * d2) {
  22485. const srcKeypoint = srcKeypoints[destKeypoint.matches[0].index];
  22486. pairs.push([srcKeypoint, destKeypoint]);
  22487. }
  22488. }
  22489. }
  22490. return pairs;
  22491. }
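/*
Illustrative numbers for the ratio test above (the actual threshold is
TRACK_MATCH_RATIO, defined elsewhere in this bundle): with a ratio of 0.7, a
keypoint whose two best matches have distances d1 = 20 and d2 = 40 is kept
(20 <= 0.7 * 40), while one with d1 = 35 and d2 = 40 is discarded as ambiguous.
*/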
  22492. /**
  22493. * Predict the keypoints without actually looking at the image
  22494. * @param curr keypoints at time t (will modify the contents)
  22495. * @param prev keypoints at time t-1 (not just t = 0)
  22496. * @returns keypoints at time t+1
  22497. */
  22498. /*
  22499. private _predictKeypoints(curr: SpeedyMatchedKeypoint[], prev: SpeedyKeypoint[]): SpeedyMatchedKeypoint[]
  22500. {
  22501. // the target image is likely to be moving roughly in
  22502. // the same manner as it was in the previous frame
  22503. const alpha = 0.8; //0.2;
  22504. const next: SpeedyMatchedKeypoint[] = [];
  22505. const n = curr.length;
  22506. for(let i = 0; i < n; i++) {
  22507. const cur = curr[i];
  22508. if(cur.matches[0].index < 0 || cur.matches[1].index < 0)
  22509. continue;
  22510. //else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
  22511. // continue;
  22512. const prv = prev[cur.matches[0].index];
  22513. const dx = cur.position.x - prv.position.x;
  22514. const dy = cur.position.y - prv.position.y;
  22515. // a better mathematical model is needed
  22516. cur.position.x = prv.position.x + alpha * dx;
  22517. cur.position.y = prv.position.y + alpha * dy;
  22518. next.push(cur);
  22519. }
  22520. // done!
  22521. return next;
  22522. }
  22523. */
  22524. /**
  22525. * Create & setup the pipeline
  22526. * @returns pipeline
  22527. */
  22528. _createPipeline() {
  22529. const pipeline = speedy_vision_default().Pipeline();
  22530. const source = speedy_vision_default().Image.Source('source');
  22531. const screen = speedy_vision_default().Transform.Resize('screen');
  22532. const greyscale = speedy_vision_default().Filter.Greyscale();
  22533. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  22534. const nightvision = speedy_vision_default().Filter.Nightvision();
  22535. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  22536. const blur = speedy_vision_default().Filter.GaussianBlur();
  22537. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  22538. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  22539. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  22540. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  22541. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  22542. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  22543. const clipper = speedy_vision_default().Keypoint.Clipper();
  22544. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  22545. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  22546. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  22547. //const imageSink = Speedy.Image.Sink('image');
  22548. source.media = null;
  22549. screen.size = speedy_vision_default().Size(0, 0);
  22550. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  22551. nightvision.gain = NIGHTVISION_GAIN;
  22552. nightvision.offset = NIGHTVISION_OFFSET;
  22553. nightvision.decay = NIGHTVISION_DECAY;
  22554. nightvision.quality = NIGHTVISION_QUALITY;
  22555. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  22556. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  22557. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  22558. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  22559. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  22560. detector.quality = TRACK_HARRIS_QUALITY;
  22561. detector.capacity = TRACK_DETECTOR_CAPACITY;
  22562. subpixel.method = SUBPIXEL_METHOD;
  22563. clipper.size = TRACK_MAX_KEYPOINTS;
  22564. borderClipper.imageSize = screen.size;
  22565. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  22566. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  22567. matcher.k = 2;
  22568. keypointPortalSource.source = null;
  22569. keypointSink.turbo = USE_TURBO;
  22570. // prepare input
  22571. source.output().connectTo(screen.input());
  22572. screen.output().connectTo(greyscale.input());
  22573. // preprocess images
  22574. greyscale.output().connectTo(imageRectifier.input());
  22575. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  22576. imageRectifier.output().connectTo(nightvision.input());
  22577. nightvision.output().connectTo(nightvisionMux.input('in1'));
  22578. // keypoint detection & clipping
  22579. nightvisionMux.output().connectTo(detector.input());
  22580. detector.output().connectTo(borderClipper.input());
  22581. borderClipper.output().connectTo(clipper.input());
  22582. // keypoint refinement
  22583. imageRectifier.output().connectTo(denoiser.input());
  22584. denoiser.output().connectTo(subpixel.input('image'));
  22585. clipper.output().connectTo(subpixel.input('keypoints'));
  22586. // keypoint description
  22587. imageRectifier.output().connectTo(blur.input());
  22588. blur.output().connectTo(descriptor.input('image'));
  22589. subpixel.output().connectTo(descriptor.input('keypoints'));
  22590. // keypoint matching
  22591. keypointPortalSource.output().connectTo(matcher.input('database'));
  22592. descriptor.output().connectTo(matcher.input('keypoints'));
  22593. // prepare output
  22594. descriptor.output().connectTo(keypointScaler.input());
  22595. keypointScaler.output().connectTo(keypointSink.input());
  22596. matcher.output().connectTo(keypointSink.input('matches'));
  22597. //imageRectifier.output().connectTo(imageSink.input());
  22598. // done!
  22599. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointScaler, keypointSink);
  22600. return pipeline;
  22601. }
  22602. }
  22603. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
  22604. /*
  22605. * encantar.js
  22606. * GPU-accelerated Augmented Reality for the web
  22607. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22608. *
  22609. * This program is free software: you can redistribute it and/or modify
  22610. * it under the terms of the GNU Lesser General Public License as published
  22611. * by the Free Software Foundation, either version 3 of the License, or
  22612. * (at your option) any later version.
  22613. *
  22614. * This program is distributed in the hope that it will be useful,
  22615. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22616. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22617. * GNU Lesser General Public License for more details.
  22618. *
  22619. * You should have received a copy of the GNU Lesser General Public License
  22620. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22621. *
  22622. * image-tracker.ts
  22623. * Image Tracker
  22624. */
  22625. /** A helper */
  22626. const formatSize = (size) => `${size.width}x${size.height}`;
  22627. /** Default options for instantiating an ImageTracker */
  22628. const image_tracker_DEFAULT_OPTIONS = {
  22629. resolution: 'sm'
  22630. };
  22631. /**
  22632. * The ImageTracker tracks an image (one at a time)
  22633. */
  22634. class ImageTracker extends AREventTarget {
  22635. /**
  22636. * Constructor
  22637. * @param options
  22638. */
  22639. constructor(options) {
  22640. super();
  22641. // the states
  22642. this._state = {
  22643. 'initial': new ImageTrackerInitialState(this),
  22644. 'training': new ImageTrackerTrainingState(this),
  22645. 'scanning': new ImageTrackerScanningState(this),
  22646. 'pre-tracking-a': new ImageTrackerPreTrackingAState(this),
  22647. 'pre-tracking-b': new ImageTrackerPreTrackingBState(this),
  22648. 'tracking': new ImageTrackerTrackingState(this),
  22649. };
  22650. // initial setup
  22651. this._session = null;
  22652. this._source = null;
  22653. this._activeStateName = 'initial';
  22654. this._lastOutput = {};
  22655. this._database = new ReferenceImageDatabase();
  22656. // user settings
  22657. options = Object.assign({}, image_tracker_DEFAULT_OPTIONS, options);
  22658. this._resolution = options.resolution;
  22659. }
  22660. /**
  22661. * The type of the tracker
  22662. */
  22663. get type() {
  22664. return 'image-tracker';
  22665. }
  22666. /**
  22667. * Current state name
  22668. */
  22669. get state() {
  22670. return this._activeStateName;
  22671. }
  22672. /**
  22673. * Reference Image Database
  22674. * Must be configured before training the tracker
  22675. */
  22676. get database() {
  22677. return this._database;
  22678. }
  22679. /**
  22680. * Resolution of the AR screen space
  22681. */
  22682. get resolution() {
  22683. return this._resolution;
  22684. }
  22685. /**
  22686. * Resolution of the AR screen space
  22687. */
  22688. set resolution(resolution) {
  22689. this._resolution = resolution;
  22690. }
  22691. /**
  22692. * Size of the AR screen space, in pixels
  22693. * @internal
  22694. */
  22695. get screenSize() {
  22696. return this._state[this._activeStateName].screenSize;
  22697. }
  22698. /**
  22699. * Last emitted output
  22700. * @internal
  22701. */
  22702. get _output() {
  22703. return this._lastOutput;
  22704. }
  22705. /**
  22706. * Stats related to this tracker
  22707. * @internal
  22708. */
  22709. get _stats() {
  22710. return `${formatSize(this.screenSize)} ${this.state}`;
  22711. }
  22712. /**
  22713. * Initialize this tracker
  22714. * @param session
  22715. * @returns promise that resolves after the tracker has been initialized
  22716. * @internal
  22717. */
  22718. _init(session) {
  22719. // store the session
  22720. this._session = session;
  22721. // find a suitable source of data
  22722. // XXX also let the user specify a source manually?
  22723. for (const source of session.sources) {
  22724. // prefer video sources
  22725. if (source._type == 'video') {
  22726. this._source = source;
  22727. break;
  22728. }
  22729. else if (source._type == 'canvas')
  22730. this._source = source;
  22731. }
  22732. if (this._source === null)
  22733. throw new IllegalOperationError('The image tracker requires a suitable source of data');
  22734. // initialize states
  22735. for (const state of Object.values(this._state))
  22736. state.init();
  22737. // done!
  22738. return speedy_vision_default().Promise.resolve();
  22739. }
  22740. /**
  22741. * Release this tracker
  22742. * @returns promise that resolves after the tracker has been released
  22743. * @internal
  22744. */
  22745. _release() {
  22746. // release states
  22747. for (const state of Object.values(this._state))
  22748. state.release();
  22749. // unlink session
  22750. this._session = null;
  22751. // done!
  22752. return speedy_vision_default().Promise.resolve();
  22753. }
  22754. /**
  22755. * Update the tracker
  22756. * @returns promise
  22757. * @internal
  22758. */
  22759. _update() {
  22760. // validate
  22761. if (this._session == null)
  22762. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
  22763. // compute the screen size for image processing purposes
  22764. // note: this may change over time...!
  22765. const media = this._source._internalMedia;
  22766. const screenSize = this._computeScreenSize();
  22767. // run the active state
  22768. const activeState = this._state[this._activeStateName];
  22769. return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
  22770. // update the output of the tracker
  22771. this._lastOutput = trackerOutput;
  22772. // need to change the state?
  22773. if (this._activeStateName != nextState) {
  22774. activeState.onLeaveState();
  22775. this._activeStateName = nextState;
  22776. this._state[nextState].onEnterState(nextStateSettings || {});
  22777. }
  22778. });
  22779. }
  22780. /**
  22781. * Get reference image
22782. * @param keypointIndex index of the keypoint (-1 if not found)
  22783. * @returns reference image
  22784. * @internal
  22785. */
  22786. _referenceImageOfKeypoint(keypointIndex) {
  22787. const training = this._state.training;
  22788. return training.referenceImageOfKeypoint(keypointIndex);
  22789. }
  22790. /**
  22791. * Get reference image index
22792. * @param keypointIndex index of the keypoint (-1 if not found)
  22793. * @returns reference image index, or -1 if not found
  22794. * @internal
  22795. */
  22796. _referenceImageIndexOfKeypoint(keypointIndex) {
  22797. const training = this._state.training;
  22798. return training.referenceImageIndexOfKeypoint(keypointIndex);
  22799. }
  22800. /**
  22801. * Get a keypoint of the trained set
  22802. * @param keypointIndex
  22803. * @returns a keypoint
  22804. * @internal
  22805. */
  22806. _referenceKeypoint(keypointIndex) {
  22807. const training = this._state.training;
  22808. return training.referenceKeypoint(keypointIndex);
  22809. }
  22810. /**
  22811. * Compute the current size of the AR screen space
  22812. * Note that this may change over time
  22813. * @returns size
  22814. */
  22815. _computeScreenSize() {
  22816. const media = this._source._internalMedia;
  22817. const aspectRatio = media.width / media.height;
  22818. const screenSize = Utils.resolution(this._resolution, aspectRatio);
  22819. return screenSize;
  22820. }
  22821. }
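/*
 * Usage sketch (illustrative only, not part of the bundle): instantiate an
 * ImageTracker through the TrackerFactory defined later in this file and pick
 * a screen-space resolution. The database.add() call and its entry format are
 * assumptions about the ReferenceImageDatabase API, not shown in this module;
 * application code normally reaches these classes through the library's
 * public namespace rather than directly.
 *
 *     const tracker = TrackerFactory.Image({ resolution: 'md' });
 *     tracker.database.add([{
 *         name: 'my-image',                                // hypothetical entry
 *         image: document.getElementById('reference-img')  // an HTMLImageElement
 *     }]);
 *     console.log(tracker.type);   // "image-tracker"
 *     console.log(tracker.state);  // "initial"
 */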
  22822. ;// CONCATENATED MODULE: ./src/geometry/vector2.ts
  22823. /*
  22824. * encantar.js
  22825. * GPU-accelerated Augmented Reality for the web
  22826. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22827. *
  22828. * This program is free software: you can redistribute it and/or modify
  22829. * it under the terms of the GNU Lesser General Public License as published
  22830. * by the Free Software Foundation, either version 3 of the License, or
  22831. * (at your option) any later version.
  22832. *
  22833. * This program is distributed in the hope that it will be useful,
  22834. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22835. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22836. * GNU Lesser General Public License for more details.
  22837. *
  22838. * You should have received a copy of the GNU Lesser General Public License
  22839. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22840. *
  22841. * vector2.ts
  22842. * 2D vectors
  22843. */
  22844. /** Small number */
  22845. const vector2_EPSILON = 1e-6;
  22846. /** Immutable zero vector */
  22847. let vector2_ZERO = null;
  22848. // public / non-internal methods do not change the contents of the vector
  22849. /**
  22850. * A vector in 2D space
  22851. */
  22852. class Vector2 {
  22853. /**
  22854. * Constructor
  22855. */
  22856. constructor(x = 0, y = 0) {
  22857. this._x = +x;
  22858. this._y = +y;
  22859. }
  22860. /**
  22861. * Instantiate a zero vector
  22862. * @returns a new zero vector
  22863. */
  22864. static Zero() {
  22865. return new Vector2(0, 0);
  22866. }
  22867. /**
  22868. * Immutable zero vector
  22869. * @returns an immutable zero vector
  22870. */
  22871. static get ZERO() {
  22872. return vector2_ZERO || (vector2_ZERO = Object.freeze(Vector2.Zero()));
  22873. }
  22874. /**
  22875. * The x coordinate of the vector
  22876. */
  22877. get x() {
  22878. return this._x;
  22879. }
  22880. /**
  22881. * The y coordinate of the vector
  22882. */
  22883. get y() {
  22884. return this._y;
  22885. }
  22886. /**
  22887. * The length of this vector
  22888. * @returns sqrt(x^2 + y^2)
  22889. */
  22890. length() {
  22891. const x = this._x;
  22892. const y = this._y;
  22893. return Math.sqrt(x * x + y * y);
  22894. }
  22895. /**
  22896. * Compute the dot product of this and v
  22897. * @param v a vector
  22898. * @returns the dot product of the vectors
  22899. */
  22900. dot(v) {
  22901. return this._x * v._x + this._y * v._y;
  22902. }
  22903. /**
  22904. * Compute the distance between points this and v
  22905. * @param v a vector / point
  22906. * @returns the distance between the points
  22907. */
  22908. distanceTo(v) {
  22909. const dx = this._x - v._x;
  22910. const dy = this._y - v._y;
  22911. return Math.sqrt(dx * dx + dy * dy);
  22912. }
  22913. /**
  22914. * Compute the direction from this to v
  22915. * @param v a vector
  22916. * @returns a new unit vector pointing to v from this
  22917. */
  22918. directionTo(v) {
  22919. return v._clone()._subtract(this)._normalize();
  22920. }
  22921. /**
  22922. * Check if this and v have the same coordinates
  22923. * @param v a vector
  22924. * @returns true if this and v have the same coordinates
  22925. */
  22926. equals(v) {
  22927. return this._x === v._x && this._y === v._y;
  22928. }
  22929. /**
  22930. * Convert to string
  22931. * @returns a string
  22932. */
  22933. toString() {
  22934. const x = this._x.toFixed(5);
  22935. const y = this._y.toFixed(5);
  22936. return `Vector2(${x},${y})`;
  22937. }
  22938. /**
  22939. * Set the coordinates of this vector
  22940. * @param x x-coordinate
  22941. * @param y y-coordinate
  22942. * @returns this vector
  22943. * @internal
  22944. */
  22945. _set(x, y) {
  22946. this._x = +x;
  22947. this._y = +y;
  22948. return this;
  22949. }
  22950. /**
  22951. * Copy v to this vector
  22952. * @param v a vector
  22953. * @returns this vector
  22954. * @internal
  22955. */
  22956. _copyFrom(v) {
  22957. this._x = v._x;
  22958. this._y = v._y;
  22959. return this;
  22960. }
  22961. /**
  22962. * Normalize this vector
  22963. * @returns this vector, normalized
  22964. * @internal
  22965. */
  22966. _normalize() {
  22967. const length = this.length();
  22968. if (length < vector2_EPSILON) // zero?
  22969. return this;
  22970. this._x /= length;
  22971. this._y /= length;
  22972. return this;
  22973. }
  22974. /**
  22975. * Add v to this vector
  22976. * @param v a vector
  22977. * @returns this vector
  22978. * @internal
  22979. */
  22980. _add(v) {
  22981. this._x += v._x;
  22982. this._y += v._y;
  22983. return this;
  22984. }
  22985. /**
  22986. * Subtract v from this vector
  22987. * @param v a vector
  22988. * @returns this vector
  22989. * @internal
  22990. */
  22991. _subtract(v) {
  22992. this._x -= v._x;
  22993. this._y -= v._y;
  22994. return this;
  22995. }
  22996. /**
  22997. * Scale this vector by a scalar
  22998. * @param s scalar
  22999. * @returns this vector
  23000. * @internal
  23001. */
  23002. _scale(s) {
  23003. this._x *= s;
  23004. this._y *= s;
  23005. return this;
  23006. }
  23007. /**
  23008. * Clone this vector
  23009. * @returns a clone of this vector
  23010. * @internal
  23011. */
  23012. _clone() {
  23013. return new Vector2(this._x, this._y);
  23014. }
  23015. }
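/*
 * Usage sketch (illustrative only): the public Vector2 methods are read-only;
 * the mutating helpers are internal (underscore-prefixed). Expected results
 * follow directly from the definitions above.
 *
 *     const a = new Vector2(3, 4);
 *     const b = Vector2.ZERO;            // immutable shared zero vector
 *     a.length();                        // 5
 *     a.dot(new Vector2(1, 0));          // 3
 *     a.distanceTo(b);                   // 5
 *     b.directionTo(a).toString();       // "Vector2(0.60000,0.80000)"
 */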
  23016. ;// CONCATENATED MODULE: ./src/trackers/pointer-tracker/pointer-tracker.ts
  23017. /*
  23018. * encantar.js
  23019. * GPU-accelerated Augmented Reality for the web
  23020. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23021. *
  23022. * This program is free software: you can redistribute it and/or modify
  23023. * it under the terms of the GNU Lesser General Public License as published
  23024. * by the Free Software Foundation, either version 3 of the License, or
  23025. * (at your option) any later version.
  23026. *
  23027. * This program is distributed in the hope that it will be useful,
  23028. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23029. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23030. * GNU Lesser General Public License for more details.
  23031. *
  23032. * You should have received a copy of the GNU Lesser General Public License
  23033. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23034. *
  23035. * pointer-tracker.ts
  23036. * Tracker of pointer-based input
  23037. */
  23038. /** Convert event type to trackable pointer phase */
  23039. const EVENTTYPE2PHASE = {
  23040. 'pointerdown': 'began',
  23041. 'pointerup': 'ended',
  23042. 'pointermove': 'moved',
  23043. 'pointercancel': 'canceled',
  23044. 'pointerleave': 'ended',
  23045. 'pointerenter': 'began',
  23046. };
  23047. /**
  23048. * A tracker of pointer-based input such as mouse, touch or pen
  23049. */
  23050. class PointerTracker {
  23051. /**
  23052. * Constructor
  23053. */
  23054. constructor() {
  23055. this._source = null;
  23056. this._viewport = null;
  23057. this._activePointers = new Map();
  23058. this._newPointers = new Map();
  23059. this._idMap = new Map();
  23060. this._nextId = 1;
  23061. this._previousOutput = this._generateOutput();
  23062. this._previousUpdateTime = Number.POSITIVE_INFINITY;
  23063. this._wantToReset = false;
  23064. this._resetInTheNextUpdate = this._resetInTheNextUpdate.bind(this);
  23065. }
  23066. /**
  23067. * The type of the tracker
  23068. */
  23069. get type() {
  23070. return 'pointer-tracker';
  23071. }
  23072. /**
  23073. * Initialize the tracker
  23074. * @param session
  23075. * @returns a promise that is resolved as soon as the tracker is initialized
  23076. * @internal
  23077. */
  23078. _init(session) {
  23079. Utils.log('Initializing PointerTracker...');
  23080. // set the viewport
  23081. this._viewport = session.viewport;
  23082. // find the pointer source
  23083. for (const source of session.sources) {
  23084. if (source._type == 'pointer-source') {
  23085. this._source = source;
  23086. break;
  23087. }
  23088. }
  23089. if (this._source === null)
  23090. return speedy_vision_default().Promise.reject(new IllegalOperationError('A PointerTracker expects a PointerSource'));
  23091. // link the pointer source to the viewport
  23092. this._source._setViewport(this._viewport);
  23093. // reset trackables
  23094. document.addEventListener('visibilitychange', this._resetInTheNextUpdate);
  23095. // done!
  23096. return speedy_vision_default().Promise.resolve();
  23097. }
  23098. /**
  23099. * Release the tracker
  23100. * @returns a promise that is resolved as soon as the tracker is released
  23101. * @internal
  23102. */
  23103. _release() {
  23104. this._source = null;
  23105. this._viewport = null;
  23106. this._activePointers.clear();
  23107. this._newPointers.clear();
  23108. this._idMap.clear();
  23109. document.removeEventListener('visibilitychange', this._resetInTheNextUpdate);
  23110. return speedy_vision_default().Promise.resolve();
  23111. }
  23112. /**
  23113. * Update the tracker (update cycle)
  23114. * @returns a promise that is resolved as soon as the tracker is updated
  23115. * @internal
  23116. */
  23117. _update() {
  23118. const canvas = this._viewport.canvas;
  23119. const rect = canvas.getBoundingClientRect(); // may be different in different frames!
  23120. // find the time between this and the previous update of this tracker
  23121. const deltaTime = this._updateTime();
23122. const inverseDeltaTime = (deltaTime > 1e-5) ? 1 / deltaTime : 60; // fallback: assume 60 fps, i.e. 1/dt = 1 / (1/60) = 60
  23123. // remove inactive trackables from the previous frame (update cycle)
  23124. const inactiveTrackables = this._findInactiveTrackables();
  23125. for (let i = inactiveTrackables.length - 1; i >= 0; i--)
  23126. this._activePointers.delete(inactiveTrackables[i].id);
  23127. // make all active trackables stationary
  23128. this._updateAllTrackables({
  23129. phase: 'stationary',
  23130. velocity: Vector2.ZERO,
  23131. deltaPosition: Vector2.ZERO
  23132. });
  23133. // want to reset?
  23134. if (this._wantToReset) {
  23135. this._reset();
  23136. this._wantToReset = false;
  23137. }
  23138. // consume events
  23139. let event;
  23140. while ((event = this._source._consume()) !== null) {
  23141. // sanity check
  23142. if (event.target !== canvas)
  23143. return speedy_vision_default().Promise.reject(new IllegalOperationError('Invalid PointerEvent target ' + event.target));
  23144. else if (!EVENTTYPE2PHASE.hasOwnProperty(event.type))
  23145. return speedy_vision_default().Promise.reject(new IllegalOperationError('Invalid PointerEvent type ' + event.type));
  23146. // determine the ID
  23147. const id = this._normalizeId(event.pointerId, event.pointerType);
  23148. // determine the previous states, if any, of the trackable
  23149. const previous = this._activePointers.get(id); // state in the previous frame
  23150. const current = this._newPointers.get(id); // previous state in the current frame
  23151. // determine the phase
  23152. const phase = EVENTTYPE2PHASE[event.type];
  23153. // new trackables always begin with a pointerdown event,
  23154. // or with a pointerenter event having buttons pressed
  23155. // (example: a mousemove without a previous mousedown isn't tracked)
  23156. if (!(event.type == 'pointerdown' || (event.type == 'pointerenter' && event.buttons > 0))) {
  23157. if (!previous && !current)
  23158. continue; // discard event
  23159. }
  23160. else if (previous) {
  23161. // discard a 'began' after another 'began'
  23162. continue;
  23163. }
  23164. else if (event.button != 0 && event.pointerType == 'mouse') {
  23165. // require left mouse click
  23166. continue;
  23167. }
  23168. // discard event if 'began' and 'ended' happened in the same frame
  23169. // (difficult to reproduce, but it can be done ;)
  23170. if (!previous) {
  23171. if (phase == 'ended' || phase == 'canceled') {
  23172. this._newPointers.delete(id);
  23173. continue;
  23174. }
  23175. }
  23176. // what if we receive 'began' after 'ended' in the same frame?
  23177. else if (phase == 'began' && current) {
  23178. if (current.phase == 'ended' || current.phase == 'canceled') {
  23179. this._newPointers.delete(id);
  23180. continue;
  23181. }
  23182. }
  23183. // discard previously canceled pointers (e.g., with a visibilitychange event)
  23184. if ((previous === null || previous === void 0 ? void 0 : previous.phase) == 'canceled')
  23185. continue;
  23186. // more special rules
  23187. switch (event.type) {
  23188. case 'pointermove':
  23189. if (event.buttons == 0 || (current === null || current === void 0 ? void 0 : current.phase) == 'began')
  23190. continue;
  23191. break;
  23192. case 'pointerenter':
  23193. if (event.buttons == 0 || (previous === null || previous === void 0 ? void 0 : previous.phase) == 'began' || (current === null || current === void 0 ? void 0 : current.phase) == 'began')
  23194. continue;
  23195. break;
  23196. case 'pointercancel': // purge everything
  23197. this._reset();
  23198. this._newPointers.clear();
  23199. continue;
  23200. }
  23201. // determine the current position
  23202. const absX = event.pageX - (rect.left + window.scrollX);
  23203. const absY = event.pageY - (rect.top + window.scrollY);
  23204. const relX = 2 * absX / rect.width - 1; // convert to [-1,1]
  23205. const relY = -(2 * absY / rect.height - 1); // flip Y axis
  23206. const position = new Vector2(relX, relY);
  23207. // determine the position delta
  23208. const deltaPosition = !previous ? Vector2.ZERO :
  23209. position._clone()._subtract(previous.position);
  23210. // determine the initial position
  23211. const initialPosition = previous ? previous.initialPosition :
  23212. Object.freeze(position._clone());
  23213. // determine the velocity
  23214. const velocity = deltaPosition._clone()._scale(inverseDeltaTime);
  23215. // determine the elapsed time since the tracking began
  23216. const elapsedTime = previous ? previous.elapsedTime + deltaTime : 0;
  23217. // determine whether or not this is the primary pointer for this type
  23218. const isPrimary = event.isPrimary;
  23219. // determine the type of the originating device
  23220. const kind = event.pointerType;
  23221. // we create new trackable instances on each frame;
  23222. // these will be exported and consumed by the user
  23223. this._newPointers.set(id, { id, phase, position, deltaPosition, initialPosition, velocity, elapsedTime, isPrimary, kind });
  23224. }
  23225. // update trackables
  23226. this._newPointers.forEach((trackable, id) => this._activePointers.set(id, trackable));
  23227. this._newPointers.clear();
  23228. this._advanceAllStationaryTrackables(deltaTime);
  23229. // discard unused IDs
  23230. if (this._activePointers.size == 0 && this._idMap.size > 0)
  23231. this._idMap.clear();
  23232. // generate output
  23233. this._previousOutput = this._generateOutput();
23234. // debug
23235. //console.log(JSON.stringify(this._previousOutput.exports.trackables, null, 4));
  23236. // done!
  23237. return speedy_vision_default().Promise.resolve();
  23238. }
  23239. /**
  23240. * Output of the previous frame
  23241. * @internal
  23242. */
  23243. get _output() {
  23244. return this._previousOutput;
  23245. }
  23246. /**
  23247. * Stats info
  23248. * @internal
  23249. */
  23250. get _stats() {
  23251. const n = this._activePointers.size;
  23252. const s = n != 1 ? 's' : '';
  23253. return n + ' pointer' + s;
  23254. }
  23255. /**
  23256. * Generate tracker output
  23257. * @returns a new PointerTrackerOutput object
  23258. */
  23259. _generateOutput() {
  23260. const trackables = [];
  23261. this._activePointers.forEach(trackable => trackables.push(trackable));
  23262. return {
  23263. exports: {
  23264. tracker: this,
  23265. trackables: this._sortTrackables(trackables)
  23266. }
  23267. };
  23268. }
  23269. /**
  23270. * Update all active pointers
  23271. * @param fields
  23272. */
  23273. _updateAllTrackables(fields) {
  23274. this._activePointers.forEach((trackable, id) => {
  23275. this._activePointers.set(id, Object.assign({}, trackable, fields));
  23276. });
  23277. }
  23278. /**
  23279. * Advance the elapsed time of all stationary pointers
  23280. * @param deltaTime
  23281. */
  23282. _advanceAllStationaryTrackables(deltaTime) {
  23283. this._activePointers.forEach((trackable, id) => {
  23284. if (trackable.phase == 'stationary') {
  23285. trackable.elapsedTime += deltaTime;
  23286. /*
  23287. this._activePointers.set(id, Object.assign({}, trackable, {
  23288. elapsedTime: trackable.elapsedTime + deltaTime
  23289. }));
  23290. */
  23291. }
  23292. });
  23293. }
  23294. /**
  23295. * Normalize pointer IDs across browsers
  23296. * @param pointerId browser-provided pointer ID
  23297. * @param pointerType pointer type
  23298. * @returns a normalized pointer ID
  23299. */
  23300. _normalizeId(pointerId, pointerType) {
  23301. // XXX different hardware devices acting simultaneously may produce
  23302. // events with the same pointerId - handling this seems overkill?
  23303. if (pointerType == 'mouse')
  23304. return 0;
  23305. if (!this._idMap.has(pointerId))
  23306. this._idMap.set(pointerId, this._nextId++);
  23307. return this._idMap.get(pointerId);
  23308. }
  23309. /**
  23310. * Cancel all active pointers and consume all events
  23312. */
  23313. _reset() {
  23314. // cancel all active pointers
  23315. this._updateAllTrackables({
  23316. phase: 'canceled',
  23317. velocity: Vector2.ZERO,
  23318. deltaPosition: Vector2.ZERO
  23319. });
  23320. // consume all events
  23321. while (this._source._consume() !== null)
  23322. ;
  23323. }
  23324. /**
  23325. * Reset in the next update of the tracker
  23326. */
  23327. _resetInTheNextUpdate() {
  23328. this._wantToReset = true;
  23329. }
  23330. /**
  23331. * As a convenience, let's make sure that a primary pointer, if any exists,
  23332. * is at the beginning of the trackables array
  23333. * @param trackables
  23334. * @returns sorted trackables
  23335. */
  23336. _sortTrackables(trackables) {
  23337. /*
  23338. Note: the browser may not report a new unique pointer (phase: "began")
  23339. as primary. This logic makes trackables[0] primary, or sort of primary.
  23340. Behavior on Chrome 130 on Android: when moving multiple touch points,
  23341. remove focus from the browser. Touch points will be canceled as
  23342. expected. When touching the screen again with a single finger, the
  23343. (only one) registered pointer will not be primary. That's undesirable.
  23344. Touching the screen again with multiple fingers (none will be primary),
  23345. and then releasing them, will restore the desired behavior.
  23346. */
  23347. // nothing to do
  23348. if (trackables.length <= 1 || trackables[0].isPrimary)
  23349. return trackables;
  23350. // find a primary pointer and swap
  23351. for (let j = 1; j < trackables.length; j++) {
  23352. if (trackables[j].isPrimary) {
  23353. const primary = trackables[j];
  23354. trackables[j] = trackables[0];
  23355. trackables[0] = primary;
  23356. break;
  23357. }
  23358. }
  23359. // done!
  23360. return trackables;
  23361. }
  23362. /**
  23363. * Find trackables to remove
  23364. * @returns a list of trackables to remove
  23365. */
  23366. _findInactiveTrackables() {
  23367. const trackables = [];
  23368. this._activePointers.forEach(trackable => {
  23369. if (trackable.phase == 'ended' || trackable.phase == 'canceled')
  23370. trackables.push(trackable);
  23371. });
  23372. return trackables;
  23373. }
  23374. /**
  23375. * Update the time
  23376. * @returns delta time in seconds
  23377. */
  23378. _updateTime() {
  23379. const now = performance.now() * 0.001;
  23380. if (this._previousUpdateTime > now)
  23381. this._previousUpdateTime = now;
  23382. const prev = this._previousUpdateTime;
  23383. this._previousUpdateTime = now;
  23384. return now - prev;
  23385. }
  23386. }
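/*
 * Usage sketch (illustrative only): a PointerTracker pairs with a
 * PointerSource (defined later in this file). Trackable positions are
 * normalized to [-1, 1] on both axes, with +y pointing up, exactly as
 * computed in _update() above. How the per-frame trackables are read in
 * application code depends on the session API, which is outside this module.
 *
 *     const pointerTracker = new PointerTracker();
 *     // ...attach pointerTracker and a PointerSource to a session (not shown)...
 *
 *     // each trackable produced per frame has this shape:
 *     // { id, phase, position, deltaPosition, initialPosition,
 *     //   velocity, elapsedTime, isPrimary, kind }
 */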
  23387. ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
  23388. /*
  23389. * encantar.js
  23390. * GPU-accelerated Augmented Reality for the web
  23391. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23392. *
  23393. * This program is free software: you can redistribute it and/or modify
  23394. * it under the terms of the GNU Lesser General Public License as published
  23395. * by the Free Software Foundation, either version 3 of the License, or
  23396. * (at your option) any later version.
  23397. *
  23398. * This program is distributed in the hope that it will be useful,
  23399. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23400. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23401. * GNU Lesser General Public License for more details.
  23402. *
  23403. * You should have received a copy of the GNU Lesser General Public License
  23404. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23405. *
  23406. * tracker-factory.ts
  23407. * Tracker factory
  23408. */
  23409. /**
  23410. * Tracker factory
  23411. */
  23412. class TrackerFactory {
  23413. /**
  23414. * Create an Image Tracker
  23415. * @param options
  23416. */
  23417. static Image(options = {}) {
  23418. return new ImageTracker(options);
  23419. }
  23420. /**
  23421. * Create an Image Tracker with default settings
  23422. * @deprecated
  23423. */
  23424. static ImageTracker() {
  23425. return this.Image();
  23426. }
  23427. /**
  23428. * Create a Pointer Tracker
  23429. */
  23430. static Pointer() {
  23431. return new PointerTracker();
  23432. }
  23433. }
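/*
 * Usage sketch (illustrative only): prefer the newer factory methods;
 * TrackerFactory.ImageTracker() is a deprecated alias of Image() with
 * default options.
 *
 *     const imageTracker = TrackerFactory.Image({ resolution: 'sm' });
 *     const pointerTracker = TrackerFactory.Pointer();
 */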
  23434. ;// CONCATENATED MODULE: ./src/sources/video-source.ts
  23435. /*
  23436. * encantar.js
  23437. * GPU-accelerated Augmented Reality for the web
  23438. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23439. *
  23440. * This program is free software: you can redistribute it and/or modify
  23441. * it under the terms of the GNU Lesser General Public License as published
  23442. * by the Free Software Foundation, either version 3 of the License, or
  23443. * (at your option) any later version.
  23444. *
  23445. * This program is distributed in the hope that it will be useful,
  23446. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23447. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23448. * GNU Lesser General Public License for more details.
  23449. *
  23450. * You should have received a copy of the GNU Lesser General Public License
  23451. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23452. *
  23453. * video-source.ts
  23454. * HTMLVideoElement-based source of data
  23455. */
  23456. /** A message to be displayed if a video can't autoplay and user interaction is required */
  23457. const ALERT_MESSAGE = 'Tap on the screen to start';
  23458. /** Whether or not we have displayed the ALERT_MESSAGE */
  23459. let displayedAlertMessage = false;
  23460. /**
  23461. * HTMLVideoElement-based source of data
  23462. */
  23463. class VideoSource {
  23464. /**
  23465. * Constructor
  23466. */
  23467. constructor(video) {
  23468. Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
  23469. this._video = video;
  23470. this._media = null;
  23471. }
  23472. /**
  23473. * A type-identifier of the source of data
  23474. * @internal
  23475. */
  23476. get _type() {
  23477. return 'video';
  23478. }
  23479. /**
  23480. * Get media
  23481. * @internal
  23482. */
  23483. get _internalMedia() {
  23484. if (this._media == null)
  23485. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  23486. return this._media;
  23487. }
  23488. /**
  23489. * Stats related to this source of data
  23490. * @internal
  23491. */
  23492. get _stats() {
  23493. const media = this._media;
  23494. if (media != null)
  23495. return `${media.width}x${media.height} video`;
  23496. else
  23497. return 'uninitialized video';
  23498. }
  23499. /**
  23500. * Initialize this source of data
  23501. * @returns a promise that resolves as soon as this source of data is initialized
  23502. * @internal
  23503. */
  23504. _init() {
  23505. Utils.log(`Initializing ${this._type} source...`);
  23506. // prepare the video before loading the SpeedyMedia!
  23507. return this._prepareVideo(this._video).then(video => {
  23508. Utils.log('The video is prepared');
  23509. return speedy_vision_default().load(video).then(media => {
  23510. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  23511. this._media = media;
  23512. });
  23513. });
  23514. }
  23515. /**
  23516. * Release this source of data
  23517. * @returns a promise that resolves as soon as this source of data is released
  23518. * @internal
  23519. */
  23520. _release() {
  23521. if (this._media)
  23522. this._media.release();
  23523. this._media = null;
  23524. return speedy_vision_default().Promise.resolve();
  23525. }
  23526. /**
  23527. * Handle browser-specific quirks for <video> elements
  23528. * @param video a video element
  23529. * @returns a promise that resolves to the input video
  23530. */
  23531. _prepareVideo(video) {
  23532. // WebKit <video> policies for iOS:
  23533. // https://webkit.org/blog/6784/new-video-policies-for-ios/
  23534. // required on iOS; nice to have in all browsers
  23535. video.setAttribute('playsinline', '');
  23536. // handle autoplay
  23537. return this._handleAutoPlay(video).then(video => {
  23538. // handle WebKit quirks
  23539. if (Utils.isWebKit()) {
  23540. // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
  23541. // on iOS 15.2-17.3, this hack doesn't seem necessary, but works okay
  23542. if (video.hidden) {
  23543. video.hidden = false;
  23544. video.style.setProperty('opacity', '0');
  23545. video.style.setProperty('position', 'fixed'); // make sure that it's visible on-screen
  23546. video.style.setProperty('left', '0');
  23547. video.style.setProperty('top', '0');
  23548. //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
  23549. //video.style.setProperty('visibility', 'hidden'); // doesn't work either
  23550. }
  23551. }
  23552. // done
  23553. return video;
  23554. });
  23555. }
  23556. /**
  23557. * Handle browser-specific quirks for videos marked with autoplay
  23558. * @param video a <video> marked with autoplay
  23559. * @returns a promise that resolves to the input video
  23560. */
  23561. _handleAutoPlay(video) {
  23562. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  23563. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  23564. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  23565. // nothing to do?
  23566. if (!video.autoplay)
  23567. return speedy_vision_default().Promise.resolve(video);
  23568. // videos marked with autoplay should be muted
  23569. if (!video.muted) {
  23570. Utils.warning('Videos marked with autoplay should be muted', video);
  23571. video.muted = true;
  23572. }
  23573. // the browser may not honor the autoplay attribute if the video is not
  23574. // visible on-screen. So, let's try to play the video in any case.
  23575. return this._waitUntilPlayable(video).then(video => {
  23576. // try to play the video
  23577. const promise = video.play();
  23578. // handle older browsers
  23579. if (promise === undefined)
  23580. return video;
  23581. // resolve if successful
  23582. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23583. promise.then(() => resolve(video), error => {
  23584. // can't play the video
  23585. Utils.error(`Can't autoplay video!`, error, video);
  23586. // autoplay is blocked for some reason
  23587. if (error.name == 'NotAllowedError') {
  23588. Utils.warning('Tip: allow manual playback');
  23589. if (Utils.isIOS())
  23590. Utils.warning('Is low power mode on?');
  23591. // User interaction is required to play the video. We can
  23592. // solve this here (easy and convenient to do) or at the
  23593. // application layer (for a better user experience). If the
  23594. // latter is preferred, just disable autoplay and play the
23595. * video programmatically.
  23596. if (video.hidden || !video.controls || video.parentNode === null) {
  23597. // this is added for convenience
  23598. document.body.addEventListener('pointerdown', () => video.play());
  23599. // ask only once for user interaction
  23600. if (!displayedAlertMessage) {
  23601. alert(ALERT_MESSAGE);
  23602. displayedAlertMessage = true;
  23603. }
  23604. // XXX what if the Session mode is inline? In this
  23605. // case, this convenience code may be undesirable.
  23606. // A workaround is to disable autoplay.
  23607. }
  23608. /*else {
  23609. // play the video after the first interaction with the page
  23610. const polling = setInterval(() => {
  23611. video.play().then(() => clearInterval(polling));
  23612. }, 500);
  23613. }*/
  23614. }
  23615. // unsupported media source
  23616. else if (error.name == 'NotSupportedError') {
  23617. reject(new NotSupportedError('Unsupported video format', error));
  23618. return;
  23619. }
  23620. // done
  23621. resolve(video);
  23622. });
  23623. });
  23624. });
  23625. }
  23626. /**
  23627. * Wait for the input video to be playable
  23628. * @param video
  23629. * @returns a promise that resolves to the input video when it can be played
  23630. */
  23631. _waitUntilPlayable(video) {
  23632. const TIMEOUT = 15000, INTERVAL = 500;
  23633. if (video.readyState >= 3)
  23634. return speedy_vision_default().Promise.resolve(video);
  23635. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23636. let ms = 0, t = setInterval(() => {
  23637. //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
  23638. if (video.readyState >= 3) {
  23639. clearInterval(t);
  23640. resolve(video);
  23641. }
  23642. else if ((ms += INTERVAL) >= TIMEOUT) {
  23643. clearInterval(t);
  23644. reject(new TimeoutError('The video took too long to load'));
  23645. }
  23646. }, INTERVAL);
  23647. });
  23648. }
  23649. }
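/*
 * Usage sketch (illustrative only): wrap an existing <video> element. Videos
 * marked with autoplay should also be muted, otherwise _handleAutoPlay() will
 * warn and playback may require a user gesture; the playsinline attribute is
 * added automatically by _prepareVideo(). The element id below is hypothetical.
 *
 *     const videoElement = document.querySelector('video#ar-video');
 *     videoElement.muted = true;
 *     videoElement.autoplay = true;
 *     const videoSource = new VideoSource(videoElement);
 *     // or, equivalently, via the factory defined later in this file:
 *     // const videoSource = SourceFactory.Video(videoElement);
 */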
  23650. ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
  23651. /*
  23652. * encantar.js
  23653. * GPU-accelerated Augmented Reality for the web
  23654. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23655. *
  23656. * This program is free software: you can redistribute it and/or modify
  23657. * it under the terms of the GNU Lesser General Public License as published
  23658. * by the Free Software Foundation, either version 3 of the License, or
  23659. * (at your option) any later version.
  23660. *
  23661. * This program is distributed in the hope that it will be useful,
  23662. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23663. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23664. * GNU Lesser General Public License for more details.
  23665. *
  23666. * You should have received a copy of the GNU Lesser General Public License
  23667. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23668. *
  23669. * canvas-source.ts
  23670. * HTMLCanvasElement-based source of data
  23671. */
  23672. /**
  23673. * HTMLCanvasElement-based source of data
  23674. */
  23675. class CanvasSource {
  23676. /**
  23677. * Constructor
  23678. */
  23679. constructor(canvas) {
  23680. Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
  23681. this._canvas = canvas;
  23682. this._media = null;
  23683. }
  23684. /**
  23685. * A type-identifier of the source of data
  23686. * @internal
  23687. */
  23688. get _type() {
  23689. return 'canvas';
  23690. }
  23691. /**
  23692. * Get media
  23693. * @internal
  23694. */
  23695. get _internalMedia() {
  23696. if (this._media == null)
  23697. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  23698. return this._media;
  23699. }
  23700. /**
  23701. * Stats related to this source of data
  23702. * @internal
  23703. */
  23704. get _stats() {
  23705. const media = this._media;
  23706. if (media != null)
  23707. return `${media.width}x${media.height} canvas`;
  23708. else
  23709. return 'uninitialized canvas';
  23710. }
  23711. /**
  23712. * Initialize this source of data
  23713. * @returns a promise that resolves as soon as this source of data is initialized
  23714. * @internal
  23715. */
  23716. _init() {
  23717. return speedy_vision_default().load(this._canvas).then(media => {
  23718. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  23719. this._media = media;
  23720. });
  23721. }
  23722. /**
  23723. * Release this source of data
  23724. * @returns a promise that resolves as soon as this source of data is released
  23725. * @internal
  23726. */
  23727. _release() {
  23728. if (this._media)
  23729. this._media.release();
  23730. this._media = null;
  23731. return speedy_vision_default().Promise.resolve();
  23732. }
  23733. }
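/*
 * Usage sketch (illustrative only): a CanvasSource captures whatever is drawn
 * on the canvas, so live content should be redrawn before each frame.
 * someImage below is a hypothetical drawable (e.g. an HTMLImageElement).
 *
 *     const canvasElement = document.createElement('canvas');
 *     canvasElement.width = 640;
 *     canvasElement.height = 360;
 *     const ctx = canvasElement.getContext('2d');
 *     ctx.drawImage(someImage, 0, 0);
 *     const canvasSource = new CanvasSource(canvasElement);
 */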
  23734. ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
  23735. /*
  23736. * encantar.js
  23737. * GPU-accelerated Augmented Reality for the web
  23738. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23739. *
  23740. * This program is free software: you can redistribute it and/or modify
  23741. * it under the terms of the GNU Lesser General Public License as published
  23742. * by the Free Software Foundation, either version 3 of the License, or
  23743. * (at your option) any later version.
  23744. *
  23745. * This program is distributed in the hope that it will be useful,
  23746. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23747. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23748. * GNU Lesser General Public License for more details.
  23749. *
  23750. * You should have received a copy of the GNU Lesser General Public License
  23751. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23752. *
  23753. * camera-source.ts
  23754. * Webcam-based source of data
  23755. */
  23756. /** Default options for camera sources */
  23757. const DEFAULT_CAMERA_OPTIONS = {
  23758. resolution: 'md',
  23759. aspectRatio: 16 / 9,
  23760. constraints: { facingMode: 'environment' },
  23761. };
  23762. /**
  23763. * Webcam-based source of data
  23764. */
  23765. class CameraSource extends VideoSource {
  23766. /**
  23767. * Constructor
  23768. * @param options
  23769. */
  23770. constructor(options) {
  23771. const video = document.createElement('video');
  23772. super(video);
  23773. this._cameraVideo = video;
  23774. this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
  23775. }
  23776. /**
  23777. * Camera resolution
  23778. */
  23779. get resolution() {
  23780. return this._options.resolution;
  23781. }
  23782. /**
  23783. * Initialize this source of data
  23784. * @returns a promise that resolves as soon as this source of data is initialized
  23785. * @internal
  23786. */
  23787. _init() {
  23788. Utils.log('Accessing the webcam...');
  23789. // validate
  23790. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  23791. throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
  23792. // set up media constraints
  23793. const options = this._options;
  23794. const size = Utils.resolution(options.resolution, options.aspectRatio);
  23795. const constraints = {
  23796. audio: false,
  23797. video: Object.assign({ width: size.width, height: size.height }, options.constraints)
  23798. };
  23799. // load camera stream
  23800. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23801. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  23802. const video = this._cameraVideo;
  23803. video.onloadedmetadata = () => {
  23804. const promise = video.play();
  23805. const success = 'Access to the webcam has been granted.';
  23806. // handle older browsers
  23807. if (promise === undefined) {
  23808. Utils.log(success);
  23809. resolve(video);
  23810. return;
  23811. }
  23812. // handle promise
  23813. promise.then(() => {
  23814. Utils.log(success);
  23815. resolve(video);
  23816. }).catch(error => {
  23817. reject(new IllegalOperationError('Webcam error!', error));
  23818. });
  23819. };
  23820. video.setAttribute('playsinline', '');
  23821. video.setAttribute('autoplay', '');
  23822. video.setAttribute('muted', '');
  23823. video.srcObject = stream;
  23824. }).catch(error => {
  23825. reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
  23826. });
23827. }).then(_ => super._init()); // this will call VideoSource._init(), which prepares the video via _prepareVideo()
  23828. }
  23829. /**
  23830. * Release this source of data
  23831. * @returns a promise that resolves as soon as this source of data is released
  23832. * @internal
  23833. */
  23834. _release() {
  23835. const stream = this._cameraVideo.srcObject;
  23836. const tracks = stream.getTracks();
  23837. // stop camera feed
  23838. tracks.forEach(track => track.stop());
  23839. this._cameraVideo.onloadedmetadata = null;
  23840. this._cameraVideo.srcObject = null;
  23841. // release the media
  23842. return super._release();
  23843. }
  23844. }
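/*
 * Usage sketch (illustrative only): open the rear camera with custom options.
 * Any getUserMedia() constraint may be passed via `constraints`; the width and
 * height derived from `resolution` and `aspectRatio` are merged into it by
 * _init(). The values shown are the defaults.
 *
 *     const camera = new CameraSource({
 *         resolution: 'md',
 *         aspectRatio: 16 / 9,
 *         constraints: { facingMode: 'environment' }
 *     });
 *     // or, via the factory defined later in this file:
 *     // const camera = SourceFactory.Camera({ resolution: 'md' });
 */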
  23845. ;// CONCATENATED MODULE: ./src/sources/pointer-source.ts
  23846. /*
  23847. * encantar.js
  23848. * GPU-accelerated Augmented Reality for the web
  23849. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23850. *
  23851. * This program is free software: you can redistribute it and/or modify
  23852. * it under the terms of the GNU Lesser General Public License as published
  23853. * by the Free Software Foundation, either version 3 of the License, or
  23854. * (at your option) any later version.
  23855. *
  23856. * This program is distributed in the hope that it will be useful,
  23857. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23858. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23859. * GNU Lesser General Public License for more details.
  23860. *
  23861. * You should have received a copy of the GNU Lesser General Public License
  23862. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23863. *
  23864. * pointer-source.ts
  23865. * Source of data of pointer-based input: mouse, touch, pen...
  23866. */
  23867. /**
  23868. * Source of data of pointer-based input: mouse, touch, pen...
  23869. */
  23870. class PointerSource {
  23871. /**
  23872. * Constructor
  23873. */
  23874. constructor() {
  23875. this._queue = [];
  23876. this._viewport = null;
  23877. this._onPointerEvent = this._onPointerEvent.bind(this);
  23878. this._cancelEvent = this._cancelEvent.bind(this);
  23879. }
  23880. /**
  23881. * A type-identifier of the source of data
  23882. * @internal
  23883. */
  23884. get _type() {
  23885. return 'pointer-source';
  23886. }
  23887. /**
  23888. * Consume a pointer event
  23889. * @returns the next pointer event to be consumed, or null if there are none
  23890. * @internal
  23891. */
  23892. _consume() {
  23893. // producer-consumer mechanism
  23894. return this._queue.shift() || null;
  23895. }
  23896. /**
  23897. * Stats related to this source of data
  23898. * @internal
  23899. */
  23900. get _stats() {
  23901. return 'pointer input';
  23902. }
  23903. /**
  23904. * Initialize this source of data
  23905. * @returns a promise that resolves as soon as this source of data is initialized
  23906. * @internal
  23907. */
  23908. _init() {
  23909. Utils.log('Initializing PointerSource...');
  23910. // nothing to do yet; we need the viewport
  23911. return speedy_vision_default().Promise.resolve();
  23912. }
  23913. /**
  23914. * Release this source of data
  23915. * @returns a promise that resolves as soon as this source of data is released
  23916. * @internal
  23917. */
  23918. _release() {
  23919. this._setViewport(null);
  23920. return speedy_vision_default().Promise.resolve();
  23921. }
  23922. /**
  23923. * Link a viewport to this source of data
  23924. * @param viewport possibly null
  23925. * @internal
  23926. */
  23927. _setViewport(viewport) {
  23928. // unlink previous viewport, if any
  23929. if (this._viewport !== null) {
  23930. this._viewport.hud.container.style.removeProperty('pointer-events');
  23931. this._viewport._subContainer.style.removeProperty('pointer-events');
  23932. this._viewport.container.style.removeProperty('pointer-events');
  23933. this._viewport.canvas.style.removeProperty('pointer-events');
  23934. this._removeEventListeners(this._viewport.canvas);
  23935. }
  23936. // link new viewport, if any
  23937. if ((this._viewport = viewport) !== null) {
  23938. this._addEventListeners(this._viewport.canvas);
  23939. this._viewport.canvas.style.pointerEvents = 'auto';
  23940. this._viewport.container.style.pointerEvents = 'none';
  23941. this._viewport._subContainer.style.pointerEvents = 'none';
  23942. this._viewport.hud.container.style.pointerEvents = 'none';
  23943. // Make HUD elements accept pointer events
  23944. for (const element of this._viewport.hud.container.children) {
  23945. const el = element;
  23946. if (el.style.getPropertyValue('pointer-events') == '')
  23947. el.style.pointerEvents = 'auto';
  23948. }
  23949. }
  23950. }
  23951. /**
  23952. * Event handler
  23953. * @param event
  23954. */
  23955. _onPointerEvent(event) {
  23956. this._queue.push(event);
  23957. event.preventDefault();
  23958. }
  23959. /**
  23960. * Cancel event
  23961. * @param event
  23962. */
  23963. _cancelEvent(event) {
  23964. if (event.cancelable)
  23965. event.preventDefault();
  23966. }
  23967. /**
  23968. * Add event listeners
  23969. * @param canvas
  23970. */
  23971. _addEventListeners(canvas) {
  23972. canvas.addEventListener('pointerdown', this._onPointerEvent);
  23973. canvas.addEventListener('pointerup', this._onPointerEvent);
  23974. canvas.addEventListener('pointermove', this._onPointerEvent);
  23975. canvas.addEventListener('pointercancel', this._onPointerEvent);
  23976. canvas.addEventListener('pointerleave', this._onPointerEvent);
  23977. canvas.addEventListener('pointerenter', this._onPointerEvent);
  23978. canvas.addEventListener('touchstart', this._cancelEvent, { passive: false });
  23979. }
  23980. /**
  23981. * Remove event listeners
  23982. * @param canvas
  23983. */
  23984. _removeEventListeners(canvas) {
  23985. canvas.removeEventListener('touchstart', this._cancelEvent);
  23986. canvas.removeEventListener('pointerenter', this._onPointerEvent);
  23987. canvas.removeEventListener('pointerleave', this._onPointerEvent);
  23988. canvas.removeEventListener('pointercancel', this._onPointerEvent);
  23989. canvas.removeEventListener('pointermove', this._onPointerEvent);
  23990. canvas.removeEventListener('pointerup', this._onPointerEvent);
  23991. canvas.removeEventListener('pointerdown', this._onPointerEvent);
  23992. }
  23993. }
  23994. ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
  23995. /*
  23996. * encantar.js
  23997. * GPU-accelerated Augmented Reality for the web
  23998. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23999. *
  24000. * This program is free software: you can redistribute it and/or modify
  24001. * it under the terms of the GNU Lesser General Public License as published
  24002. * by the Free Software Foundation, either version 3 of the License, or
  24003. * (at your option) any later version.
  24004. *
  24005. * This program is distributed in the hope that it will be useful,
  24006. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24007. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24008. * GNU Lesser General Public License for more details.
  24009. *
  24010. * You should have received a copy of the GNU Lesser General Public License
  24011. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24012. *
  24013. * source-factory.ts
  24014. * Factory of sources of data
  24015. */
  24016. /**
  24017. * Factory of sources of data
  24018. */
  24019. class SourceFactory {
  24020. /**
  24021. * Create a <video>-based source of data
  24022. * @param video video element
  24023. * @returns a video source
  24024. */
  24025. static Video(video) {
  24026. return new VideoSource(video);
  24027. }
  24028. /**
  24029. * Create a <canvas>-based source of data
  24030. * @param canvas canvas element
  24031. * @returns a canvas source
  24032. */
  24033. static Canvas(canvas) {
  24034. return new CanvasSource(canvas);
  24035. }
  24036. /**
  24037. * Create a Webcam-based source of data
  24038. * @param options optional options object
  24039. * @returns a camera source
  24040. */
  24041. static Camera(options = {}) {
  24042. return new CameraSource(options);
  24043. }
  24044. /**
  24045. * Create a source of pointer-based input
  24046. * @returns a pointer source
  24047. */
  24048. static Pointer() {
  24049. return new PointerSource();
  24050. }
  24051. }
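/*
 * Usage sketch (illustrative only): the factory mirrors the source classes
 * defined above. How the sources are handed to a session is outside this
 * module and not shown here.
 *
 *     const camera  = SourceFactory.Camera();                    // webcam
 *     const pointer = SourceFactory.Pointer();                   // mouse / touch / pen
 *     const canvas  = SourceFactory.Canvas(
 *         document.createElement('canvas'));                     // offscreen canvas
 */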
  24052. ;// CONCATENATED MODULE: ./src/core/hud.ts
  24053. /*
  24054. * encantar.js
  24055. * GPU-accelerated Augmented Reality for the web
  24056. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24057. *
  24058. * This program is free software: you can redistribute it and/or modify
  24059. * it under the terms of the GNU Lesser General Public License as published
  24060. * by the Free Software Foundation, either version 3 of the License, or
  24061. * (at your option) any later version.
  24062. *
  24063. * This program is distributed in the hope that it will be useful,
  24064. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24065. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24066. * GNU Lesser General Public License for more details.
  24067. *
  24068. * You should have received a copy of the GNU Lesser General Public License
  24069. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24070. *
  24071. * hud.ts
  24072. * Heads Up Display
  24073. */
  24074. /**
  24075. * Heads Up Display: an overlay displayed in front of the augmented scene
  24076. */
  24077. class HUD {
  24078. /**
  24079. * Constructor
  24080. * @param parent parent of the hud container
  24081. * @param hudContainer an existing hud container (optional)
  24082. */
  24083. constructor(parent, hudContainer) {
  24084. this._container = hudContainer || this._createContainer(parent);
  24085. this._isOwnContainer = (hudContainer == null);
  24086. // move the HUD container to the parent node
  24087. if (this._container.parentElement !== parent) {
  24088. this._container.remove();
  24089. parent.insertAdjacentElement('afterbegin', this._container);
  24090. }
  24091. // the HUD should be hidden initially
  24092. if (!this._container.hidden) {
  24093. Utils.warning(`The container of the HUD should have the hidden attribute`);
  24094. this._container.hidden = true;
  24095. }
  24096. }
  24097. /**
  24098. * The container of the HUD
  24099. */
  24100. get container() {
  24101. return this._container;
  24102. }
  24103. /**
  24104. * Whether or not the HUD is visible
  24105. */
  24106. get visible() {
  24107. return !this._container.hidden;
  24108. }
  24109. /**
  24110. * Whether or not the HUD is visible
  24111. */
  24112. set visible(visible) {
  24113. this._container.hidden = !visible;
  24114. }
  24115. /**
  24116. * Initialize the HUD
  24117. * @param zIndex the z-index of the container
  24118. * @internal
  24119. */
  24120. _init(zIndex) {
  24121. const container = this._container;
  24122. container.style.position = 'absolute';
  24123. container.style.left = container.style.top = '0px';
  24124. container.style.right = container.style.bottom = '0px';
  24125. container.style.padding = container.style.margin = '0px';
  24126. container.style.zIndex = String(zIndex);
  24127. container.style.userSelect = 'none';
  24128. this.visible = true;
  24129. }
  24130. /**
  24131. * Release the HUD
  24132. * @internal
  24133. */
  24134. _release() {
  24135. this.visible = false;
  24136. if (this._isOwnContainer) {
  24137. this._isOwnContainer = false;
  24138. this._container.remove();
  24139. }
  24140. }
  24141. /**
  24142. * Create a HUD container as an immediate child of the input node
  24143. * @param parent parent container
  24144. * @returns HUD container
  24145. */
  24146. _createContainer(parent) {
  24147. const node = document.createElement('div');
  24148. node.hidden = true;
  24149. parent.insertAdjacentElement('afterbegin', node);
  24150. return node;
  24151. }
  24152. }
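/*
 * Usage sketch (illustrative only): when an existing HUD container is
 * supplied, it must carry the hidden attribute; the HUD reveals it on _init().
 * A HUD is normally created by the Viewport, not by application code, so the
 * element names below are hypothetical.
 *
 *     // in the HTML (conceptually): <div id="hud" hidden> ...overlay... </div>
 *     const parentElement = document.getElementById('ar-viewport');
 *     const hudContainer  = document.getElementById('hud');
 *     const hud = new HUD(parentElement, hudContainer);
 *     hud.visible;  // false until _init() is called
 */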
  24153. ;// CONCATENATED MODULE: ./src/ui/fullscreen-button.ts
  24154. /*
  24155. * encantar.js
  24156. * GPU-accelerated Augmented Reality for the web
  24157. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24158. *
  24159. * This program is free software: you can redistribute it and/or modify
  24160. * it under the terms of the GNU Lesser General Public License as published
  24161. * by the Free Software Foundation, either version 3 of the License, or
  24162. * (at your option) any later version.
  24163. *
  24164. * This program is distributed in the hope that it will be useful,
  24165. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24166. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24167. * GNU Lesser General Public License for more details.
  24168. *
  24169. * You should have received a copy of the GNU Lesser General Public License
  24170. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24171. *
  24172. * fullscreen-button.ts
  24173. * A built-in fullscreen button introduced as a convenience
  24174. */
  24175. /** Button icon to be displayed when the fullscreen mode is disabled */
  24176. const BUTTON_ICON_OFF = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAbUlEQVRYR+2WOQ4AIAgE5f+PVhobDZANBZAsraAwXMoqFil+f9GBj8BW8dIiKt45at/XgShStHgvmfdekwAdIIEyAmh1Z/U5ikmABPoRsLZWtt+5DUlgHgGr6qM1Pf9XnO131L7fJEQjyOqXEzjP1YAhNmUTrgAAAABJRU5ErkJggg==';
  24177. /** Button icon to be displayed when the fullscreen mode is enabled */
  24178. const BUTTON_ICON_ON = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAZElEQVRYR+2WwRIAEAhE9f8fTQ5OhtkLxbzOyc5rJSvBYcH3FwTIBKpHb5d57Nqm5o0aCIBAPgLDxSunq69APT8RCBdwezTLHjglDAEQgEC+QZR2EqqbjprHRgSB9wjwHX9LoAHP1YAhXF4Z/QAAAABJRU5ErkJggg==';
  24179. /** Button size, in pixels */
  24180. const BUTTON_SIZE = 64;
  24181. /** Button margin, in pixels */
  24182. const BUTTON_MARGIN = 24;
  24183. /**
  24184. * Built-in fullscreen button
  24185. */
  24186. class FullscreenButton {
  24187. /**
  24188. * Constructor
  24189. * @param viewport Viewport
  24190. */
  24191. constructor(viewport) {
  24192. this._viewport = viewport;
  24193. this._button = this._createButton();
  24194. this._boundEventHandler = this._handleFullscreenEvent.bind(this);
  24195. }
  24196. /**
  24197. * Initialize
  24198. */
  24199. init() {
  24200. this._viewport.hud.container.appendChild(this._button);
  24201. this._viewport.addEventListener('fullscreenchange', this._boundEventHandler);
  24202. }
  24203. /**
  24204. * Release
  24205. */
  24206. release() {
  24207. this._viewport.removeEventListener('fullscreenchange', this._boundEventHandler);
  24208. this._button.remove();
  24209. }
  24210. /**
  24211. * Create the <button> element
  24212. */
  24213. _createButton() {
  24214. const button = document.createElement('button');
  24215. button.style.position = 'absolute';
  24216. button.style.bottom = BUTTON_MARGIN + 'px';
  24217. button.style.right = BUTTON_MARGIN + 'px';
  24218. button.style.width = BUTTON_SIZE + 'px';
  24219. button.style.height = BUTTON_SIZE + 'px';
  24220. button.style.opacity = '0.5';
  24221. button.style.cursor = 'pointer';
  24222. button.style.outline = 'none';
  24223. button.style['-webkit-tap-highlight-color'] = 'transparent';
  24224. button.draggable = false;
  24225. button.style.backgroundColor = 'transparent';
  24226. button.style.backgroundImage = 'url(' + BUTTON_ICON_OFF + ')';
  24227. button.style.backgroundSize = 'cover';
  24228. button.style.imageRendering = 'pixelated';
  24229. button.style.borderColor = 'white';
  24230. button.style.borderStyle = 'solid';
  24231. button.style.borderWidth = '2px';
  24232. button.style.borderRadius = '8px';
  24233. const highlight = () => {
  24234. button.style.backgroundColor = '#ffd500';
  24235. button.style.borderColor = '#ffd500';
  24236. button.style.opacity = '1.0';
  24237. };
  24238. const dehighlight = () => {
  24239. button.style.backgroundColor = 'transparent';
  24240. button.style.borderColor = 'white';
  24241. button.style.opacity = '0.5';
  24242. };
  24243. button.addEventListener('pointerdown', highlight);
  24244. button.addEventListener('pointerup', dehighlight);
  24245. button.addEventListener('pointerleave', dehighlight);
  24246. button.addEventListener('click', () => {
  24247. if (!this._viewport.fullscreen) {
  24248. this._viewport.requestFullscreen().catch(err => {
  24249. alert(`Can't enable the fullscreen mode. ` + err.toString());
  24250. });
  24251. }
  24252. else {
  24253. this._viewport.exitFullscreen();
  24254. }
  24255. });
  24256. return button;
  24257. }
  24258. /**
  24259. * Handle a fullscreenchange event
  24260. */
  24261. _handleFullscreenEvent(event) {
  24262. const img = this._viewport.fullscreen ? BUTTON_ICON_ON : BUTTON_ICON_OFF;
  24263. this._button.style.backgroundImage = 'url(' + img + ')';
  24264. }
  24265. }
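/*
 * The click handler above toggles fullscreen through the public Viewport API.
 * A custom control can follow the same pattern; a minimal sketch, assuming a
 * user-provided `myButton` element and a `viewport` instance (browsers only
 * honor the request when it originates from a user gesture such as a click):
 *
 *     myButton.addEventListener('click', () => {
 *         if (!viewport.fullscreen)
 *             viewport.requestFullscreen().catch(err => console.error(err));
 *         else
 *             viewport.exitFullscreen();
 *     });
 */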
  24266. ;// CONCATENATED MODULE: ./src/core/viewport.ts
  24267. /*
  24268. * encantar.js
  24269. * GPU-accelerated Augmented Reality for the web
  24270. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24271. *
  24272. * This program is free software: you can redistribute it and/or modify
  24273. * it under the terms of the GNU Lesser General Public License as published
  24274. * by the Free Software Foundation, either version 3 of the License, or
  24275. * (at your option) any later version.
  24276. *
  24277. * This program is distributed in the hope that it will be useful,
  24278. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24279. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24280. * GNU Lesser General Public License for more details.
  24281. *
  24282. * You should have received a copy of the GNU Lesser General Public License
  24283. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24284. *
  24285. * viewport.ts
  24286. * Viewport
  24287. */
  24288. /** An event emitted by a Viewport */
  24289. class ViewportEvent extends AREvent {
  24290. }
  24291. /** Viewport event target */
  24292. class ViewportEventTarget extends AREventTarget {
  24293. }
  24294. /** Default viewport constructor settings */
  24295. const DEFAULT_VIEWPORT_SETTINGS = {
  24296. container: null,
  24297. hudContainer: null,
  24298. resolution: 'lg',
  24299. style: 'best-fit',
  24300. canvas: null,
  24301. fullscreenUI: true,
  24302. };
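/*
 * These defaults are merged with the settings passed to the Viewport
 * constructor (exposed as the AR.Viewport() factory in main.ts). A minimal
 * sketch, assuming the page provides a <div id="ar-viewport"> (the id is
 * illustrative):
 *
 *     const viewport = AR.Viewport({
 *         container: document.getElementById('ar-viewport'),
 *         resolution: 'lg',     // resolution of the virtual scene
 *         style: 'best-fit',    // 'best-fit' | 'stretch' | 'inline'
 *         fullscreenUI: true    // show the built-in fullscreen button when available
 *     });
 */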
  24303. /** Base z-index of the children of the viewport container */
  24304. const BASE_ZINDEX = 0;
  24305. /** Z-index of the background canvas */
  24306. const BACKGROUND_ZINDEX = BASE_ZINDEX + 0;
  24307. /** Z-index of the foreground canvas */
  24308. const FOREGROUND_ZINDEX = BASE_ZINDEX + 1;
  24309. /** Z-index of the HUD */
  24310. const HUD_ZINDEX = BASE_ZINDEX + 2;
  24311. /**
  24312. * Helper class to work with the containers of the viewport
  24313. */
  24314. class ViewportContainers {
  24315. /**
  24316. * Constructor
  24317. * @param container viewport container
  24318. */
  24319. constructor(container) {
  24320. // validate
  24321. if (container == null)
  24322. throw new IllegalArgumentError('Unspecified viewport container');
  24323. else if (!(container instanceof HTMLElement))
  24324. throw new IllegalArgumentError('Invalid viewport container');
  24325. // store the viewport container
  24326. this._container = container;
  24327. // create the sub-container
  24328. this._subContainer = document.createElement('div');
  24329. container.appendChild(this._subContainer);
  24330. }
  24331. /**
  24332. * The viewport container
  24333. */
  24334. get container() {
  24335. return this._container;
  24336. }
  24337. /**
  24338. * The sub-container
  24339. */
  24340. get subContainer() {
  24341. return this._subContainer;
  24342. }
  24343. /**
  24344. * Initialize
  24345. */
  24346. init() {
  24347. this._container.style.touchAction = 'none';
  24348. this._container.style.backgroundColor = 'black';
  24349. }
  24350. /**
  24351. * Release
  24352. */
  24353. release() {
  24354. this._container.style.removeProperty('background-color');
  24355. this._container.style.removeProperty('touch-action');
  24356. }
  24357. }
  24358. /**
  24359. * Helper class to work with the canvases of the viewport
  24360. */
  24361. class ViewportCanvases {
  24362. /**
  24363. * Constructor
  24364. * @param parent container for the canvases
  24365. * @param initialSize initial size of the canvases
  24366. * @param fgCanvas optional existing foreground canvas
  24367. */
  24368. constructor(parent, initialSize, fgCanvas = null) {
  24369. if (fgCanvas !== null && !(fgCanvas instanceof HTMLCanvasElement))
  24370. throw new IllegalArgumentError('Not a canvas: ' + fgCanvas);
  24371. this._originalCSSTextOfForegroundCanvas = fgCanvas ? fgCanvas.style.cssText : '';
  24372. this._foregroundCanvas = this._styleCanvas(fgCanvas || this._createCanvas(initialSize), FOREGROUND_ZINDEX);
  24373. this._foregroundCanvas.style.background = 'transparent';
  24374. this._backgroundCanvas = this._styleCanvas(this._createCanvas(initialSize), BACKGROUND_ZINDEX);
  24375. this._backgroundCanvas.hidden = true;
  24376. this._foregroundCanvas.hidden = true;
  24377. const engineInfo = 'encantar.js ' + AR.version;
  24378. this._backgroundCanvas.dataset.arEngine = engineInfo;
  24379. this._foregroundCanvas.dataset.arEngine = engineInfo;
  24380. parent.appendChild(this._backgroundCanvas);
  24381. parent.appendChild(this._foregroundCanvas);
  24382. }
  24383. /**
  24384. * The background canvas
  24385. */
  24386. get backgroundCanvas() {
  24387. return this._backgroundCanvas;
  24388. }
  24389. /**
  24390. * The foreground canvas
  24391. */
  24392. get foregroundCanvas() {
  24393. return this._foregroundCanvas;
  24394. }
  24395. /**
  24396. * Initialize
  24397. */
  24398. init() {
  24399. this._backgroundCanvas.hidden = false;
  24400. this._foregroundCanvas.hidden = false;
  24401. }
  24402. /**
  24403. * Release
  24404. */
  24405. release() {
  24406. this._backgroundCanvas.hidden = true;
  24407. this._foregroundCanvas.hidden = true;
  24408. this._backgroundCanvas.style.cssText = '';
  24409. this._foregroundCanvas.style.cssText = this._originalCSSTextOfForegroundCanvas;
  24410. }
  24411. /**
  24412. * Create a canvas
  24413. * @param size size of the drawing buffer
  24414. * @returns a new canvas
  24415. */
  24416. _createCanvas(size) {
  24417. const canvas = document.createElement('canvas');
  24418. canvas.width = size.width;
  24419. canvas.height = size.height;
  24420. return canvas;
  24421. }
  24422. /**
  24423. * Add suitable CSS rules to a canvas
  24424. * @param canvas
  24425. * @param zIndex
  24426. * @returns canvas
  24427. */
  24428. _styleCanvas(canvas, zIndex) {
  24429. canvas.style.position = 'absolute';
  24430. canvas.style.left = '0px';
  24431. canvas.style.top = '0px';
  24432. canvas.style.width = '100%';
  24433. canvas.style.height = '100%';
  24434. canvas.style.zIndex = String(zIndex);
  24435. return canvas;
  24436. }
  24437. }
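/*
 * The foreground canvas is the one exposed publicly as Viewport.canvas; the
 * virtual scene is drawn on it, above the background canvas that displays the
 * physical scene. A small sketch, assuming a `viewport` instance:
 *
 *     const canvas = viewport.canvas;        // foreground canvas
 *     console.log(canvas.dataset.arEngine);  // e.g. "encantar.js 0.4.0"
 */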
  24438. /**
  24439. * Fullscreen utilities
  24440. */
  24441. class ViewportFullscreenHelper {
  24442. /**
  24443. * Constructor
  24444. * @param viewport Viewport
  24445. */
  24446. constructor(viewport) {
  24447. this._viewport = viewport;
  24448. this._container = viewport.container;
  24449. this._boundEventHandler = this._triggerEvent.bind(this);
  24450. }
  24451. /**
  24452. * Initialize
  24453. */
  24454. init() {
  24455. this._container.addEventListener('fullscreenchange', this._boundEventHandler);
  24456. }
  24457. /**
  24458. * Release
  24459. */
  24460. release() {
  24461. this._container.removeEventListener('fullscreenchange', this._boundEventHandler);
  24462. }
  24463. /**
  24464. * Make a request to the user agent so that the viewport container is
  24465. * displayed in fullscreen mode. The container must be a compatible element[1]
  24466. * and the user must interact with the page in order to comply with browser
  24467. * policies[2]. In case of error, the returned promise is rejected.
  24468. * [1] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#compatible_elements
  24469. * [2] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#security
  24470. * @returns promise
  24471. */
  24472. request() {
  24473. const container = this._container;
  24474. // fallback for older WebKit versions
  24475. if (container.requestFullscreen === undefined) {
  24476. if (container.webkitRequestFullscreen === undefined)
  24477. return speedy_vision_default().Promise.reject(new NotSupportedError());
  24478. else if (!document.webkitFullscreenEnabled)
  24479. return speedy_vision_default().Promise.reject(new AccessDeniedError());
  24480. // webkitRequestFullscreen() does not return a value
  24481. container.webkitRequestFullscreen();
  24482. return new (speedy_vision_default()).Promise((resolve, reject) => {
  24483. setTimeout(() => {
  24484. if (container === document.webkitFullscreenElement) {
  24485. Utils.log('Entering fullscreen mode...');
  24486. resolve();
  24487. }
  24488. else
  24489. reject(new TypeError());
  24490. }, 100);
  24491. });
  24492. }
  24493. // check if the fullscreen mode is available
  24494. if (!document.fullscreenEnabled)
  24495. return speedy_vision_default().Promise.reject(new AccessDeniedError());
  24496. // request fullscreen
  24497. return new (speedy_vision_default()).Promise((resolve, reject) => {
  24498. container.requestFullscreen({
  24499. navigationUI: 'hide'
  24500. }).then(() => {
  24501. Utils.log('Entering fullscreen mode...');
  24502. resolve();
  24503. }, reject);
  24504. });
  24505. }
  24506. /**
  24507. * Exit fullscreen mode
  24508. * @returns promise
  24509. */
  24510. exit() {
  24511. // fallback for older WebKit versions
  24512. if (document.exitFullscreen === undefined) {
  24513. const doc = document;
  24514. if (doc.webkitExitFullscreen === undefined)
  24515. return speedy_vision_default().Promise.reject(new NotSupportedError());
  24516. else if (doc.webkitFullscreenElement === null)
  24517. return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
  24518. // webkitExitFullscreen() does not return a value
  24519. doc.webkitExitFullscreen();
  24520. return new (speedy_vision_default()).Promise((resolve, reject) => {
  24521. setTimeout(() => {
  24522. if (doc.webkitFullscreenElement === null) {
  24523. Utils.log('Exiting fullscreen mode...');
  24524. resolve();
  24525. }
  24526. else
  24527. reject(new TypeError());
  24528. }, 100);
  24529. });
  24530. }
  24531. // error if not in fullscreen mode
  24532. if (document.fullscreenElement === null)
  24533. return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
  24534. // exit fullscreen
  24535. return new (speedy_vision_default()).Promise((resolve, reject) => {
  24536. document.exitFullscreen().then(() => {
  24537. Utils.log('Exiting fullscreen mode...');
  24538. resolve();
  24539. }, reject);
  24540. });
  24541. }
  24542. /**
24543. * Is the fullscreen mode available on this platform?
24544. * @returns true if the fullscreen mode is available on this platform
  24545. */
  24546. isAvailable() {
  24547. return document.fullscreenEnabled ||
  24548. !!(document.webkitFullscreenEnabled);
  24549. }
  24550. /**
  24551. * Is the container currently being displayed in fullscreen mode?
  24552. * @returns true if the container is currently being displayed in fullscreen mode
  24553. */
  24554. isActivated() {
  24555. if (document.fullscreenElement !== undefined)
  24556. return document.fullscreenElement === this._container;
  24557. else if (document.webkitFullscreenElement !== undefined)
  24558. return document.webkitFullscreenElement === this._container;
  24559. else
  24560. return false;
  24561. }
  24562. /**
  24563. * Trigger a fullscreenchange event
  24564. */
  24565. _triggerEvent() {
  24566. const event = new ViewportEvent('fullscreenchange');
  24567. this._viewport.dispatchEvent(event);
  24568. }
  24569. }
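/*
 * The helper above re-dispatches the browser's fullscreenchange event as a
 * ViewportEvent, so user code can listen on the viewport itself. A minimal
 * sketch, assuming a `viewport` instance:
 *
 *     viewport.addEventListener('fullscreenchange', () => {
 *         console.log('fullscreen mode:', viewport.fullscreen);
 *     });
 */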
  24570. /**
  24571. * Helper class to resize the viewport
  24572. */
  24573. class ViewportResizer {
  24574. /**
  24575. * Constructor
  24576. * @param viewport the viewport to be resized
  24577. */
  24578. constructor(viewport) {
  24579. this._viewport = viewport;
  24580. this._timeout = null;
  24581. this._resize = this._onResize.bind(this);
  24582. this._triggerResize = this.triggerResize.bind(this);
  24583. this._resizeStrategy = new InlineResizeStrategy();
  24584. // initial setup
  24585. // (the size is yet unknown)
  24586. this._viewport.addEventListener('resize', this._resize);
  24587. this.triggerResize(0);
  24588. }
  24589. /**
  24590. * Initialize
  24591. */
  24592. init() {
  24593. // Configure the resize listener. We want the viewport to adjust itself
  24594. // if the phone/screen is resized or changes orientation
24595. window.addEventListener('resize', this._triggerResize); // triggerResize() applies a short delay (debounce)
  24596. // handle changes of orientation
  24597. // (is this needed? we already listen to resize events)
  24598. if (screen.orientation !== undefined)
  24599. screen.orientation.addEventListener('change', this._triggerResize);
  24600. else
  24601. window.addEventListener('orientationchange', this._triggerResize); // deprecated
  24602. // trigger a resize to setup the sizes / the CSS
  24603. this.triggerResize(0);
  24604. }
  24605. /**
  24606. * Release
  24607. */
  24608. release() {
  24609. if (screen.orientation !== undefined)
  24610. screen.orientation.removeEventListener('change', this._triggerResize);
  24611. else
  24612. window.removeEventListener('orientationchange', this._triggerResize);
  24613. window.removeEventListener('resize', this._triggerResize);
  24614. this._viewport.removeEventListener('resize', this._resize);
  24615. this._resizeStrategy.clear(this._viewport);
  24616. }
  24617. /**
  24618. * Trigger a resize event after a delay
  24619. * @param delay in milliseconds
  24620. */
  24621. triggerResize(delay = 100) {
  24622. const event = new ViewportEvent('resize');
  24623. if (delay <= 0) {
  24624. this._viewport.dispatchEvent(event);
  24625. return;
  24626. }
  24627. if (this._timeout !== null)
  24628. clearTimeout(this._timeout);
  24629. this._timeout = setTimeout(() => {
  24630. this._timeout = null;
  24631. this._viewport.dispatchEvent(event);
  24632. }, delay);
  24633. }
  24634. /**
  24635. * Change the resize strategy
  24636. * @param strategy new strategy
  24637. */
  24638. setStrategy(strategy) {
  24639. this._resizeStrategy.clear(this._viewport);
  24640. this._resizeStrategy = strategy;
  24641. this.triggerResize(0);
  24642. }
  24643. /**
  24644. * Change the resize strategy
  24645. * @param strategyName name of the new strategy
  24646. */
  24647. setStrategyByName(strategyName) {
  24648. switch (strategyName) {
  24649. case 'best-fit':
  24650. this.setStrategy(new BestFitResizeStrategy());
  24651. break;
  24652. case 'stretch':
  24653. this.setStrategy(new StretchResizeStrategy());
  24654. break;
  24655. case 'inline':
  24656. this.setStrategy(new InlineResizeStrategy());
  24657. break;
  24658. default:
  24659. throw new IllegalArgumentError('Invalid viewport style: ' + strategyName);
  24660. }
  24661. }
  24662. /**
  24663. * Resize callback
  24664. */
  24665. _onResize() {
  24666. const viewport = this._viewport;
  24667. // Resize the drawing buffer of the foreground canvas, so that it
  24668. // matches the desired resolution, as well as the aspect ratio of the
  24669. // background canvas
  24670. const foregroundCanvas = viewport.canvas;
  24671. const virtualSize = viewport.virtualSize;
  24672. foregroundCanvas.width = virtualSize.width;
  24673. foregroundCanvas.height = virtualSize.height;
  24674. // Resize the drawing buffer of the background canvas
  24675. const backgroundCanvas = viewport._backgroundCanvas;
  24676. const realSize = viewport._realSize;
  24677. backgroundCanvas.width = realSize.width;
  24678. backgroundCanvas.height = realSize.height;
  24679. // Call strategy
  24680. this._resizeStrategy.resize(viewport);
  24681. }
  24682. }
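/*
 * triggerResize() above debounces the 'resize' event: repeated calls within
 * the delay collapse into a single dispatch. The same pattern in isolation, a
 * sketch with a hypothetical callback:
 *
 *     let timeout = null;
 *     function debounced(fn, delay = 100) {
 *         return () => {
 *             if (timeout !== null)
 *                 clearTimeout(timeout);
 *             timeout = setTimeout(() => { timeout = null; fn(); }, delay);
 *         };
 *     }
 *     window.addEventListener('resize', debounced(() => console.log('resized')));
 */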
  24683. /**
  24684. * Resize strategies
  24685. */
  24686. class ViewportResizeStrategy {
  24687. /**
  24688. * Clear CSS rules
  24689. * @param viewport
  24690. */
  24691. clear(viewport) {
  24692. viewport.container.style.cssText = '';
  24693. viewport._subContainer.style.cssText = '';
  24694. }
  24695. }
  24696. /**
  24697. * Inline viewport: it follows the typical flow of a web page
  24698. */
  24699. class InlineResizeStrategy extends ViewportResizeStrategy {
  24700. /**
  24701. * Resize the viewport
  24702. * @param viewport
  24703. */
  24704. resize(viewport) {
  24705. const container = viewport.container;
  24706. const subContainer = viewport._subContainer;
  24707. const virtualSize = viewport.virtualSize;
  24708. container.style.display = 'inline-block'; // fixes a potential issue of the viewport not showing up
  24709. container.style.position = 'relative';
  24710. container.style.left = '0px';
  24711. container.style.top = '0px';
  24712. container.style.width = virtualSize.width + 'px';
  24713. container.style.height = virtualSize.height + 'px';
  24714. subContainer.style.position = 'absolute';
  24715. subContainer.style.left = '0px';
  24716. subContainer.style.top = '0px';
  24717. subContainer.style.width = '100%';
  24718. subContainer.style.height = '100%';
  24719. }
  24720. }
  24721. /**
  24722. * Immersive viewport: it occupies the entire page
  24723. */
  24724. class ImmersiveResizeStrategy extends ViewportResizeStrategy {
  24725. /**
  24726. * Resize the viewport
  24727. * @param viewport
  24728. */
  24729. resize(viewport) {
  24730. const CONTAINER_ZINDEX = 1000000000;
  24731. const container = viewport.container;
  24732. container.style.position = 'fixed';
  24733. container.style.left = '0px';
  24734. container.style.top = '0px';
  24735. container.style.width = '100vw';
  24736. container.style.height = '100vh';
  24737. container.style.zIndex = String(CONTAINER_ZINDEX);
  24738. }
  24739. }
  24740. /**
  24741. * Immersive viewport with best-fit style: it occupies the entire page and
  24742. * preserves the aspect ratio of the media
  24743. */
  24744. class BestFitResizeStrategy extends ImmersiveResizeStrategy {
  24745. /**
  24746. * Resize the viewport
  24747. * @param viewport
  24748. */
  24749. resize(viewport) {
  24750. const subContainer = viewport._subContainer;
  24751. const windowAspectRatio = window.innerWidth / window.innerHeight;
  24752. const viewportAspectRatio = viewport._realSize.width / viewport._realSize.height;
  24753. let width = 1, height = 1, left = '0px', top = '0px';
  24754. if (viewportAspectRatio <= windowAspectRatio) {
  24755. height = window.innerHeight;
  24756. width = Math.round(height * viewportAspectRatio);
  24757. width -= width % 2;
  24758. left = `calc(50% - ${width >>> 1}px)`;
  24759. }
  24760. else {
  24761. width = window.innerWidth;
  24762. height = Math.round(width / viewportAspectRatio);
  24763. height -= height % 2;
  24764. top = `calc(50% - ${height >>> 1}px)`;
  24765. }
  24766. subContainer.style.position = 'absolute';
  24767. subContainer.style.left = left;
  24768. subContainer.style.top = top;
  24769. subContainer.style.width = width + 'px';
  24770. subContainer.style.height = height + 'px';
  24771. super.resize(viewport);
  24772. }
  24773. }
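/*
 * A worked example of the best-fit math above, with illustrative numbers:
 * a 1920x1080 window and 4:3 media.
 *
 *     windowAspectRatio   = 1920 / 1080 ~ 1.778
 *     viewportAspectRatio = 4 / 3       ~ 1.333   (<= windowAspectRatio)
 *     height = 1080
 *     width  = round(1080 * 4/3) = 1440           (already even)
 *     left   = calc(50% - 720px), top = 0px
 *
 * The sub-container keeps the media's aspect ratio and is centered
 * horizontally; the viewport container's black background fills the rest.
 */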
  24774. /**
  24775. * Immersive viewport with stretch style: it occupies the entire page and
  24776. * fully stretches the media
  24777. */
  24778. class StretchResizeStrategy extends ImmersiveResizeStrategy {
  24779. /**
  24780. * Resize the viewport
  24781. * @param viewport
  24782. */
  24783. resize(viewport) {
  24784. const subContainer = viewport._subContainer;
  24785. subContainer.style.position = 'absolute';
  24786. subContainer.style.left = '0px';
  24787. subContainer.style.top = '0px';
  24788. subContainer.style.width = window.innerWidth + 'px';
  24789. subContainer.style.height = window.innerHeight + 'px';
  24790. super.resize(viewport);
  24791. }
  24792. }
  24793. /**
  24794. * Viewport
  24795. */
  24796. class Viewport extends ViewportEventTarget {
  24797. /**
  24798. * Constructor
  24799. * @param viewportSettings
  24800. */
  24801. constructor(viewportSettings) {
  24802. const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
  24803. super();
  24804. const guessedAspectRatio = window.innerWidth / window.innerHeight;
  24805. const initialSize = Utils.resolution(settings.resolution, guessedAspectRatio);
  24806. this._mediaSize = () => initialSize;
  24807. this._resolution = settings.resolution;
  24808. this._style = settings.style;
  24809. this._containers = new ViewportContainers(settings.container);
  24810. this._hud = new HUD(this._subContainer, settings.hudContainer);
  24811. this._canvases = new ViewportCanvases(this._subContainer, initialSize, settings.canvas);
  24812. this._resizer = new ViewportResizer(this);
  24813. this._resizer.setStrategyByName(this._style);
  24814. this._fullscreen = new ViewportFullscreenHelper(this);
  24815. this._fullscreenButton = null;
  24816. if (settings.fullscreenUI && this.fullscreenAvailable)
  24817. this._fullscreenButton = new FullscreenButton(this);
  24818. }
  24819. /**
  24820. * Viewport container
  24821. */
  24822. get container() {
  24823. return this._containers.container;
  24824. }
  24825. /**
  24826. * Viewport style
  24827. */
  24828. get style() {
  24829. return this._style;
  24830. }
  24831. /**
  24832. * Set viewport style
  24833. */
  24834. /*
  24835. set style(value: ViewportStyle)
  24836. {
  24837. // note: the viewport style is independent of the session mode!
  24838. if(value !== this._style) {
  24839. this._resizer.setStrategyByName(value);
  24840. this._style = value;
  24841. }
  24842. }
  24843. */
  24844. /**
  24845. * HUD
  24846. */
  24847. get hud() {
  24848. return this._hud;
  24849. }
  24850. /**
  24851. * Resolution of the virtual scene
  24852. */
  24853. get resolution() {
  24854. return this._resolution;
  24855. }
  24856. /**
  24857. * Size in pixels of the drawing buffer of the canvas
  24858. * on which the virtual scene will be drawn
  24859. */
  24860. get virtualSize() {
  24861. const size = this._realSize;
  24862. const aspectRatio = size.width / size.height;
  24863. return Utils.resolution(this._resolution, aspectRatio);
  24864. }
  24865. /**
  24866. * Is the viewport currently being displayed in fullscreen mode?
  24867. */
  24868. get fullscreen() {
  24869. return this._fullscreen.isActivated();
  24870. }
  24871. /**
24872. * Is the fullscreen mode available on this platform?
  24873. */
  24874. get fullscreenAvailable() {
  24875. return this._fullscreen.isAvailable();
  24876. }
  24877. /**
  24878. * The canvas on which the virtual scene will be drawn
  24879. */
  24880. get canvas() {
  24881. return this._canvases.foregroundCanvas;
  24882. }
  24883. /**
  24884. * The canvas on which the physical scene will be drawn
  24885. * @internal
  24886. */
  24887. get _backgroundCanvas() {
  24888. return this._canvases.backgroundCanvas;
  24889. }
  24890. /**
  24891. * Size of the drawing buffer of the background canvas, in pixels
  24892. * @internal
  24893. */
  24894. get _realSize() {
  24895. return this._mediaSize();
  24896. }
  24897. /**
  24898. * Sub-container of the viewport container
  24899. * @internal
  24900. */
  24901. get _subContainer() {
  24902. return this._containers.subContainer;
  24903. }
  24904. /**
  24905. * Request fullscreen mode
  24906. * @returns promise
  24907. */
  24908. requestFullscreen() {
  24909. return this._fullscreen.request();
  24910. }
  24911. /**
  24912. * Exit fullscreen mode
  24913. * @returns promise
  24914. */
  24915. exitFullscreen() {
  24916. return this._fullscreen.exit();
  24917. }
  24918. /**
  24919. * Convert a position given in normalized units to a corresponding pixel
  24920. * position in canvas space. Normalized units range from -1 to +1. The
  24921. * center of the canvas is at (0,0). The top right corner is at (1,1).
  24922. * The bottom left corner is at (-1,-1).
  24923. * @param position in normalized units
  24924. * @returns an equivalent pixel position in canvas space
  24925. */
  24926. convertToPixels(position) {
  24927. const canvas = this.canvas;
  24928. const x = 0.5 * (1 + position.x) * canvas.width;
  24929. const y = 0.5 * (1 - position.y) * canvas.height;
  24930. return new Vector2(x, y);
  24931. }
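/*
 * A worked example of the mapping above, assuming an 800x600 canvas
 * (illustrative numbers):
 *
 *     viewport.convertToPixels(AR.Vector2( 0,  0));  // center      -> (400, 300)
 *     viewport.convertToPixels(AR.Vector2( 1,  1));  // top right   -> (800, 0)
 *     viewport.convertToPixels(AR.Vector2(-1, -1));  // bottom left -> (0, 600)
 */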
  24932. /**
  24933. * Initialize the viewport (when the session starts)
  24934. * @param getMediaSize
  24935. * @param sessionMode
  24936. * @internal
  24937. */
  24938. _init(getMediaSize, sessionMode) {
  24939. var _a;
  24940. // validate if the viewport style matches the session mode
  24941. if (sessionMode == 'immersive') {
  24942. if (this._style != 'best-fit' && this._style != 'stretch') {
24943. Utils.warning(`Invalid viewport style "${this._style}" for the "${sessionMode}" mode`);
  24944. this._style = 'best-fit';
  24945. this._resizer.setStrategyByName(this._style);
  24946. }
  24947. }
  24948. else if (sessionMode == 'inline') {
  24949. if (this._style != 'inline') {
24950. Utils.warning(`Invalid viewport style "${this._style}" for the "${sessionMode}" mode`);
  24951. this._style = 'inline';
  24952. this._resizer.setStrategyByName(this._style);
  24953. }
  24954. }
  24955. // set the media size getter
  24956. this._mediaSize = getMediaSize;
  24957. // initialize the components
  24958. this._containers.init();
  24959. this._hud._init(HUD_ZINDEX);
  24960. this._canvases.init();
  24961. this._resizer.init();
  24962. this._fullscreen.init();
  24963. (_a = this._fullscreenButton) === null || _a === void 0 ? void 0 : _a.init();
  24964. }
  24965. /**
  24966. * Release the viewport (when the session ends)
  24967. * @internal
  24968. */
  24969. _release() {
  24970. var _a;
  24971. (_a = this._fullscreenButton) === null || _a === void 0 ? void 0 : _a.release();
  24972. this._fullscreen.release();
  24973. this._resizer.release();
  24974. this._canvases.release();
  24975. this._hud._release();
  24976. this._containers.release();
  24977. }
  24978. }
  24979. ;// CONCATENATED MODULE: ./src/main.ts
  24980. /*
  24981. * encantar.js
  24982. * GPU-accelerated Augmented Reality for the web
  24983. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24984. *
  24985. * This program is free software: you can redistribute it and/or modify
  24986. * it under the terms of the GNU Lesser General Public License as published
  24987. * by the Free Software Foundation, either version 3 of the License, or
  24988. * (at your option) any later version.
  24989. *
  24990. * This program is distributed in the hope that it will be useful,
  24991. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24992. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24993. * GNU Lesser General Public License for more details.
  24994. *
  24995. * You should have received a copy of the GNU Lesser General Public License
  24996. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24997. *
  24998. * main.ts
  24999. * Entry point
  25000. */
  25001. /**
  25002. * GPU-accelerated Augmented Reality for the web
  25003. */
  25004. class AR {
  25005. /**
  25006. * Start a new session
  25007. * @param options
  25008. * @returns a promise that resolves to a new session
  25009. */
  25010. static startSession(options) {
  25011. return Session.instantiate(options);
  25012. }
  25013. /**
25014. * Check if the engine can run in the client's browser
  25015. * @returns true if the engine is compatible with the browser
  25016. */
  25017. static isSupported() {
  25018. return Session.isSupported();
  25019. }
  25020. /**
  25021. * Engine version
  25022. */
  25023. static get version() {
25027. return "0.4.0";
  25028. }
  25029. /**
  25030. * Speedy Vision
  25031. */
  25032. static get Speedy() {
  25033. return (speedy_vision_default());
  25034. }
  25035. /**
  25036. * Trackers
  25037. */
  25038. static get Tracker() {
  25039. return TrackerFactory;
  25040. }
  25041. /**
  25042. * Sources of data
  25043. */
  25044. static get Source() {
  25045. return SourceFactory;
  25046. }
  25047. /**
  25048. * Create a viewport
  25049. * @param settings
  25050. * @returns a new viewport with the specified settings
  25051. */
  25052. static Viewport(settings) {
  25053. return new Viewport(settings);
  25054. }
  25055. /**
  25056. * Create a new 2D vector
  25057. * @param x x-coordinate
  25058. * @param y y-coordinate
  25059. * @returns a new 2D vector with the provided coordinates
  25060. */
  25061. static Vector2(x, y) {
  25062. return new Vector2(x, y);
  25063. }
  25064. /**
  25065. * Create a new 3D vector
  25066. * @param x x-coordinate
  25067. * @param y y-coordinate
  25068. * @param z z-coordinate
  25069. * @returns a new 3D vector with the provided coordinates
  25070. */
  25071. static Vector3(x, y, z) {
  25072. return new Vector3(x, y, z);
  25073. }
  25074. /**
  25075. * Global Settings
  25076. */
  25077. static get Settings() {
  25078. return Settings;
  25079. }
  25080. }
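/*
 * A minimal usage sketch of the entry point; `sessionOptions` stands for a
 * valid settings object for AR.startSession() (its keys are not shown here):
 *
 *     if (AR.isSupported()) {
 *         AR.startSession(sessionOptions).then(session => {
 *             // the promise resolves to a new session
 *             console.log('running encantar.js ' + AR.version);
 *         });
 *     }
 *     else {
 *         console.error('This browser cannot run the AR engine');
 *     }
 */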
  25081. // Freeze the namespace
  25082. Object.freeze(AR);
  25083. // Add Speedy Vision to global scope
  25084. ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
  25085. // Display a notice
  25086. Utils.log(`encantar.js version ${AR.version}. ` +
  25087. `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
  25088. "https://github.com/alemart/encantar-js");
  25089. })();
  25090. __webpack_exports__ = __webpack_exports__["default"];
  25091. /******/ return __webpack_exports__;
  25092. /******/ })()
  25093. ;
  25094. });