430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
- /*!
- * MARTINS.js version 0.2.1-wip
- * GPU-accelerated Augmented Reality for the web
- * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
- * https://github.com/alemart/martins-js
- *
- * @license LGPL-3.0-or-later
- * Date: 2024-07-29T00:26:10.319Z
- */
- (function webpackUniversalModuleDefinition(root, factory) {
- if(typeof exports === 'object' && typeof module === 'object')
- module.exports = factory();
- else if(typeof define === 'function' && define.amd)
- define([], factory);
- else if(typeof exports === 'object')
- exports["Martins"] = factory();
- else
- root["Martins"] = factory();
- })(self, () => {
- return /******/ (() => { // webpackBootstrap
- /******/ var __webpack_modules__ = ({
-
- /***/ 774:
- /***/ ((module) => {
-
- /*!
- * Speedy Vision version 0.9.1
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
- * https://github.com/alemart/speedy-vision
- *
- * @license Apache-2.0
- * Date: 2024-07-03T02:16:25.769Z
- */
- (function webpackUniversalModuleDefinition(root, factory) {
- if(true)
- module.exports = factory();
- else {}
- })(self, () => {
- return /******/ (() => { // webpackBootstrap
- /******/ var __webpack_modules__ = ({
-
- /***/ 2199:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
- /* harmony export */ w: () => (/* binding */ Settings)
- /* harmony export */ });
- /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(6634);
- /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1001);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * settings.js
- * Global settings
- */
-
-
-
-
-
-
- /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
- /** @typedef {"raf" | "asap"} GPUPollingMode */
- /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
-
- /** @type {GPUPollingMode} Default GPU polling mode */
- const DEFAULT_GPU_POLLING_MODE = 'raf';
-
- /** @type {GPUPollingMode} GPU polling mode */
- let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
-
- /** @type {LoggingMode} logging mode */
- let loggingMode = 'default';
-
- /**
- * Global settings
- */
- class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q {
- /**
- * Power preference of the WebGL context
- * @returns {PowerPreference}
- */
- static get powerPreference() {
- return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
- }
-
- /**
- * Power preference of the WebGL context
- * @param {PowerPreference} value
- */
- static set powerPreference(value) {
- _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
- }
-
- /**
- * GPU polling mode
- * @returns {GPUPollingMode}
- */
- static get gpuPollingMode() {
- return gpuPollingMode;
- }
-
- /**
- * GPU polling mode
- * @param {GPUPollingMode} value
- */
- static set gpuPollingMode(value) {
- if (value !== 'raf' && value !== 'asap') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
- gpuPollingMode = value;
- }
-
- /**
- * Logging mode
- * @returns {LoggingMode}
- */
- static get logging() {
- return loggingMode;
- }
-
- /**
- * Logging mode
- * @param {LoggingMode} mode
- */
- static set logging(mode) {
- if (mode !== 'default' && mode !== 'none' && mode !== 'diagnostic') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);
- else if (mode === 'diagnostic') _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
- loggingMode = mode;
- }
- }
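A minimal usage sketch for the class above. In the published bundle this class is reached through the library's public namespace (it is referred to directly here), and 'low-power' is an assumed WebGL-style power preference value, so treat this as illustrative only:

// sketch only: drive the three global settings defined above
Settings.powerPreference = 'low-power'; // forwarded to SpeedyGL (assumed WebGL-style value)
Settings.gpuPollingMode = 'asap';       // 'raf' (default) or 'asap'; anything else throws IllegalArgumentError
Settings.logging = 'none';              // 'default' | 'none' | 'diagnostic'
console.log(Settings.gpuPollingMode);   // 'asap'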
-
- /***/ }),
-
- /***/ 6306:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4248__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_4248__.d(__nested_webpack_exports__, {
- /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
- /* harmony export */ });
- /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4248__(6465);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4248__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4248__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-expr.js
- * Symbolic matrix expressions
- */
-
-
-
-
-
- /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
- /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
- /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
-
- /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
-
- /** @const {Dtype2BufferType} */
- const DTYPE_TO_BUFFER_TYPE = Object.freeze({
- 'float32': Float32Array
- });
-
- /**
- * @abstract Matrix expression
- * It's an opaque object representing an algebraic
- * expression. It has no data attached to it.
- */
- class SpeedyMatrixExpr {
- /**
- * Constructor
- * @param {number} rows
- * @param {number} columns
- * @param {SpeedyMatrixDtype} dtype
- */
- constructor(rows, columns, dtype) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
-
- /** @type {number} number of rows */
- this._rows = rows | 0;
-
- /** @type {number} number of columns */
- this._columns = columns | 0;
-
- /** @type {SpeedyMatrixDtype} data type */
- this._dtype = dtype;
- }
-
- /**
- * Number of rows
- * @returns {number}
- */
- get rows() {
- return this._rows;
- }
-
- /**
- * Number of columns
- * @returns {number}
- */
- get columns() {
- return this._columns;
- }
-
- /**
- * Data type
- * @returns {SpeedyMatrixDtype}
- */
- get dtype() {
- return this._dtype;
- }
-
- /**
- * Default data type
- * @returns {SpeedyMatrixDtype}
- */
- static get DEFAULT_DTYPE() {
- return 'float32';
- }
-
- /**
- * Buffer types
- * @returns {Dtype2BufferType}
- */
- static get BUFFER_TYPE() {
- return DTYPE_TO_BUFFER_TYPE;
- }
-
- /**
- * Matrix addition
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- plus(expr) {
- return new SpeedyMatrixAddExpr(this, expr);
- }
-
- /**
- * Matrix subtraction
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- minus(expr) {
- return new SpeedyMatrixSubtractExpr(this, expr);
- }
-
- /**
- * Matrix multiplication
- * @param {SpeedyMatrixExpr|number} expr
- * @returns {SpeedyMatrixExpr}
- */
- times(expr) {
- if (typeof expr === 'number') return new SpeedyMatrixScaleExpr(this, expr);
- else return new SpeedyMatrixMultiplyExpr(this, expr);
- }
-
- /**
- * Matrix transposition
- * @returns {SpeedyMatrixExpr}
- */
- transpose() {
- return new SpeedyMatrixTransposeExpr(this);
- }
-
- /**
- * Matrix inversion
- * @returns {SpeedyMatrixExpr}
- */
- inverse() {
- return new SpeedyMatrixInvertExpr(this);
- }
-
- /**
- * Component-wise multiplication
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- compMult(expr) {
- return new SpeedyMatrixCompMultExpr(this, expr);
- }
-
- /**
- * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- ldiv(expr) {
- return new SpeedyMatrixLdivExpr(this, expr);
- }
-
- /**
- * Returns a human-readable string representation of the matrix expression
- * @returns {string}
- */
- toString() {
- return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
- }
-
- /**
- * Evaluate this expression
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
- const {
- SpeedyMatrix
- } = __nested_webpack_require_4248__(4188);
-
- /**
- * @abstract operation storing a temporary matrix
- */
- class SpeedyMatrixTempExpr extends SpeedyMatrixExpr {
- /**
- * Constructor
- * @param {number} rows
- * @param {number} columns
- * @param {SpeedyMatrixDtype} dtype
- */
- constructor(rows, columns, dtype) {
- super(rows, columns, dtype);
-
- /** @type {SpeedyMatrix} holds the results of a computation */
- this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
- }
- }
-
- /**
- * @abstract unary operation
- */
- class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr {
- /**
- * Constructor
- * @param {number} rows rows of the output matrix
- * @param {number} columns columns of the output matrix
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(rows, columns, operand) {
- super(rows, columns, operand.dtype);
-
- /** @type {SpeedyMatrixExpr} operand */
- this._operand = operand;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- const operand = this._operand._evaluate(wasm, memory);
- const result = this._tempMatrix;
-
- // allocate matrices
- const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
- const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
-
- // copy operand to WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
-
- // run the WASM routine
- this._compute(wasm, memory, resultptr, operandptr);
-
- // copy result from WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
-
- // deallocate matrices
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
-
- // done!
- return result;
- }
-
- /**
- * Compute the result of this operation
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
-
- /**
- * @abstract binary operation
- */
- class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr {
- /**
- * Constructor
- * @param {number} rows rows of the output matrix
- * @param {number} columns columns of the output matrix
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(rows, columns, left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
- super(rows, columns, left.dtype);
-
- /** @type {SpeedyMatrixExpr} left operand */
- this._left = left;
-
- /** @type {SpeedyMatrixExpr} right operand */
- this._right = right;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- const left = this._left._evaluate(wasm, memory);
- const right = this._right._evaluate(wasm, memory);
- const result = this._tempMatrix;
-
- // allocate matrices
- const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
- const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
- const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
-
- // copy input matrices to WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
-
- // run the WASM routine
- this._compute(wasm, memory, resultptr, leftptr, rightptr);
-
- // copy output matrix from WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
-
- // deallocate matrices
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
-
- // done!
- return result;
- }
-
- /**
- * Compute the result of this operation
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
-
- /**
- * Transpose matrix
- */
- class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(operand) {
- super(operand.columns, operand.rows, operand);
- }
-
- /**
- * Compute result = operand^T
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- wasm.exports.Mat32_transpose(resultptr, operandptr);
- }
- }
-
- /**
- * Invert square matrix
- */
- class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(operand) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
- super(operand.rows, operand.columns, operand);
-
- /** @type {number} size of the matrix */
- this._size = operand.rows;
- }
-
- /**
- * Compute result = operand ^ (-1)
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- switch (this._size) {
- case 0:
- break;
- case 1:
- wasm.exports.Mat32_inverse1(resultptr, operandptr);
- break;
- case 2:
- wasm.exports.Mat32_inverse2(resultptr, operandptr);
- break;
- case 3:
- wasm.exports.Mat32_inverse3(resultptr, operandptr);
- break;
- default:
- wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
- break;
- }
- }
- }
-
- /**
- * Multiply matrix by a scalar value
- */
- class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- * @param {number} scalar
- */
- constructor(operand, scalar) {
- super(operand.rows, operand.columns, operand);
-
- /** @type {number} scalar value */
- this._scalar = +scalar;
- }
-
- /**
- * Compute result = scalar * operand
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
- }
- }
-
- /**
- * Matrix addition
- */
- class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(left.rows, left.columns, left, right);
- }
-
- /**
- * Compute result = left + right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Matrix subtraction
- */
- class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(left.rows, left.columns, left, right);
- }
-
- /**
- * Compute result = left - right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Matrix multiplication
- */
- class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
- super(left.rows, right.columns, left, right);
- }
-
- /**
- * Compute result = left * right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Component-wise multiplication
- */
- class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(right.rows, right.columns, left, right);
- }
-
- /**
- * Compute result = left <compMult> right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
- */
- class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- const m = left.rows,
- n = left.columns;
-
- // TODO right doesn't need to be a column vector
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
- super(n, 1, left, right);
- }
-
- /**
- * Compute result = left \ right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
- }
- }
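Taken together, these classes form a small lazy expression algebra: nothing touches WebAssembly until _evaluate is driven by SpeedyMatrix.setTo / setToSync. A minimal sketch, assuming the SpeedyMatrix class from module 4188 (required above) is in scope:

// build symbolic expressions; no WASM work happens yet
const A = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]); // column-major entries
const b = SpeedyMatrix.Create(2, 1, [5, 6]);
const expr = A.transpose().times(2).plus(A.inverse());

// evaluate once the WASM routines are ready
SpeedyMatrix.ready().then(() => {
    const C = SpeedyMatrix.From(expr);      // 2x2 result
    const x = SpeedyMatrix.From(A.ldiv(b)); // least-squares solution of A x = b (2x1)
    console.log(C.toString(), x.toString());
});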
-
- /***/ }),
-
- /***/ 6465:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_21592__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_21592__.d(__nested_webpack_exports__, {
- /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
- /* harmony export */ });
- /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_21592__(9192);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_21592__(8581);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_21592__(9037);
- /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_21592__(3816);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-wasm.js
- * WebAssembly bridge
- */
-
-
-
-
-
-
- /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
-
- /**
- * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
- * @property {object} as
- * @property {WebAssembly.Memory} as.object
- * @property {Uint8Array} as.uint8
- * @property {Int32Array} as.int32
- * @property {Uint32Array} as.uint32
- * @property {Float32Array} as.float32
- * @property {Float64Array} as.float64
- */
-
- /**
- * @typedef {object} SpeedyMatrixWASMHandle
- * @property {WebAssembly.Instance} wasm
- * @property {SpeedyMatrixWASMMemory} memory
- * @property {WebAssembly.Module} module
- */
-
- /** @type {Uint8Array} WebAssembly binary */
- const WASM_BINARY = __nested_webpack_require_21592__(3575);
-
- /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
- let _instance = null;
-
- /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
- let _module = null;
-
- /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
- const _memory = (mem => ({
- as: {
- object: mem,
- uint8: new Uint8Array(mem.buffer),
- int32: new Int32Array(mem.buffer),
- uint32: new Uint32Array(mem.buffer),
- float32: new Float32Array(mem.buffer),
- float64: new Float64Array(mem.buffer)
- }
- }))(typeof WebAssembly === 'undefined' ?
- new Uint8Array(1024) : // use a filler
- new WebAssembly.Memory({
- initial: 16, // 1 MB
- maximum: 256
- })
- );
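All of the as.* views above alias the same underlying buffer, which is what makes the "union-like" access described in the typedef work. An illustrative sketch (the pointer value is hypothetical):

// read a float32 that a WASM routine wrote at byte offset `ptr`
// (pointers from the allocator are 4-byte aligned, so the division is exact)
const ptr = 1024; // hypothetical pointer, e.g. returned by wasm.exports.malloc
const value = _memory.as.float32[ptr / Float32Array.BYTES_PER_ELEMENT];
const sameBytes = _memory.as.uint8.subarray(ptr, ptr + 4); // the same 4 bytes, viewed raw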
-
- /**
- * WebAssembly utilities
- */
- class SpeedyMatrixWASM {
- /**
- * Gets you the WASM instance, augmented memory & module
- * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
- */
- static ready() {
- // Check if WebAssembly is supported
- if (typeof WebAssembly === 'undefined') return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM('This application requires WebAssembly. Please update your system.'));
-
- // Endianness check
- if (!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN) return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`));
-
- // Get the WASM instance
- return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
- SpeedyMatrixWASM._ready(resolve, reject);
- });
- }
-
- /**
- * Synchronously gets you the WASM instance, augmented memory & module
- * @returns {SpeedyMatrixWASMHandle}
- */
- static get handle() {
- if (!_instance || !_module) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
- return {
- wasm: _instance,
- memory: _memory,
- module: _module
- };
- }
-
- /**
- * Gets you the WASM imports bound to a memory object
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {Object<string,Function>}
- */
- static imports(memory) {
- const obj = new SpeedyMatrixWASMImports(memory);
- return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype).filter(property => typeof obj[property] === 'function' && property !== 'constructor').reduce((imports, methodName) => (imports[methodName] = obj[methodName], imports), Object.create(null));
- }
-
- /**
- * Allocate a Mat32 in WebAssembly memory without copying any data
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {SpeedyMatrix} matrix
- * @returns {number} pointer to the new Mat32
- */
- static allocateMat32(wasm, memory, matrix) {
- const dataptr = wasm.exports.malloc(matrix.data.byteLength);
- const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
- return matptr;
- }
-
- /**
- * Deallocate a Mat32 in WebAssembly
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to the allocated Mat32
- * @returns {number} NULL
- */
- static deallocateMat32(wasm, memory, matptr) {
- const dataptr = wasm.exports.Mat32_data(matptr);
- wasm.exports.free(matptr);
- wasm.exports.free(dataptr);
- return 0;
- }
-
- /**
- * Copy the data of a matrix to a WebAssembly Mat32
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to a Mat32
- * @param {SpeedyMatrix} matrix
- * @returns {number} matptr
- */
- static copyToMat32(wasm, memory, matptr, matrix) {
- // We assume the following:
- // 1. the host uses little-endian byte ordering (just like WebAssembly)
- // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
- // 3. the data type is float32
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
- //matrix.dtype === 'float32' &&
- matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
- const dataptr = wasm.exports.Mat32_data(matptr);
- memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
- return matptr;
- }
-
- /**
- * Copy the data of a WebAssembly Mat32 to a matrix
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to a Mat32
- * @param {SpeedyMatrix} matrix
- * @returns {number} matptr
- */
- static copyFromMat32(wasm, memory, matptr, matrix) {
- // We assume the following:
- // 1. the host uses little-endian byte ordering (just like WebAssembly)
- // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
- // 3. the data type is float32
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
- //matrix.dtype === 'float32' &&
- matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
- const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
- for (let offset = matrix.data.length - 1; offset >= 0; offset--) matrix.data[offset] = memory.as.float32[base + offset];
- return matptr;
- }
-
- /**
- * Polls the WebAssembly instance until it's ready
- * @param {function(SpeedyMatrixWASMHandle): void} resolve
- * @param {function(Error): void} reject
- * @param {number} [counter]
- */
- static _ready(resolve, reject, counter = 1000) {
- if (_instance !== null && _module !== null) resolve({
- wasm: _instance,
- memory: _memory,
- module: _module
- });
- else if (counter <= 0) reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));
- else setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
- }
- }
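For reference, the allocate → copy in → compute → copy out → deallocate cycle that _evaluate performs above can be driven manually through this class. A minimal sketch using only the methods defined here (the Mat32_* routine call is left as a placeholder):

SpeedyMatrixWASM.ready().then(({ wasm, memory }) => {
    const mat = SpeedyMatrix.Zeros(3);                      // 3x3 float32 matrix (class from module 4188)
    const ptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, mat);
    SpeedyMatrixWASM.copyToMat32(wasm, memory, ptr, mat);   // host -> WASM
    // ... invoke a wasm.exports.Mat32_* routine on ptr here ...
    SpeedyMatrixWASM.copyFromMat32(wasm, memory, ptr, mat); // WASM -> host
    SpeedyMatrixWASM.deallocateMat32(wasm, memory, ptr);
});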
-
- /**
- * Methods called from WASM
- */
- class SpeedyMatrixWASMImports {
- /**
- * Constructor
- * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
- */
- constructor(memory) {
- // find all methods of this object
- const methodNames = Object.getOwnPropertyNames(this.constructor.prototype).filter(property => typeof this[property] === 'function').filter(property => property !== 'constructor');
-
- // bind all methods to this object
- methodNames.forEach(methodName => {
- this[methodName] = this[methodName].bind(this);
- });
-
- /** @type {SpeedyMatrixWASMMemory} WASM memory */
- this.memory = memory;
-
- /** @type {CStringUtils} utilities related to C strings */
- this.cstring = new CStringUtils(memory);
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * Prints a message
- * @param {number} ptr pointer to char
- */
- print(ptr) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
- }
-
- /**
- * Throws an error
- * @param {number} ptr pointer to char
- */
- fatal(ptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
- }
-
- /**
- * Fills a memory segment with a byte
- * @param {number} value byte
- * @param {number} start memory address, inclusive
- * @param {number} end memory address greater than start, exclusive
- */
- bytefill(value, start, end) {
- this.memory.as.uint8.fill(value, start, end);
- }
-
- /**
- * Copy a memory segment to another segment
- * @param {number} target memory address, where we'll start writing
- * @param {number} start memory address, where we'll start copying (inclusive)
- * @param {number} end memory address, where we'll end the copy (exclusive)
- */
- copyWithin(target, start, end) {
- this.memory.as.uint8.copyWithin(target, start, end);
- }
- }
-
- /**
- * Utilities related to C strings
- */
- class CStringUtils {
- /**
- * Constructor
- * @param {SpeedyMatrixWASMMemory} memory
- */
- constructor(memory) {
- /** @type {TextDecoder} */
- this._decoder = new TextDecoder('utf-8');
-
- /** @type {SpeedyMatrixWASMMemory} */
- this._memory = memory;
- }
-
- /**
- * Convert a C string to a JavaScript string
- * @param {number} ptr pointer to char
- * @returns {string}
- */
- get(ptr) {
- const byte = this._memory.as.uint8;
- const size = this._memory.as.uint8.byteLength;
- let p = ptr;
- while (p < size && 0 !== byte[p]) ++p;
- return this._decoder.decode(byte.subarray(ptr, p));
- }
- }
-
- /**
- * WebAssembly loader
- * @param {SpeedyMatrixWASMMemory} memory
- */
- (function loadWASM(memory) {
- const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
-
- // Skip if WebAssembly is unsupported
- if (typeof WebAssembly === 'undefined') return;
-
- // Load the WASM binary
- _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY).then(data => base64decode(data)).then(bytes => WebAssembly.instantiate(bytes, {
- env: Object.assign({
- memory: memory.as.object
- }, SpeedyMatrixWASM.imports(memory))
- })).then(wasm => {
- _instance = wasm.instance;
- _module = wasm.module;
- wasm.instance.exports.srand(Date.now() * 0.001 & 0xffffffff); // srand(time(NULL))
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
- }).catch(err => {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
- });
- })(_memory);
-
- /***/ }),
-
- /***/ 4188:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_33268__) => {
-
- "use strict";
- __nested_webpack_require_33268__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_33268__.d(__nested_webpack_exports__, {
- /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
- /* harmony export */ });
- /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_33268__(6306);
- /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_33268__(6465);
- /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_33268__(9192);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_33268__(9037);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix.js
- * Matrix class
- */
-
-
-
-
-
-
- /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
- /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
- /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
-
- /**
- * Matrix class
- */
- class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r {
- /**
- * @private
- *
- * Low-level constructor
- * @param {number} rows number of rows
- * @param {number} columns number of columns
- * @param {number} step0 step size between two consecutive elements (e.g., 1)
- * @param {number} step1 step size between two consecutive columns (e.g., rows)
- * @param {SpeedyMatrixBufferType} data entries in column-major format
- */
- constructor(rows, columns, step0, step1, data) {
- super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.length + rows * columns === 0 ||
- // empty matrix and empty buffer, or
- data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
- );
-
- /** @type {number} step size between two consecutive elements */
- this._step0 = step0 | 0;
-
- /** @type {number} step size between two consecutive columns */
- this._step1 = step1 | 0;
-
- /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
- this._data = data;
- }
-
- /**
- * Create a new matrix with the specified size and entries
- * @param {number} rows number of rows
- * @param {number} columns number of columns
- * @param {number[]} entries in column-major format
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
- }
-
- /**
- * Create a new matrix filled with zeros with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
- }
-
- /**
- * Create a new matrix filled with ones with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
- }
-
- /**
- * Create a new identity matrix with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
- for (let j = Math.min(rows, columns) - 1; j >= 0; j--) data[j * rows + j] = 1;
- return new SpeedyMatrix(rows, columns, 1, rows, data);
- }
-
- /**
- * Evaluate an expression synchronously and store the result in a new matrix
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix}
- */
- static From(expr) {
- return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
- }
-
- /**
- * Returns a promise that resolves immediately if the WebAssembly routines
- * are ready to be used, or as soon as they do become ready
- * @returns {SpeedyPromise<void>}
- */
- static ready() {
- return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void 0);
- }
-
- /**
- * Get the underlying buffer
- * @returns {SpeedyMatrixBufferType}
- */
- get data() {
- return this._data;
- }
-
- /**
- * Row-step
- * @returns {number} defaults to 1
- */
- get step0() {
- return this._step0;
- }
-
- /**
- * Column-step
- * @returns {number} defaults to this.rows
- */
- get step1() {
- return this._step1;
- }
-
- /**
- * Extract a block from this matrix. Use a shared underlying buffer
- * @param {number} firstRow
- * @param {number} lastRow
- * @param {number} firstColumn
- * @param {number} lastColumn
- * @returns {SpeedyMatrix}
- */
- block(firstRow, lastRow, firstColumn, lastColumn) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(firstRow <= lastRow && firstColumn <= lastColumn, `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`);
-
- // ensure that the indices are within bounds
- firstRow = Math.max(firstRow, 0);
- lastRow = Math.min(lastRow, this._rows - 1);
- firstColumn = Math.max(firstColumn, 0);
- lastColumn = Math.min(lastColumn, this._columns - 1);
-
- // compute the dimensions of the new submatrix
- const rows = lastRow - firstRow + 1;
- const columns = lastColumn - firstColumn + 1;
-
- // obtain the relevant portion of the data
- const step0 = this._step0,
- step1 = this._step1;
- const begin = firstRow * step0 + firstColumn * step1; // inclusive
- const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
-
- // create new matrix
- return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
- }
-
- /**
- * Extract a row from this matrix
- * @param {number} index 0-based
- * @returns {SpeedyMatrix}
- */
- row(index) {
- return this.block(index, index, 0, this._columns - 1);
- }
-
- /**
- * Extract a column from this matrix
- * @param {number} index 0-based
- * @returns {SpeedyMatrix}
- */
- column(index) {
- return this.block(0, this._rows - 1, index, index);
- }
-
- /**
- * Extract the main diagonal from this matrix
- * @returns {SpeedyMatrix} as a column-vector
- */
- diagonal() {
- const diagsize = Math.min(this._rows, this._columns);
-
- // compute the dimensions of the new submatrix
- const rows = diagsize; // make it a column vector
- const columns = 1;
-
- // obtain the relevant portion of the data
- const diagstep = this._step0 + this._step1; // jump a row and a column
- const begin = 0; // inclusive
- const end = 1 + (diagsize - 1) * diagstep; // exclusive
-
- // create new matrix
- return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
- }
-
- /**
- * Read a single entry of this matrix
- * @param {number} row 0-based index
- * @param {number} column 0-based index
- * @returns {number}
- */
- at(row, column) {
- if (row >= 0 && row < this._rows && column >= 0 && column < this._columns) return this._data[this._step0 * row + this._step1 * column];
- else return Number.NaN;
- }
-
- /**
- * Read the entries of the matrix in column-major format
- * @returns {number[]}
- */
- read() {
- const entries = new Array(this._rows * this._columns);
- const step0 = this._step0,
- step1 = this._step1;
- let i = 0;
- for (let column = 0; column < this._columns; column++) {
- for (let row = 0; row < this._rows; row++) entries[i++] = this._data[row * step0 + column * step1];
- }
- return entries;
- }
-
- /**
- * Returns a human-readable string representation of the matrix
- * @returns {string}
- */
- toString() {
- const DECIMALS = 5;
- const rows = this.rows,
- columns = this.columns;
- const entries = this.read();
- const mat = /** @type {number[][]} */new Array(rows);
- for (let i = 0; i < rows; i++) {
- mat[i] = new Array(columns);
- for (let j = 0; j < columns; j++) mat[i][j] = entries[j * rows + i];
- }
- const fix = x => x.toFixed(DECIMALS);
- const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
- const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
- return str;
- }
-
- /**
- * Set the contents of this matrix to the result of an expression
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
- */
- setTo(expr) {
- return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
- // TODO: add support for WebWorkers
- return this.setToSync(expr);
- });
- }
-
- /**
- * Synchronously set the contents of this matrix to the result of an expression
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix} this
- */
- setToSync(expr) {
- const {
- wasm,
- memory
- } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
-
- // evaluate the expression
- const result = expr._evaluate(wasm, memory);
-
- /*
- // shallow copy the results to this matrix
- // limitation: can't handle blocks properly
- // (a tree-like structure could be useful)
- this._rows = result.rows;
- this._columns = result.columns;
- //this._dtype = result.dtype;
- this._data = result.data;
- this._step0 = result.step0;
- this._step1 = result.step1;
- */
-
- // validate shape
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype, `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`);
-
- // deep copy
- const step0 = this._step0,
- step1 = this._step1,
- rstep0 = result._step0,
- rstep1 = result._step1;
- if (step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
- // fast copy
- this._data.set(result._data);
- } else {
- // copy each element
- for (let column = this._columns - 1; column >= 0; column--) {
- for (let row = this._rows - 1; row >= 0; row--) this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
- }
- }
-
- // done!
- return this;
- }
-
- /**
- * Fill this matrix with a scalar value
- * @param {number} value
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
- */
- fill(value) {
- this.fillSync(value);
- return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
- }
-
- /**
- * Synchronously fill this matrix with a scalar value
- * @param {number} value
- * @returns {SpeedyMatrix} this
- */
- fillSync(value) {
- value = +value;
- if (this._rows * this._columns === this._data.length) {
- this._data.fill(value);
- return this;
- }
- for (let column = 0; column < this._columns; column++) {
- for (let row = 0; row < this._rows; row++) {
- this._data[row * this._step0 + column * this._step1] = value;
- }
- }
- return this;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- return this;
- }
- }
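A short sketch of the class above, using only the members defined in this module: factory methods, zero-copy views that share the underlying buffer, and synchronous vs. promise-based assignment:

const M = SpeedyMatrix.Create(3, 3, [1, 2, 3, 4, 5, 6, 7, 8, 9]); // column-major entries
const I = SpeedyMatrix.Eye(3);

const col = M.column(1); // 3x1 view sharing M's buffer
col.fillSync(0);         // also zeroes column 1 of M
console.log(M.at(0, 1)); // 0

M.setTo(M.times(I)).then(() => { // waits for the WASM routines, then evaluates
    console.log(M.read());       // entries in column-major order
});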
-
- /***/ }),
-
- /***/ 6634:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_48547__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_48547__.d(__nested_webpack_exports__, {
- /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
- /* harmony export */ });
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_48547__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-namespace.js
- * Symbolizes a namespace
- */
-
-
-
- /**
- * An abstract namespace
- * @abstract
- */
- class SpeedyNamespace {
- /**
- * Namespaces can't be instantiated.
- * Only static methods are allowed.
- * @abstract
- * @throws SpeedyError
- */
- constructor() {
- // only static methods are allowed
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
- }
- }
-
- /***/ }),
-
- /***/ 9192:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_50059__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_50059__.d(__nested_webpack_exports__, {
- /* harmony export */ i: () => (/* binding */ SpeedyPromise)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-promise.js
- * Speedy Promises: a fast implementation of Promises
- */
-
- const PENDING = 0;
- const FULFILLED = 1;
- const REJECTED = 2;
- const SUSPEND_ASYNC = 1;
- const asap = typeof queueMicrotask !== 'undefined' && queueMicrotask ||
- // browsers
- typeof process !== 'undefined' && process.nextTick || (
- // node.js
- f => Promise.resolve().then(() => f())); // most compatible
-
- /**
- * SpeedyPromise: Super Fast Promises. SpeedyPromises can
- * interoperate with ES6 Promises. This implementation is
- * based on the Promises/A+ specification.
- * @template T
- */
- class SpeedyPromise {
- /**
- * Constructor
- * @param {function(function(T=): void, function(Error): void): void} callback
- */
- constructor(callback) {
- this._state = PENDING;
- this._value = undefined;
- this._onFulfillment = null;
- this._onRejection = null;
- this._children = 0;
- this[0] = this;
- this._parent = undefined;
- this._flags = 0;
- this._fulfill = this._fulfill.bind(this);
- this._reject = this._reject.bind(this);
- this._resolve = this._resolve.bind(this);
- this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
- callback(this._fulfill, this._reject);
- }
-
- /**
- * Setup handlers
- * @template U, V=never
- * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
- * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
- * @returns {SpeedyPromise<U>}
- */
- then(onFulfillment, onRejection = null) {
- const child = new SpeedyPromise(this._nop);
- child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
- child._onRejection = typeof onRejection === 'function' && onRejection;
- child._parent = this;
- this[this._children++] = child; // attach child
- this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
- this._notify();
- return child;
- }
-
- /**
- * Setup rejection handler
- * @template U, V=never
- * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
- * @returns {SpeedyPromise<V>}
- */
- catch(onRejection) {
- return this.then(null, onRejection);
- }
-
- /**
- * Execute a callback when the promise is settled
- * (i.e., fulfilled or rejected)
- * @param {function(): void} onFinally
- * @returns {SpeedyPromise<T>}
- */
- finally(onFinally) {
- const fn = val => {
- onFinally();
- return val;
- };
- return this.then(fn, fn);
- }
-
- /**
- * Start the computation immediately, synchronously.
- * Can't afford to spend any time at all waiting for micro-tasks, etc.
- * @returns {SpeedyPromise<T>} this
- */
- turbocharge() {
- let my = this;
-
- // suspend the async behavior
- this._flags |= SUSPEND_ASYNC;
- while (my._parent !== undefined) {
- my = my._parent;
- my._flags |= SUSPEND_ASYNC;
- }
-
- // notify the children of the root
- my._notify(); // will be synchronous
-
- // return this SpeedyPromise
- return this;
- }
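-
- /*
- // Usage sketch (illustrative only): attach the handlers first, then call
- // turbocharge() on the tail of the chain to settle it synchronously,
- // without waiting for a microtask.
- const chain = new SpeedyPromise(resolve => resolve(21))
- .then(x => x * 2)
- .then(x => console.log(x)); // logs 42...
- chain.turbocharge(); // ...right here, synchronously
- */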
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- switch (this._state) {
- case PENDING:
- return `SpeedyPromise { <pending> }`;
- case FULFILLED:
- return `SpeedyPromise { <fulfilled> ${this._value} }`;
- case REJECTED:
- return `SpeedyPromise { <rejected> ${this._value} }`;
- default:
- return '';
- }
- }
-
- /**
- * Symbol.toStringTag
- * @returns {string}
- */
- get [Symbol.toStringTag]() {
- return 'SpeedyPromise';
- }
-
- /**
- * Creates a resolved SpeedyPromise
- * @template U
- * @param {U} [value]
- * @returns {SpeedyPromise<U>}
- */
- static resolve(value) {
- const promise = new SpeedyPromise(this._snop);
- if (typeof value === 'object' && value !== null && 'then' in value || typeof value === 'function' && 'then' in value) {
- // resolve asynchronously
- promise._resolve(value);
- } else {
- // fulfill synchronously
- promise._value = value;
- promise._state = FULFILLED;
- }
- return promise;
- }
-
- /**
- * Creates a rejected SpeedyPromise
- * @template U
- * @param {Error} reason
- * @returns {SpeedyPromise<U>}
- */
- static reject(reason) {
- const promise = new SpeedyPromise(this._snop);
- promise._value = reason;
- promise._state = REJECTED;
- return promise;
- }
-
- /**
- * Returns a SpeedyPromise that resolves to an array
- * containing the results of the input promises/values,
- * in their given order. The returned SpeedyPromise will
- * resolve if all input promises resolve, or reject if
- * any input promise rejects.
- * @template U
- * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
- * @returns {SpeedyPromise<U[]>}
- *
- * FIXME iterables need not be all <U>
- */
- static all(iterable) {
- return new SpeedyPromise((resolve, reject) => {
- const input = [];
-
- // get elements
- for (const element of iterable) input.push(element);
-
- // resolve synchronously if there are no elements
- const length = input.length;
- if (length == 0) {
- resolve([]);
- return;
- }
-
- // resolve asynchronously
- let counter = length;
- const output = new Array(length);
- const partialResolve = i => val => {
- output[i] = val;
- if (0 == --counter) resolve(output);
- };
- for (let i = 0; i < length; i++) {
- const element = input[i];
- if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(partialResolve(i), reject);else SpeedyPromise.resolve(element).then(partialResolve(i), reject);
- }
- });
- }
-
- /**
- * Returns a promise that gets fulfilled or rejected as soon
- * as the first promise in the iterable gets fulfilled or
- * rejected (with its value/reason).
- * @template U
- * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
- * @returns {SpeedyPromise<U>}
- */
- static race(iterable) {
- return new SpeedyPromise((resolve, reject) => {
- const input = [];
-
- // get elements
- for (const element of iterable) input.push(element);
-
- // if the iterable is empty, the promise
- // will be pending forever...
-
- // resolve asynchronously
- const length = input.length;
- for (let i = 0; i < length; i++) {
- const element = input[i];
- if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(resolve, reject);else SpeedyPromise.resolve(element).then(resolve, reject);
- }
- });
- }
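-
- /*
- // Usage sketch (illustrative only): all() and race() accept SpeedyPromises,
- // native Promises and plain values, mixed freely.
- SpeedyPromise.all([
- SpeedyPromise.resolve(1),
- Promise.resolve(2),
- 3
- ]).then(values => console.log(values)); // [1, 2, 3], in input order
- SpeedyPromise.race([
- new SpeedyPromise(resolve => setTimeout(() => resolve('slow'), 100)),
- SpeedyPromise.resolve('fast')
- ]).then(winner => console.log(winner)); // 'fast'
- */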
-
- /**
- * Fulfill this promise with a value
- * @param {T} value
- */
- _fulfill(value) {
- this._setState(FULFILLED, value);
- }
-
- /**
- * Reject this promise with a reason
- * @param {Error} reason
- */
- _reject(reason) {
- this._setState(REJECTED, reason);
- }
-
- /**
- * Set the state and the value of this promise
- * @param {number} state
- * @param {T|Error} value
- */
- _setState(state, value) {
- // the promise is already fulfilled or rejected
- if (this._state != PENDING) return;
-
- // set the new state
- this._state = state;
- this._value = value;
- this._notify();
- }
-
- /**
- * Notify my children that this promise is no
- * longer pending. This is an async operation:
- * my children will be notified "as soon
- * as possible" (it will be scheduled).
- * We may force this to be synchronous, though
- */
- _notify() {
- // nothing to do
- if (this._state == PENDING) return;
-
- // have we turbocharged this promise?
- if (this._flags & SUSPEND_ASYNC) {
- this._broadcast(); // execute synchronously
- return;
- }
-
- // install a timer (default behavior)
- asap(this._broadcastIfAsync);
- }
-
- /**
- * Helper method
- */
- _broadcastIfAsync() {
- // we may have installed a timer at some
- // point, but turbocharged the promise later
- if (!(this._flags & SUSPEND_ASYNC)) this._broadcast();
- }
-
- /**
- * Tell my children that this promise
- * is either fulfilled or rejected.
- * This is a synchronous operation
- */
- _broadcast() {
- const children = this._children;
- const state = this._state;
- if (state === FULFILLED) {
- for (let i = 0; i < children; i++) {
- const child = this[i];
- const callback = child._onFulfillment;
- try {
- if (callback) {
- if (callback !== child._nop) {
- child._resolve(callback(this._value)); // promise resolution procedure
- child._onFulfillment = child._nop; // will not be called again
- }
- } else child._fulfill(this._value);
- } catch (e) {
- child._reject(e);
- }
- }
- } else if (state === REJECTED) {
- for (let i = 0; i < children; i++) {
- const child = this[i];
- const callback = child._onRejection;
- try {
- if (callback) {
- if (callback !== child._nop) {
- child._resolve(callback(this._value)); // promise resolution procedure
- child._onRejection = child._nop; // will not be called again
- }
- } else child._reject(this._value);
- } catch (e) {
- child._reject(e);
- }
- }
- }
- }
-
- /**
- * Promise Resolution Procedure
- * based on the Promises/A+ spec
- * @param {T} x
- */
- _resolve(x) {
- if (typeof x !== 'object' && typeof x !== 'function' || x === null) {
- // if(x !== Object(x))
- this._fulfill(x);
- return;
- }
- if (x === this) throw new TypeError(); // Circular reference
-
- if (x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
- x.then(this._resolve, this._reject);
- return;
- }
- try {
- const then = x.then;
- if (typeof then === 'function') {
- let resolve = this._resolve,
- reject = this._reject;
- try {
- then.call(x, y => {
- resolve(y);
- resolve = reject = this._nop;
- }, r => {
- reject(r);
- resolve = reject = this._nop;
- });
- } catch (e) {
- if (resolve !== this._nop && reject !== this._nop) this._reject(e);
- }
- } else {
- this._fulfill(x);
- }
- } catch (e) {
- this._reject(e);
- }
- }
-
- /**
- * No-operation
- */
- _nop() {}
-
- /**
- * Static no-operation
- */
- static _snop() {}
- }
-
- //module.exports = { SpeedyPromise };
-
- /*
- // Uncomment to test performance with regular Promises
- module.exports = { SpeedyPromise: Promise };
- Promise.prototype.turbocharge = function() { return this };
- */
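-
- /*
- // Usage sketch (illustrative only): SpeedyPromise is a thenable, so it also
- // interoperates with native Promises and async/await.
- const answer = new SpeedyPromise((resolve, reject) => {
- resolve(42); // or reject(new Error('...')) on failure
- });
- answer
- .then(value => value + 1)
- .catch(error => console.error(error))
- .finally(() => console.log('settled'));
- async function main() {
- console.log(await answer); // 42 (awaiting a thenable)
- }
- */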
-
- /***/ }),
-
- /***/ 9420:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_61794__) => {
-
- "use strict";
-
- // EXPORTS
- __nested_webpack_require_61794__.d(__nested_webpack_exports__, {
- gx: () => (/* binding */ createShader),
- bf: () => (/* binding */ importShader)
- });
-
- // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
-
- // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
- var speedy_gl = __nested_webpack_require_61794__(1001);
- // EXTERNAL MODULE: ./src/utils/utils.js
- var utils = __nested_webpack_require_61794__(9037);
- // EXTERNAL MODULE: ./src/utils/types.js
- var types = __nested_webpack_require_61794__(6049);
- // EXTERNAL MODULE: ./src/utils/errors.js
- var errors = __nested_webpack_require_61794__(8581);
- ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
- function _wrapRegExp() { _wrapRegExp = function (e, r) { return new BabelRegExp(e, void 0, r); }; var e = RegExp.prototype, r = new WeakMap(); function BabelRegExp(e, t, p) { var o = RegExp(e, t); return r.set(o, p || r.get(e)), _setPrototypeOf(o, BabelRegExp.prototype); } function buildGroups(e, t) { var p = r.get(t); return Object.keys(p).reduce(function (r, t) { var o = p[t]; if ("number" == typeof o) r[t] = e[o];else { for (var i = 0; void 0 === e[o[i]] && i + 1 < o.length;) i++; r[t] = e[o[i]]; } return r; }, Object.create(null)); } return _inherits(BabelRegExp, RegExp), BabelRegExp.prototype.exec = function (r) { var t = e.exec.call(this, r); if (t) { t.groups = buildGroups(t, this); var p = t.indices; p && (p.groups = buildGroups(p, this)); } return t; }, BabelRegExp.prototype[Symbol.replace] = function (t, p) { if ("string" == typeof p) { var o = r.get(this); return e[Symbol.replace].call(this, t, p.replace(/\$<([^>]+)>/g, function (e, r) { var t = o[r]; return "$" + (Array.isArray(t) ? t.join("$") : t); })); } if ("function" == typeof p) { var i = this; return e[Symbol.replace].call(this, t, function () { var e = arguments; return "object" != typeof e[e.length - 1] && (e = [].slice.call(e)).push(buildGroups(e, i)), p.apply(this, e); }); } return e[Symbol.replace].call(this, t, p); }, _wrapRegExp.apply(this, arguments); }
- function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
- function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shader-preprocessor.js
- * Custom preprocessor for shaders
- */
-
-
-
-
-
-
- /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
- /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
-
- // Import numeric globals
- const globals = __nested_webpack_require_61794__(3816);
- const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */
- Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce((obj, key) => (obj[key] = globals[key], obj), {});
-
- /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
- const basicConstants = Object.freeze(Object.assign(Object.assign({}, numericGlobals), {}, {
- // fragment shader
- 'FS_USE_CUSTOM_PRECISION': 0,
- // use default precision settings
- 'FS_OUTPUT_TYPE': 0,
- // normalized RGBA
-
- // colors
- 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
- 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
- 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
- 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA
- }));
-
- /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
- const platformConstants = (platform, glRenderer) => Object.freeze({
- 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0,
- // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
- 'APPLE_GPU': /Apple/.test(glRenderer) | 0,
- // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
- 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0 // Intel[(R)] ... [HD] Graphics xyz ...
- });
-
- // Regular Expressions
- const commentsRegex = [/\/\*(.|\s)*?\*\//g, /\/\/.*$/gm];
- const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
- const constantRegex = /@(\w+)@/g;
- const unrollRegex = [/*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
- counter: 2
- }), /*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+=\s*(\x2D?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
- counter: 2
- })];
-
- /**
- * Custom preprocessor for the shaders
- */
- class ShaderPreprocessor {
- /**
- * Runs the preprocessor and generates GLSL code
- * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
- * @param {string} infix annotated GLSL code
- * @param {string} [prefix]
- * @param {string} [suffix]
- * @returns {string} preprocessed GLSL code
- */
- static generateGLSL(defines, infix, prefix = null, suffix = null) {
- //
- // The preprocessor will remove comments from GLSL code,
- // include requested GLSL files and import global constants
- // defined for all shaders (see above)
- //
- const errors = []; // compile-time errors
- const constants = generateConstants(defines);
- const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
- return unrollLoops(annotatedGLSL.replace(commentsRegex[0], '').replace(commentsRegex[1], '').replace(constantRegex, (_, name) => String(
- // Replace preprocessor @CONSTANTS@ by their numeric values
- constants.has(name) ? Number(constants.get(name)) : (errors.push(`Undefined constant ${name}`), 0))).replace(includeRegex, (_, filename) =>
- // Included files may include other files.
- // XXX no cycle detection!
- ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))), defines) + errors.map(msg => `\n#error ${msg}\n`).join('');
- }
- }
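-
- /*
- // Usage sketch (illustrative only): the preprocessor prepends one #define per
- // entry of the defines Map, strips comments, replaces @CONSTANTS@ by their
- // numeric values (user-provided defines take priority over the global ones)
- // and expands @include directives. It expects SpeedyGL to be initialized,
- // since platform-related constants are read from the WebGL renderer string.
- const defines = new Map([ ['KERNEL_SIZE', 5] ]);
- const glsl = ShaderPreprocessor.generateGLSL(defines, `
- uniform sampler2D image;
- void main() {
- color = vec4(float(@PIXELCOMPONENT_RED@)); // becomes a numeric literal
- }
- `);
- */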
-
- /**
- * Generate GLSL code based on the input arguments
- * @param {ShaderPreprocessorConstants} defines
- * @param {string} infix
- * @param {string} [prefix]
- * @param {string} [suffix]
- * @returns {string} GLSL code
- */
- function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null) {
- const parts = [];
- if (prefix !== null) parts.push(prefix);
- for (const [key, value] of defines) parts.push(`#define ${key} ${Number(value)}`);
- parts.push(infix);
- if (suffix !== null) parts.push(suffix);
- return parts.join('\n');
- }
-
- /**
- * Generate pre-processor constants. Constants provided by the
- * user have higher priority than globally available constants.
- * @param {ShaderPreprocessorConstants} defines user-provided
- * @returns {ShaderPreprocessorConstants}
- */
- function generateConstants(defines) {
- utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
- const myConstants = /** @type {ShaderPreprocessorConstants} */new Map();
- const globalConstants = Object.assign(Object.create(null), basicConstants, platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer));
-
- // globally available constants have lower priority
- for (const key in globalConstants) {
- //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
- myConstants.set(key, globalConstants[key]);
- }
-
- // user-defined constants have higher priority
- for (const [key, value] of defines) myConstants.set(key, value);
-
- // done!
- return myConstants;
- }
-
- /**
- * Reads a shader from the shaders/include/ folder
- * @param {string} filename
- * @returns {string}
- */
- function readfileSync(filename) {
- if (String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/)) return __nested_webpack_require_61794__(5235)("./" + filename);
- throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
- }
-
- /**
- * Unroll for loops in our own preprocessor
- * @param {string} code
- * @param {ShaderPreprocessorConstants} defines
- * @returns {string}
- */
- function unrollLoops(code, defines) {
- //
- // Currently, only integer for loops with positive step values
- // can be unrolled. (TODO: negative step values?)
- //
- // The current implementation does not support curly braces
- // inside unrolled loops. You may define macros to get around
- // this, but do you actually need to unroll such loops?
- //
- // Loops that don't fit the supported pattern will crash
- // the preprocessor if you try to unroll them.
- //
- const fn = unroll.bind(defines); // CRAZY!
- const n = unrollRegex.length;
- for (let i = 0; i < n; i++) code = code.replace(unrollRegex[i], fn);
- return code;
- }
-
- /**
- * Unroll a loop pattern (regexp)
- * @param {string} match the matched for loop
- * @param {string} type
- * @param {string} counter
- * @param {string} start
- * @param {string} cmp
- * @param {string} end
- * @param {string} step
- * @param {string} loopcode
- * @returns {string} unrolled loop
- */
- function unroll(match, type, counter, start, cmp, end, step, loopcode) {
- const defines = /** @type {ShaderPreprocessorConstants} */this;
-
- // check if the loop limits are numeric constants or #defined numbers from the outside
- const hasStart = Number.isFinite(+start) || defines.has(start);
- const hasEnd = Number.isFinite(+end) || defines.has(end);
- if (!hasStart || !hasEnd) {
- if (defines.size > 0) throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);else return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
- }
-
- // parse and validate limits & step
- let istart = defines.has(start) ? defines.get(start) : parseInt(start);
- let iend = defines.has(end) ? defines.get(end) : parseInt(end);
- let istep = step.length == 0 ? 1 : parseInt(step);
- utils/* Utils */.A.assert(istart <= iend && istep > 0);
-
- /*
- // debug
- console.log(`Found "${match}"`);
- console.log(`type="${type}"`);
- console.log(`counter="${counter}"`);
- console.log(`start="${start}"`);
- console.log(`cmp="${cmp}"`);
- console.log(`end="${end}"`);
- console.log(`step="${step}"`);
- console.log(`loopcode="${loopcode}"`)
- console.log('Defines:', defines);
- */
-
- // continue statements are not supported inside unrolled loops
- // and will generate a compiler error. Using break is ok.
- const hasBreak = loopcode.match(/\bbreak\s*;/) !== null;
-
- // create a new scope
- let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
-
- // declare counter
- unrolledCode += `${type} ${counter};\n`;
-
- // unroll loop
- iend += cmp == '<=' ? 1 : 0;
- for (let i = istart; i < iend; i += istep) unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
-
- // close scope
- unrolledCode += '}\n';
- //console.log('Unrolled code:\n\n' + unrolledCode);
-
- // done!
- return unrolledCode;
- }
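-
- /*
- // Illustrative only: what the custom @unroll directive expands to.
- // Annotated GLSL (input):
- //
- // @unroll
- // for(int i = 0; i < 3; i++) {
- // sum += pixelAtShortOffset(image, ivec2(i, 0));
- // }
- //
- // Unrolled GLSL (output, formatting simplified): a new scope, the counter
- // declared once, and the loop body repeated for each value of the counter:
- //
- // {
- // int i;
- // { i = 0; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
- // { i = 1; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
- // { i = 2; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
- // }
- //
- // Loops containing "break" are wrapped in switch(1) { default: ... } instead,
- // so that the break statement remains valid after unrolling.
- */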
- ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shader-declaration.js
- * Encapsulates a shader declaration
- */
-
-
-
-
- const DEFAULT_ATTRIBUTES = Object.freeze({
- position: 'a_position',
- texCoord: 'a_texCoord'
- });
- const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
- position: 0,
- // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
- texCoord: 1
- });
- const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
- precision highp float;
- precision highp int;
-
- layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
- layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
- out highp vec2 texCoord;
- uniform highp vec2 texSize;
-
- #define vsinit() \
- gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
- texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
- \n\n`;
- const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
- const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
- const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
-
- #if @FS_USE_CUSTOM_PRECISION@ == 0
- precision mediump float; // ~float16
- precision mediump sampler2D;
- precision highp int; // int32
- #endif
-
- #if @FS_OUTPUT_TYPE@ == 0
- #define OUT_TYPE mediump vec4
- #elif @FS_OUTPUT_TYPE@ == 1
- #define OUT_TYPE mediump ivec4
- #elif @FS_OUTPUT_TYPE@ == 2
- #define OUT_TYPE mediump uvec4
- #else
- #error Unknown FS_OUTPUT_TYPE
- #endif
-
- out OUT_TYPE color;
- in highp vec2 texCoord;
- uniform highp vec2 texSize;
-
- @include "global.glsl"\n\n`;
- const PRIVATE_TOKEN = Symbol();
-
- /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
- /** @typedef {string[]} ShaderDeclarationArgumentList */
- /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
- /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
-
- /**
- * Shader Declaration
- * @abstract
- */
- class ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
- * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsSource, vsSource) {
- // private constructor!
- if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er();
-
- /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
- this._arguments = [...argumentList];
-
- /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
- this._defines = new Map(defines);
-
- /** @type {string} preprocessed source code of the fragment shader */
- this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
-
- /** @type {string} preprocessed source code of the vertex shader */
- this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
-
- /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
- this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
-
- // validate arguments
- this._validateArguments(this._arguments, this._uniforms);
- }
-
- /**
- * Return the preprocessed GLSL source code of the fragment shader
- * @returns {string}
- */
- get fragmentSource() {
- return this._fragmentSource;
- }
-
- /**
- * Return the preprocessed GLSL source code of the vertex shader
- * @returns {string}
- */
- get vertexSource() {
- return this._vertexSource;
- }
-
- /**
- * Get the names of the vertex shader attributes
- * @returns {typeof DEFAULT_ATTRIBUTES}
- */
- get attributes() {
- return DEFAULT_ATTRIBUTES;
- }
-
- /**
- * Get the pre-defined locations of the vertex shader attributes
- * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
- */
- get locationOfAttributes() {
- return DEFAULT_ATTRIBUTES_LOCATION;
- }
-
- /**
- * Names of the arguments that will be passed to the Shader,
- * corresponding to GLSL uniforms, in the order they will be passed
- * @returns {string[]}
- */
- get arguments() {
- return [].concat(this._arguments);
- }
-
- /**
- * Names of the uniforms declared in the shader
- * @returns {string[]}
- */
- get uniforms() {
- return Array.from(this._uniforms.keys());
- }
-
- /**
- * The GLSL type of a uniform variable declared in the shader
- * @param {string} name
- * @returns {string}
- */
- uniformType(name) {
- if (!this._uniforms.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
- return this._uniforms.get(name);
- }
-
- /**
- * The value of an externally defined constant, i.e., via withDefines()
- * @param {string} name
- * @returns {number}
- */
- definedConstant(name) {
- if (!this._defines.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
- return this._defines.get(name);
- }
-
- /**
- * Parses a GLSL source and detects the uniform variables,
- * as well as their types
- * @param {string} preprocessedSource
- * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
- */
- _autodetectUniforms(preprocessedSource) {
- const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
- const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
- const uniforms = /** @type {ShaderDeclarationUniformTypes} */new Map();
- let match;
- while ((match = regex.exec(sourceWithoutComments)) !== null) {
- const type = match[2];
- const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
-
- for (const name of names) {
- if (name.endsWith(']')) {
- // is it an array?
- if (!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/))) throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
-
- // read array name & size
- const [array, size] = [match[1], Number(match[2])];
-
- // register uniforms
- for (let i = 0; i < size; i++) uniforms.set(`${array}[${i}]`, type);
- } else {
- // register a regular uniform
- if (!uniforms.has(name) || uniforms.get(name) === type) uniforms.set(name, type);else throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
- }
- }
- }
- return uniforms;
- }
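-
- /*
- // Illustrative only: given the declarations below, the detected uniform map
- // contains one entry per scalar name (arrays are expanded element by element):
- //
- // uniform sampler2D image; -> "image" => "sampler2D"
- // uniform mediump vec2 offset, scale; -> "offset" => "vec2", "scale" => "vec2"
- // uniform float weight[3]; -> "weight[0]" ... "weight[2]" => "float"
- */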
-
- /**
- * Checks if all the arguments of the shader declaration are backed by a
- * uniform variable in GLSL code
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationUniformTypes} uniforms
- * @throws {IllegalArgumentError}
- */
- _validateArguments(argumentList, uniforms) {
- for (const argname of argumentList) {
- if (!uniforms.has(argname)) {
- if (!uniforms.has(argname + '[0]')) throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
- }
- }
- }
- }
-
- /**
- * A ShaderDeclaration that has its GLSL code stored in-memory
- */
- class MemoryShaderDeclaration extends ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
- * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER) {
- super(privateToken, argumentList, defines, fsSource, vsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
- this._fsUnprocessedSource = String(fsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
- this._vsUnprocessedSource = String(vsSource);
- }
- }
-
- /**
- * A ShaderDeclaration that has its GLSL code stored in a file
- */
- class FileShaderDeclaration extends ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
- * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '') {
- // validate paths
- if (!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);else if (vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
-
- // import files
- const fsSource = __nested_webpack_require_61794__(4606)("./" + String(fsFilepath));
- const vsSource = vsFilepath != '' ? __nested_webpack_require_61794__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
-
- // super class
- super(privateToken, argumentList, defines, fsSource, vsSource);
-
- /** @type {string} filepath of the fragment shader */
- this._fsFilepath = String(fsFilepath);
-
- /** @type {string} filepath of the vertex shader */
- this._vsFilepath = String(vsFilepath);
- }
-
- /**
- * Return the preprocessed GLSL source code of the fragment shader
- * @returns {string}
- */
- get fragmentSource() {
- // we override this method to include the filepath. The motivation
- // is to easily identify the file when debugging compilation errors.
- return this._addHeader('// File: ' + this._fsFilepath, super.fragmentSource);
- }
-
- /**
- * Return the preprocessed GLSL source code of the vertex shader
- * @returns {string}
- */
- get vertexSource() {
- // we override this method to include the filepath. The motivation
- // is to easily identify the file when debugging compilation errors.
- return this._addHeader('// File: ' + (this._vsFilepath != '' ? this._vsFilepath : '(default-vs) ' + this._fsFilepath), super.vertexSource);
- }
-
- /**
- * Add a header to a GLSL code
- * @param {string} header code to be added
- * @param {string} src pre-processed GLSL code
- * @returns {string} src with an added header
- */
- _addHeader(header, src) {
- utils/* Utils */.A.assert(header.startsWith('//') && !header.includes('\n'));
- const j = src.indexOf('\n');
- const versionDirective = src.substr(0, j);
- const body = src.substr(j);
- utils/* Utils */.A.assert(versionDirective.startsWith('#version '));
- const head = versionDirective + '\n' + header;
- return head + body;
- }
- }
-
- /**
- * A builder of a ShaderDeclaration
- * @abstract
- */
- class ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- */
- constructor(privateToken) {
- if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er(); // private constructor!
-
- /** @type {string[]} ordered list of uniform names */
- this._arguments = [];
-
- /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
- this._defines = new Map();
- }
-
- /**
- * Specify the list & order of arguments to be
- * passed to the shader
- * @param {string[]} args argument names
- * @returns {this}
- */
- withArguments(...args) {
- // the list of arguments may be declared only once
- if (this._arguments.length > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
-
- // get arguments
- for (let j = 0; j < args.length; j++) this._arguments.push(String(args[j]));
-
- // done!
- return this;
- }
-
- /**
- * Specify a set of #defines to be prepended to the shader
- * @param {Object<string,number>} defines key-value pairs
- * @returns {this}
- */
- withDefines(defines) {
- // the list of #defines may be defined only once
- if (this._defines.size > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
-
- // store and write the #defines
- const keys = Object.keys(defines);
- for (const key of keys) {
- const value = Number(defines[key]); // force numeric values (just in case)
- this._defines.set(key, value);
- }
-
- // done!
- return this;
- }
-
- /**
- * Build a ShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- throw new errors/* AbstractMethodError */.aQ();
- }
- }
-
- /**
- * A builder of a MemoryShaderDeclaration
- */
- class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource
- * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
- */
- constructor(privateToken, fsSource, vsSource) {
- super(privateToken);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
- this._fsSource = String(fsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
- this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
- }
-
- /**
- * Build a MemoryShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
- }
- }
-
- /**
- * A builder of a FileShaderDeclaration
- */
- class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {string} fsFilepath
- * @param {string} [vsFilepath]
- */
- constructor(privateToken, fsFilepath, vsFilepath) {
- super(privateToken);
-
- /** @type {string} path to the unprocessed GLSL code of the fragment shader */
- this._fsFilepath = String(fsFilepath);
-
- /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
- this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
- }
-
- /**
- * Build a FileShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
- }
- }
-
- /**
- * Import a ShaderDeclaration from a GLSL file
- * @param {string} filepath relative to the shaders/ folder (a .glsl file)
- * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
- * @returns {ShaderDeclaration}
- */
- function importShader(filepath, vsfilepath = undefined) {
- return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
- }
-
- /**
- * Create a ShaderDeclaration from a GLSL source code
- * @param {string} source fragment shader
- * @param {string} [vssource] optional vertex shader
- * @returns {ShaderDeclaration}
- */
- function createShader(source, vssource = undefined) {
- return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
- }
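-
- /*
- // Usage sketch (illustrative only): declare a shader from in-memory GLSL.
- // withArguments() fixes the order in which the uniforms will be passed and
- // withDefines() prepends #defines to the source. Building the declaration
- // runs the preprocessor, which expects an initialized SpeedyGL. The GLSL
- // body and the SOME_FLAG constant below are made up for the example.
- const shader = createShader(`
- uniform sampler2D image;
- uniform float alpha;
- void main() {
- color = threadPixel(image) * alpha;
- }
- `).withArguments('image', 'alpha').withDefines({ SOME_FLAG: 1 }).build();
- shader.uniformType('alpha'); // "float"
- shader.definedConstant('SOME_FLAG'); // 1
- shader.fragmentSource; // preprocessed GLSL of the fragment shader
- // importShader() works the same way, reading the unprocessed GLSL from a file
- // in the shaders/ folder instead (the filepath argument is validated against
- // a whitelist pattern before the file is imported).
- */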
-
- /***/ }),
-
- /***/ 1672:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_89681__) => {
-
- "use strict";
- __nested_webpack_require_89681__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_89681__.d(__nested_webpack_exports__, {
- /* harmony export */ conv2D: () => (/* binding */ conv2D),
- /* harmony export */ convX: () => (/* binding */ convX),
- /* harmony export */ convY: () => (/* binding */ convY)
- /* harmony export */ });
- /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_89681__(9420);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_89681__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_89681__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * convolution.js
- * Convolution shader generators
- */
-
-
-
-
-
- /**
- * Generate a 2D convolution with a square kernel
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function conv2D(kernel, normalizationConstant = 1.0) {
- const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
- const kSize = Math.sqrt(kernel32.length) | 0;
- const N = kSize >> 1; // idiv 2
-
- // validate input
- if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);else if (kSize * kSize != kernel32.length) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
-
- // select the appropriate pixel function
- const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
-
- // code generator
- const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(cur => fn(kernel32[(cur[0] + N) * kSize + (cur[1] + N)], cur[0], cur[1])).join('\n');
- const generateCode = (k, dy, dx) => `
- result += ${pixelAtOffset}(image, ivec2(${-dx | 0}, ${-dy | 0})) * float(${+k});
- `;
-
- // shader
- const source = `
- uniform sampler2D image;
-
- void main()
- {
- float alpha = threadPixel(image).a;
- vec4 result = vec4(0.0f);
-
- ${foreachKernelElement(generateCode)}
-
- color = vec4(result.rgb, alpha);
- }
- `;
-
- // done!
- return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
- }
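-
- /*
- // Usage sketch (illustrative only): a 3x3 box blur. The normalization constant
- // multiplies every kernel entry, so the nine 1's become 1/9 each.
- const boxBlur3x3 = conv2D([
- 1, 1, 1,
- 1, 1, 1,
- 1, 1, 1
- ], 1 / 9).build();
- */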
-
- /**
- * Generate a 1D convolution function on the x-axis
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function convX(kernel, normalizationConstant = 1.0) {
- return conv1D('x', kernel, normalizationConstant);
- }
-
- /**
- * Generate a 1D convolution function on the y-axis
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function convY(kernel, normalizationConstant = 1.0) {
- return conv1D('y', kernel, normalizationConstant);
- }
-
- /**
- * 1D convolution function generator
- * @param {string} axis either "x" or "y"
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function conv1D(axis, kernel, normalizationConstant = 1.0) {
- const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
- const kSize = kernel32.length;
- const N = kSize >> 1; // idiv 2
-
- // validate input
- if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);else if (axis != 'x' && axis != 'y') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
-
- // select the appropriate pixel function
- const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
-
- // code generator
- const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce((acc, cur) => acc + fn(kernel32[cur + N], cur), '');
- const generateCode = (k, i) => axis == 'x' ? `
- pixel += ${pixelAtOffset}(image, ivec2(${-i | 0}, 0)) * float(${+k});
- ` : `
- pixel += ${pixelAtOffset}(image, ivec2(0, ${-i | 0})) * float(${+k});
- `;
-
- // shader
- const source = `
- uniform sampler2D image;
-
- void main()
- {
- float alpha = threadPixel(image).a;
- vec4 pixel = vec4(0.0f);
-
- ${foreachKernelElement(generateCode)}
-
- color = vec4(pixel.rgb, alpha);
- }
- `;
-
- // done!
- return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
- }
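-
- /*
- // Usage sketch (illustrative only): a separable Gaussian blur implemented as
- // two 1D passes. The kernel [1,4,6,4,1] sums to 16, hence the 1/16 factor.
- const gaussian = [1, 4, 6, 4, 1];
- const blurX = convX(gaussian, 1 / 16).build(); // horizontal pass
- const blurY = convY(gaussian, 1 / 16).build(); // vertical pass
- */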
-
- /***/ }),
-
- /***/ 1001:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_95546__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_95546__.d(__nested_webpack_exports__, {
- /* harmony export */ c: () => (/* binding */ SpeedyGL)
- /* harmony export */ });
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_95546__(9037);
- /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_95546__(2199);
- /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_95546__(3211);
- /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_95546__(9192);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_95546__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-gl.js
- * A wrapper around the WebGL Rendering Context
- */
-
-
-
-
-
-
-
- /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
-
- // Constants
- const SINGLETON_KEY = Symbol();
- const DEFAULT_POWER_PREFERENCE = 'default';
-
- //
- // We use a small canvas to improve the performance
- // of createImageBitmap() on Firefox.
- //
- // A large canvas (2048x2048) causes a FPS drop, even
- // if we only extract a small region of it (this is
- // unlike Chrome, which is fast).
- //
- // Note: we automatically increase the size of the
- // canvas (as needed) when rendering to it.
- //
- const CANVAS_WIDTH = 16,
- CANVAS_HEIGHT = 16;
-
- /** @type {SpeedyGL} Singleton */
- let instance = null;
-
- /** @type {PowerPreference} power preference */
- let powerPreference = DEFAULT_POWER_PREFERENCE;
-
- /**
- * A wrapper around a WebGL Rendering Context
- */
- class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c {
- /**
- * Constructor
- * @param {Symbol} key
- * @private
- */
- constructor(key) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
- super();
-
- /** @type {boolean} internal flag */
- this._reinitializeOnContextLoss = true;
-
- /** @type {HTMLCanvasElement} internal canvas */
- this._canvas = this._createCanvas(this._reinitialize.bind(this));
-
- /** @type {WebGL2RenderingContext} WebGL rendering context */
- this._gl = this._createContext(this._canvas);
-
- /** @type {string} vendor string of the video driver */
- this._vendor = '';
-
- /** @type {string} renderer string of the video driver */
- this._renderer = '';
-
- // read driver info
- this._readDriverInfo();
-
- // log driver info
- if (_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic') this._logDriverInfo();
- }
-
- /**
- * Get Singleton
- * @returns {SpeedyGL}
- */
- static get instance() {
- return instance || (instance = new SpeedyGL(SINGLETON_KEY));
- }
-
- /**
- * The WebGL Rendering Context
- * Be careful not to cache this rendering context, as it may be lost!
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._gl;
- }
-
- /**
- * The internal canvas
- * @returns {HTMLCanvasElement}
- */
- get canvas() {
- return this._canvas;
- }
-
- /**
- * Renderer string of the video driver
- * @returns {string}
- */
- get renderer() {
- return this._renderer;
- }
-
- /**
- * Vendor string of the video driver
- * @returns {string}
- */
- get vendor() {
- return this._vendor;
- }
-
- /**
- * Create a WebGL-capable canvas
- * @param {Function} reinitialize to be called if we get a WebGL context loss event
- * @returns {HTMLCanvasElement}
- */
- _createCanvas(reinitialize) {
- const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
- canvas.addEventListener('webglcontextlost', ev => {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
- setTimeout(reinitialize, 0);
- ev.preventDefault();
- }, false);
-
- /*canvas.addEventListener('webglcontextrestored', ev => {
- Utils.warning(`Restored WebGL2 context`);
- ev.preventDefault();
- }, false);*/
-
- return canvas;
- }
-
- /**
- * Create a WebGL2 Rendering Context
- * @param {HTMLCanvasElement} canvas
- * @returns {WebGL2RenderingContext}
- */
- _createContext(canvas) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
-
- // does the browser support WebGL2?
- if (typeof WebGL2RenderingContext === 'undefined') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please update your system.`);
- const gl = canvas.getContext('webgl2', {
- premultipliedAlpha: false,
- preserveDrawingBuffer: false,
- powerPreference: powerPreference,
- alpha: true,
- // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
- antialias: false,
- depth: false,
- stencil: false,
- desynchronized: true
- });
- if (!gl) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
- return gl;
- }
-
- /**
- * Reinitialize WebGL
- */
- _reinitialize() {
- // disable reinitialization?
- if (!this._reinitializeOnContextLoss) return;
-
- // warning
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
-
- // create new canvas
- this._canvas.remove();
- this._canvas = this._createCanvas(this._reinitialize.bind(this));
-
- // create new context
- this._gl = this._createContext(this._canvas);
-
- // is this needed?
- this._readDriverInfo();
-
- // notify observers: we have a new context!
- // we need to recreate all textures...
- this._notify();
- }
-
- /**
- * Read debugging information about the video driver of the user
- */
- _readDriverInfo() {
- // Depending on the privacy settings of the browser, this information
- // may be unavailable. When available, it may not be entirely correct.
- // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
- const gl = this._gl;
- let debugInfo = null;
- if (navigator.userAgent.includes('Firefox')) {
- this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
- this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
- } else if (null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
- this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
- this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
- } else {
- this._vendor = ''; // unavailable information
- this._renderer = '';
- }
- }
-
- /**
- * Log debugging information about the video driver and the platform
- */
- _logDriverInfo() {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
- }
-
- /**
- * Lose the WebGL context. This is used to manually
- * free resources, and also for purposes of testing
- * @returns {WEBGL_lose_context}
- */
- loseContext() {
- const gl = this._gl;
-
- // find the appropriate extension
- const ext = gl.getExtension('WEBGL_lose_context');
- if (!ext) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
-
- // nothing to do?
- if (gl.isContextLost()) return ext;
-
- // disable reinitialization
- this._reinitializeOnContextLoss = false;
-
- // lose context
- ext.loseContext();
-
- // done!
- return ext;
- }
-
- /**
- * Lose & restore the WebGL context
- * @param {number} [secondsToRestore]
- * @returns {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
- */
- loseAndRestoreContext(secondsToRestore = 1) {
- const ms = Math.max(secondsToRestore, 0) * 1000;
- const ext = this.loseContext();
- return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
- setTimeout(() => {
- //ext.restoreContext();
- this._reinitializeOnContextLoss = true;
- this._reinitialize();
- setTimeout(() => resolve(ext), 0); // next frame
- }, ms);
- });
- }
-
- /**
- * Power preference for the WebGL context
- * @returns {PowerPreference}
- */
- static get powerPreference() {
- return powerPreference;
- }
-
- /**
- * Power preference for the WebGL context
- * @param {PowerPreference} value
- */
- static set powerPreference(value) {
- // validate
- if (!(value === 'default' || value === 'low-power' || value === 'high-performance')) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
-
- // the power preference should be set before we create the WebGL context
- if (instance == null || powerPreference !== value) {
- powerPreference = value;
-
- // recreate the context if it already exists. Experimental.
- if (instance != null) instance.loseAndRestoreContext();
- }
- }
-
- /**
- * Check if an instance of SpeedyGL has already been created
- * @returns {boolean}
- */
- static isInitialized() {
- return instance != null;
- }
- }
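-
- /*
- // Usage sketch (illustrative only): the power preference should be set before
- // the singleton (and hence the WebGL2 context) is first created.
- SpeedyGL.powerPreference = 'high-performance';
- const gl = SpeedyGL.instance.gl; // WebGL2RenderingContext - don't cache it!
- console.log(SpeedyGL.instance.renderer); // unmasked renderer string, if available
- // simulate a context loss (e.g., for testing) and restore it after 1 second
- SpeedyGL.instance.loseAndRestoreContext(1).then(() => console.log('restored'));
- */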
-
- /***/ }),
-
- /***/ 8581:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_105678__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_105678__.d(__nested_webpack_exports__, {
- /* harmony export */ EM: () => (/* binding */ NotSupportedError),
- /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
- /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
- /* harmony export */ MU: () => (/* binding */ TimeoutError),
- /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
- /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
- /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
- /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
- /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
- /* harmony export */ mB: () => (/* binding */ ParseError),
- /* harmony export */ pf: () => (/* binding */ AssertionError),
- /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
- /* harmony export */ wB: () => (/* binding */ GLError),
- /* harmony export */ xB: () => (/* binding */ SpeedyError)
- /* harmony export */ });
- /* unused harmony export NotImplementedError */
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * errors.js
- * Error classes
- */
-
- /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
-
- /**
- * Generic error class for Speedy
- */
- class SpeedyError extends Error {
- /**
- * Class constructor
- * @param {string} message message text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message, cause = null) {
- super([message, cause ? cause.toString() : '[speedy-vision.js]'].join('\n-> '));
-
- /** @type {SpeedyErrorCause} cause of the error */
- this._cause = cause;
- }
-
- /**
- * Error name
- * @returns {string}
- */
- get name() {
- return this.constructor.name;
- }
-
- /**
- * Set error name (ignored)
- * @param {string} _ ignored
- */
- set name(_) {
- void 0;
- }
-
- /**
- * Get the cause of the error. Available if
- * it has been specified in the constructor
- * @returns {SpeedyErrorCause}
- */
- get cause() {
- return this._cause;
- }
- }
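- 
-  /*
-   * Usage sketch (illustrative, assuming direct access to the classes in this
-   * module): errors can be chained via the `cause` argument, and the chain is
-   * reflected in the message, one "->" line per link. Subclasses below prepend
-   * their own prefix (e.g. "Illegal argument. ") to the message.
-   *
-   *   const err = new SpeedyError('texture upload failed', new Error('device lost'));
-   *   err.name;          // "SpeedyError" (from the name getter)
-   *   err.cause.message; // "device lost"
-   *   err.message;       // "texture upload failed\n-> Error: device lost"
-   */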
-
- /**
- * Unsupported operation error
- * The requested operation is not supported
- */
- class NotSupportedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Unsupported operation. ${message}`, cause);
- }
- }
-
- /**
- * Not implemented error
- * The called method is not implemented
- */
- class NotImplementedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Method not implemented. ${message}`, cause);
- }
- }
-
- /**
- * WebGL error
- */
- class GLError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`WebGL error. ${message}`, cause);
- }
-
- /**
- * Get an error object describing the latest WebGL error
- * @param {WebGL2RenderingContext} gl
- * @returns {GLError}
- */
- static from(gl) {
- const recognizedErrors = ['NO_ERROR', 'INVALID_ENUM', 'INVALID_VALUE', 'INVALID_OPERATION', 'INVALID_FRAMEBUFFER_OPERATION', 'OUT_OF_MEMORY', 'CONTEXT_LOST_WEBGL'];
- const glError = gl.getError();
- const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
- return new GLError(message);
- }
- }
-
- /**
- * AbstractMethodError
- * Thrown when one tries to call an abstract method
- */
- class AbstractMethodError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Can't call abstract method. ${message}`, cause);
- }
- }
-
- /**
- * Illegal argument error
- * A method has received one or more illegal arguments
- */
- class IllegalArgumentError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Illegal argument. ${message}`, cause);
- }
- }
-
- /**
- * Illegal operation error
- * The method arguments are valid, but the method can't
- * be called due to the current state of the object
- */
- class IllegalOperationError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Illegal operation. ${message}`, cause);
- }
- }
-
- /**
- * Out of memory
- */
- class OutOfMemoryError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Out of memory. ${message}`, cause);
- }
- }
-
- /**
- * File not found error
- */
- class FileNotFoundError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`File not found. ${message}`, cause);
- }
- }
-
- /**
- * Resource not loaded error
- */
- class ResourceNotLoadedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Resource not loaded. ${message}`, cause);
- }
- }
-
- /**
- * Timeout error
- */
- class TimeoutError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Timeout error. ${message}`, cause);
- }
- }
-
- /**
- * Parse error
- */
- class ParseError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Parse error. ${message}`, cause);
- }
- }
-
- /**
- * Assertion error
- */
- class AssertionError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Assertion failed. ${message}`, cause);
- }
- }
-
- /**
- * Access denied
- */
- class AccessDeniedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Access denied. ${message}`, cause);
- }
- }
-
- /**
- * WebAssembly error
- */
- class WebAssemblyError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`WebAssembly error. ${message}`, cause);
- }
- }
-
- /***/ }),
-
- /***/ 3816:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_113692__) => {
-
- "use strict";
- __nested_webpack_require_113692__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_113692__.d(__nested_webpack_exports__, {
- /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
- /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
- /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
- /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
- /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
- /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
- /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
- /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
- /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
- /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
- /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
- /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
- /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
- /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
- /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
- /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
- /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * globals.js
- * Global constants
- */
-
- // -----------------------------------------------------------------
- // IMAGE PYRAMIDS & SCALE-SPACE
- // -----------------------------------------------------------------
-
- /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
- const PYRAMID_MAX_LEVELS = 8;
-
- /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
- const LOG2_PYRAMID_MAX_SCALE = 0;
-
- /** @type {number} The maximum supported scale for a pyramid level */
- const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
-
- // -----------------------------------------------------------------
- // FIXED-POINT MATH
- // -----------------------------------------------------------------
-
- /** @type {number} How many bits do we use to store fractional data? */
- const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
-
- /** @type {number} Fixed-point resolution */
- const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
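- 
-  // Worked example (illustrative): with FIX_BITS = 3, FIX_RESOLUTION = 1 << 3 = 8,
-  // so a subpixel coordinate such as x = 12.37 is stored as round(12.37 * 8) = 99
-  // and decodes back to 99 / 8 = 12.375, i.e., positions are quantized to steps
-  // of 1/8 = 0.125 pixel.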
-
- // -----------------------------------------------------------------
- // TEXTURE LIMITS
- // -----------------------------------------------------------------
-
- /** @type {number} Maximum texture length (width, height) */
- const MAX_TEXTURE_LENGTH = (1 << 16 - FIX_BITS) - 1; // must be 2^n - 1 due to keypoint encoding
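-  // note: `1 << 16 - FIX_BITS` parses as `1 << (16 - FIX_BITS)`, so with
-  // FIX_BITS = 3 this evaluates to 2^13 - 1 = 8191 pixels per dimension.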
-
- // -----------------------------------------------------------------
- // KEYPOINTS
- // -----------------------------------------------------------------
-
- /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
- const MIN_KEYPOINT_SIZE = 8;
-
- /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
- const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
-
- /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
- const MAX_ENCODER_CAPACITY = 8192;
-
- /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
- const DEFAULT_ENCODER_CAPACITY = 2048;
-
- /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
- const LOG2_MAX_DESCRIPTOR_SIZE = 6;
-
- /** @type {number} maximum size of a keypoint descriptor, in bytes */
- const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
-
- /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
- const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
-
- /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
- const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
-
- /** @type {number} Maximum size of the database of keypoints for matching */
- const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
-
- /** @type {number} The maximum distance that can be stored in a match */
- const MATCH_MAX_DISTANCE = (1 << 32 - MATCH_INDEX_BITS) - 1;
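- 
-  // Worked layout (derived from the constants above and from keypoint-matches.glsl
-  // further below): a match is packed into a 32-bit word as
-  //   [ distance : 9 bits | keypoint index : 23 bits ]
-  // since MATCH_INDEX_BITS = 32 - (6 + 3) = 23. Hence MATCH_INDEX_MASK =
-  // MATCH_MAX_INDEX = 2^23 - 1 = 8388607 and MATCH_MAX_DISTANCE = 2^9 - 1 = 511.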
-
- // -----------------------------------------------------------------
- // MISC
- // -----------------------------------------------------------------
-
- /** @type {boolean} Are we in a little-endian machine? */
- const LITTLE_ENDIAN = function () {
- return 0xCAFE === new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer)[0];
- }();
-
- /***/ }),
-
- /***/ 3211:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_119275__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_119275__.d(__nested_webpack_exports__, {
- /* harmony export */ c: () => (/* binding */ Observable)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * observable.js
- * Observer design pattern
- */
-
- /**
- * Implementation of the Observer design pattern
- * @abstract
- */
- class Observable {
- /**
- * Constructor
- */
- constructor() {
- /** @type {Function[]} subscribers / callbacks */
- this._subscribers = [];
-
- /** @type {object[]} "this" pointers */
- this._thisptr = [];
-
- /** @type {Array<any[]>} function arguments */
- this._args = [];
- }
-
- /**
- * Add subscriber
- * @param {Function} fn callback
- * @param {object} [thisptr] "this" pointer to be used when invoking the callback
- * @param {...any} args arguments to be passed to the callback
- */
- subscribe(fn, thisptr, ...args) {
- this._subscribers.push(fn);
- this._thisptr.push(thisptr);
- this._args.push(args);
- }
-
- /**
- * Remove subscriber
- * @param {Function} fn previously added callback
- * @param {object} [thisptr] "this" pointer
- */
- unsubscribe(fn, thisptr) {
- for (let j = this._subscribers.length - 1; j >= 0; j--) {
- if (this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
- this._subscribers.splice(j, 1);
- this._thisptr.splice(j, 1);
- this._args.splice(j, 1);
- break;
- }
- }
- }
-
- /**
- * Notify all subscribers about a state change
- * @protected
- */
- _notify() {
- for (let i = 0; i < this._subscribers.length; i++) this._subscribers[i].apply(this._thisptr[i], this._args[i]);
- }
- }
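- 
-  /*
-   * Usage sketch (illustrative): subclasses call _notify() when their state
-   * changes; the extra arguments passed to subscribe() are forwarded to the
-   * callback on every notification.
-   *
-   *   class Ticker extends Observable {
-   *       tick() { this._notify(); }
-   *   }
-   *
-   *   const ticker = new Ticker();
-   *   const onTick = label => console.log('tick from', label);
-   *   ticker.subscribe(onTick, null, 'demo');
-   *   ticker.tick();                   // logs: tick from demo
-   *   ticker.unsubscribe(onTick, null);
-   */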
-
- /***/ }),
-
- /***/ 6049:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_121659__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_121659__.d(__nested_webpack_exports__, {
- /* harmony export */ f5: () => (/* binding */ ImageFormat),
- /* harmony export */ kQ: () => (/* binding */ PixelComponent),
- /* harmony export */ kg: () => (/* binding */ ColorComponentId),
- /* harmony export */ zu: () => (/* binding */ MediaType)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * types.js
- * Types & formats
- */
-
- /**
- * Media types
- * @enum {Symbol}
- */
- const MediaType = Object.freeze({
- Image: Symbol('Image'),
- Video: Symbol('Video'),
- Canvas: Symbol('Canvas'),
- OffscreenCanvas: Symbol('OffscreenCanvas'),
- Bitmap: Symbol('Bitmap'),
- Data: Symbol('Data')
- });
-
- /**
- * Image formats
- * @enum {Symbol}
- */
- const ImageFormat = Object.freeze({
- RGBA: Symbol('RGBA'),
- GREY: Symbol('GREY')
- });
-
- /**
- * Pixel component (bitwise flags)
- * @typedef {number} PixelComponent
- */
- const PixelComponent = Object.freeze({
- RED: 1,
- GREEN: 2,
- BLUE: 4,
- ALPHA: 8,
- ALL: 15 // = RED | GREEN | BLUE | ALPHA
- });
-
- /**
- * Component ID utility
- */
- const ColorComponentId = Object.freeze({
- [PixelComponent.RED]: 0,
- [PixelComponent.GREEN]: 1,
- [PixelComponent.BLUE]: 2,
- [PixelComponent.ALPHA]: 3
- });
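- 
-  // Usage sketch (illustrative): PixelComponent values are bitwise flags, so
-  // several components can be combined and tested with the usual operators:
-  //   const mask = PixelComponent.RED | PixelComponent.ALPHA;  // 1 | 8 = 9
-  //   (mask & PixelComponent.GREEN) != 0;                      // false
-  //   ColorComponentId[PixelComponent.BLUE];                   // 2 (channel index)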
-
- /***/ }),
-
- /***/ 9037:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_123644__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_123644__.d(__nested_webpack_exports__, {
- /* harmony export */ A: () => (/* binding */ Utils)
- /* harmony export */ });
- /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_123644__(8581);
- /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_123644__(9192);
- /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_123644__(2199);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * utils.js
- * Generic utilities
- */
-
-
-
-
-
- /**
- * Generic utilities
- */
- class Utils {
- /**
- * Generates a warning
- * @param {string} text message text
- * @param {...string} args optional text
- */
- static warning(text, ...args) {
- //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
- if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.warn('[speedy-vision] ' + text, ...args);
- }
-
- /**
- * Logs a message
- * @param {string} text message text
- * @param {...string} args optional text
- */
- static log(text, ...args) {
- if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.log('[speedy-vision] ' + text, ...args);
- }
-
- /**
- * Assertion
- * @param {boolean} expr expression
- * @param {string} [text] error message
- * @throws {AssertionError}
- */
- static assert(expr, text = '') {
- if (!expr) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
- }
-
- /**
- * Gets the names of the arguments of the specified function
- * @param {Function} fun
- * @returns {string[]}
- */
- static functionArguments(fun) {
- const code = fun.toString();
- const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' : code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>';
- const match = new RegExp(regex).exec(code);
- if (match !== null) {
- const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
- return args.split(',').map(argname => argname.replace(/=.*$/, '').trim() // remove default params & trim
- ).filter(argname => argname // handle trailing commas
- );
- } else throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
- }
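- 
-  // Usage sketch (illustrative): default values and inline comments are
-  // stripped from the result.
-  //   Utils.functionArguments((x, y = 3) => x + y);       // ['x', 'y']
-  //   Utils.functionArguments(function f(a, b) { });      // ['a', 'b']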
-
- /**
- * Get all property descriptors from an object,
- * traversing its entire prototype chain
- * @param {object} obj
- * @returns {object}
- */
- static getAllPropertyDescriptors(obj) {
- if (obj) {
- const proto = Object.getPrototypeOf(obj);
- return Object.assign(Object.assign({}, Utils.getAllPropertyDescriptors(proto)), Object.getOwnPropertyDescriptors(obj));
- } else return Object.create(null);
- }
-
- /**
- * Creates a HTMLCanvasElement with the given dimensions
- * @param {number} width in pixels
- * @param {number} height in pixels
- * @returns {HTMLCanvasElement}
- */
- static createCanvas(width, height) {
- const canvas = document.createElement('canvas');
- canvas.width = width;
- canvas.height = height;
- return canvas;
- }
-
- /**
- * Generate a 1D gaussian kernel with custom sigma
- * Tip: use kernelSize >= (5 * sigma), kernelSize odd
- * @param {number} sigma gaussian sigma
- * @param {number} [kernelSize] kernel size, odd number
- * @param {boolean} [normalized] normalize entries so that their sum is 1
- * @returns {number[]}
- */
- static gaussianKernel(sigma, kernelSize = 0, normalized = true) {
- /*
- * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
- *
- * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
- *
- * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
- *
- * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
- * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
- *
- * Setting a constant c := sqrt(2) * sigma, it follows that:
- *
- * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
- */
-
- // default kernel size
- if (kernelSize == 0) {
- kernelSize = Math.ceil(5.0 * sigma) | 0;
- kernelSize += 1 - kernelSize % 2;
- }
-
- // validate input
- kernelSize |= 0;
- if (kernelSize < 1 || kernelSize % 2 == 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);else if (sigma <= 0.0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
-
- // function erf(x) = -erf(-x) can be approximated numerically. See:
- // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
- const kernel = new Array(kernelSize);
-
- // set constants
- const N = kernelSize >> 1; // integer (floor, div 2)
- const c = +sigma * 1.4142135623730951; // sigma * sqrt(2)
- const m = 0.3275911;
- const a1 = 0.254829592;
- const a2 = -0.284496736;
- const a3 = 1.421413741;
- const a4 = -1.453152027;
- const a5 = 1.061405429;
-
- // compute the kernel
- let sum = 0.0;
- for (let j = 0; j < kernelSize; j++) {
- let xa = (j - N + 0.5) / c;
- let xb = (j - N - 0.5) / c;
- let sa = 1.0,
- sb = 1.0;
- if (xa < 0.0) {
- sa = -1.0;
- xa = -xa;
- }
- if (xb < 0.0) {
- sb = -1.0;
- xb = -xb;
- }
- const ta = 1.0 / (1.0 + m * xa);
- const tb = 1.0 / (1.0 + m * xb);
- const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
- const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
- const ya = 1.0 - pa * Math.exp(-xa * xa);
- const yb = 1.0 - pb * Math.exp(-xb * xb);
- const erfa = sa * ya;
- const erfb = sb * yb;
- const fp = (erfa - erfb) / (2.0 * c);
- kernel[j] = fp;
- sum += fp;
- }
-
- // normalize the kernel
- if (normalized) {
- for (let j = 0; j < kernelSize; j++) kernel[j] /= sum;
- }
-
- // done!
- return kernel;
- }
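- 
-  /*
-   * Usage sketch (illustrative): the default kernel size is ceil(5 * sigma),
-   * bumped to the next odd number, and the normalized entries sum to 1.
-   *
-   *   const k = Utils.gaussianKernel(1.0);   // 5 taps, peak at the center k[2]
-   *   k.length;                              // 5
-   *   k.reduce((s, v) => s + v, 0);          // ~1.0
-   *   Math.abs(k[0] - k[4]) < 1e-9;          // true (symmetric)
-   */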
-
- /**
- * Generate a 2D kernel in column-major format using two separable 1D kernels
- * @param {number[]} ka 1D kernel
- * @param {number[]} [kb]
- * @returns {number[]}
- */
- static kernel2d(ka, kb = ka) {
- const ksize = ka.length;
- Utils.assert(ka.length == kb.length);
- Utils.assert(ksize >= 1 && ksize % 2 == 1);
-
- // compute the outer product ka x kb
- let kernel2d = new Array(ksize * ksize),
- k = 0;
- for (let col = 0; col < ksize; col++) {
- for (let row = 0; row < ksize; row++) kernel2d[k++] = ka[row] * kb[col];
- }
- return kernel2d;
- }
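- 
-  // Worked example (illustrative): the outer product of a 1D kernel with itself
-  // gives the separable 2D kernel in column-major order:
-  //   Utils.kernel2d([1, 2, 1]);   // [1, 2, 1,  2, 4, 2,  1, 2, 1]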
-
- /**
- * Cartesian product a x b: [ [ai, bj] for all i, j ]
- * @param {number[]} a
- * @param {number[]} b
- * @returns {Array<[number,number]>}
- */
- static cartesian(a, b) {
- return [].concat(...a.map(a => b.map(b => [a, b])));
- }
-
- /**
- * Symmetric range
- * @param {number} n non-negative integer
- * @returns {number[]} [ -n, ..., n ]
- */
- static symmetricRange(n) {
- if ((n |= 0) < 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
- return [...Array(2 * n + 1).keys()].map(x => x - n);
- }
-
- /**
- * Compute the [0, n) range of integers
- * @param {number} n positive integer
- * @returns {number[]} [ 0, 1, ..., n-1 ]
- */
- static range(n) {
- if ((n |= 0) <= 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
- return [...Array(n).keys()];
- }
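- 
-  // Usage sketch (illustrative):
-  //   Utils.symmetricRange(2);          // [-2, -1, 0, 1, 2]
-  //   Utils.range(3);                   // [0, 1, 2]
-  //   Utils.cartesian([0, 1], [2, 3]);  // [[0,2], [0,3], [1,2], [1,3]]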
-
- /**
- * Shuffle in-place
- * @template T
- * @param {T[]} arr
- * @returns {T[]} arr
- */
- static shuffle(arr) {
- const len = arr.length;
- const m = len - 1;
-
- // Fisher-Yates
- for (let i = 0; i < m; i++) {
- const j = i + (Math.random() * (len - i) | 0); // i <= j < arr.length
-
- if (i !== j) {
- const t = arr[i];
- arr[i] = arr[j];
- arr[j] = t;
- }
- }
- return arr;
- }
-
- /**
- * Flatten an array (1 level only)
- * @template U
- * @param {U[]} array
- * @returns {U[]}
- */
- static flatten(array) {
- //return array.flat();
- //return array.reduce((arr, val) => arr.concat(val), []);
-
- const flat = [];
- for (let i = 0, n = array.length; i < n; i++) {
- const entry = array[i];
- if (Array.isArray(entry)) {
- for (let j = 0, m = entry.length; j < m; j++) flat.push(entry[j]);
- } else flat.push(entry);
- }
- return flat;
- }
-
- /**
- * Decode a 16-bit float from a
- * unsigned 16-bit integer
- * @param {number} uint16
- * @returns {number}
- */
- static decodeFloat16(uint16) {
- // decode according to sec 2.1.2
- // 16-Bit Floating Point Numbers
- // of the OpenGL ES 3 spec
- const s = (uint16 & 0xFFFF) >> 15; // sign bit
- const e = (uint16 & 0x7FFF) >> 10; // exponent
- const m = uint16 & 0x3FF; // mantissa
- const sign = 1 - 2 * s; // (-1)^s
-
- if (e == 0) return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
- else if (e == 31) return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
- const f = e >= 15 ? 1 << e - 15 : 1.0 / (1 << 15 - e); // 2^(e-15)
- return sign * f * (1.0 + m * 0.0009765625); // normal
- }
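- 
-  // Worked examples (illustrative), following the half-float layout
-  // [ sign:1 | exponent:5 | mantissa:10 ]:
-  //   Utils.decodeFloat16(0x3C00);   // 1.0       (e = 15, m = 0)
-  //   Utils.decodeFloat16(0xC000);   // -2.0      (s = 1, e = 16, m = 0)
-  //   Utils.decodeFloat16(0x7C00);   // +Infinity (e = 31, m = 0)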
-
- /**
- * Wrapper around getUserMedia()
- * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
- * @returns {SpeedyPromise<HTMLVideoElement>}
- */
- static requestCameraStream(constraints = {
- audio: false,
- video: true
- }) {
- Utils.log('Accessing the webcam...');
- if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
- return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
- navigator.mediaDevices.getUserMedia(constraints).then(stream => {
- const video = document.createElement('video');
- video.onloadedmetadata = () => {
- video.play();
- Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
- resolve(video);
- };
- video.setAttribute('playsinline', '');
- video.setAttribute('autoplay', '');
- if (constraints.audio === false || constraints.audio === undefined) video.setAttribute('muted', '');
- video.srcObject = stream;
- }).catch(err => {
- if (err.name === 'NotAllowedError') {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(`Please give access to the camera and reload the page.`, err));
- } else if (err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(`Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`, err));
- } else {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(`Can't access the webcam.`, err));
- }
- });
- });
- }
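- 
-  /*
-   * Usage sketch (illustrative; assumes a secure context with a camera
-   * available and that the returned SpeedyPromise accepts a rejection handler
-   * in then(), like a standard Promise):
-   *
-   *   Utils.requestCameraStream({ video: { width: 640 }, audio: false }).then(
-   *       video => document.body.appendChild(video),
-   *       err => Utils.warning(err.message)
-   *   );
-   */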
-
- /**
- * Format binary data as a string with hex values
- * @param {ArrayBuffer} bytes
- * @returns {string}
- */
- static formatBinaryData(bytes) {
- const uint8 = new Uint8Array(bytes);
- const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
- return array.join(' ');
- }
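- 
-  // Usage sketch (illustrative):
-  //   Utils.formatBinaryData(new Uint8Array([0xDE, 0xAD, 0xBE, 0xEF]).buffer);
-  //   // "de ad be ef"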
-
- /**
- * Returns a string containing platform brand information
- * @returns {string}
- */
- static platformString() {
- // navigator.userAgent is easily and often spoofed, and thus is unreliable
-
- // use the NavigatorUAData interface if available
- if (typeof navigator.userAgentData === 'object') {
- // use only low entropy data, so we don't need to ask the permission
- // of the user to read this string
- return navigator.userAgentData.platform;
- }
-
- // navigator.platform is deprecated. It can be spoofed on Firefox, but,
- // at the time of this writing, there is apparently no alternative.
- return navigator.platform;
- }
- }
-
- /***/ }),
-
- /***/ 5235:
- /***/ ((module, __unused_webpack_exports, __nested_webpack_require_136472__) => {
-
- var map = {
- "./colors.glsl": 8609,
- "./filters.glsl": 4672,
- "./fixed-point.glsl": 9778,
- "./float16.glsl": 8710,
- "./global.glsl": 2434,
- "./int32.glsl": 439,
- "./keypoint-descriptors.glsl": 8545,
- "./keypoint-matches.glsl": 6762,
- "./keypoints.glsl": 7639,
- "./math.glsl": 431,
- "./platform.glsl": 6822,
- "./pyramids.glsl": 2728,
- "./subpixel.glsl": 6823
- };
-
-
- function webpackContext(req) {
- var id = webpackContextResolve(req);
- return __nested_webpack_require_136472__(id);
- }
- function webpackContextResolve(req) {
- if(!__nested_webpack_require_136472__.o(map, req)) {
- var e = new Error("Cannot find module '" + req + "'");
- e.code = 'MODULE_NOT_FOUND';
- throw e;
- }
- return map[req];
- }
- webpackContext.keys = function webpackContextKeys() {
- return Object.keys(map);
- };
- webpackContext.resolve = webpackContextResolve;
- module.exports = webpackContext;
- webpackContext.id = 5235;
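- 
-  // Usage sketch (illustrative): this is webpack's require.context() shim for
-  // the GLSL include files; a relative path is mapped to a module id and then
-  // required through the bundle:
-  //   webpackContext('./math.glsl');          // loads module 431 (math.glsl source)
-  //   webpackContext.keys();                  // ['./colors.glsl', './filters.glsl', ...]
-  //   webpackContext.resolve('./nope.glsl');  // throws MODULE_NOT_FOUND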
-
- /***/ }),
-
- /***/ 4606:
- /***/ ((module, __unused_webpack_exports, __nested_webpack_require_137422__) => {
-
- var map = {
- "./filters/convolution": 1672,
- "./filters/convolution.js": 1672,
- "./filters/convolution1d.glsl": 8211,
- "./filters/convolution2d.glsl": 7360,
- "./filters/fast-median.glsl": 8191,
- "./filters/nightvision.glsl": 4438,
- "./filters/normalize-image.glsl": 5867,
- "./filters/rgb2grey.glsl": 9252,
- "./include/colors.glsl": 8609,
- "./include/filters.glsl": 4672,
- "./include/fixed-point.glsl": 9778,
- "./include/float16.glsl": 8710,
- "./include/global.glsl": 2434,
- "./include/int32.glsl": 439,
- "./include/keypoint-descriptors.glsl": 8545,
- "./include/keypoint-matches.glsl": 6762,
- "./include/keypoints.glsl": 7639,
- "./include/math.glsl": 431,
- "./include/platform.glsl": 6822,
- "./include/pyramids.glsl": 2728,
- "./include/subpixel.glsl": 6823,
- "./keypoints/allocate-descriptors.glsl": 1341,
- "./keypoints/allocate-extra.glsl": 7833,
- "./keypoints/apply-homography.glsl": 2352,
- "./keypoints/bf-knn.glsl": 7541,
- "./keypoints/clip-border.glsl": 4868,
- "./keypoints/clip.glsl": 5591,
- "./keypoints/distance-filter.glsl": 191,
- "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
- "./keypoints/encode-keypoint-offsets.glsl": 336,
- "./keypoints/encode-keypoint-positions.glsl": 8968,
- "./keypoints/encode-keypoint-properties.glsl": 1733,
- "./keypoints/encode-keypoints.glsl": 9674,
- "./keypoints/encode-null-keypoints.glsl": 2090,
- "./keypoints/fast.glsl": 1855,
- "./keypoints/fast.vs.glsl": 4824,
- "./keypoints/hamming-distance-filter.glsl": 2381,
- "./keypoints/harris-cutoff.glsl": 6060,
- "./keypoints/harris.glsl": 9974,
- "./keypoints/knn-init.glsl": 3047,
- "./keypoints/knn-transfer.glsl": 3266,
- "./keypoints/laplacian.glsl": 8018,
- "./keypoints/lk.glsl": 3168,
- "./keypoints/lookup-of-locations.glsl": 3890,
- "./keypoints/lookup-of-locations.vs.glsl": 8647,
- "./keypoints/lsh-knn.glsl": 4776,
- "./keypoints/mix-keypoints.glsl": 2648,
- "./keypoints/nonmax-scale.glsl": 8825,
- "./keypoints/nonmax-space.glsl": 5693,
- "./keypoints/nonmax-suppression.glsl": 9280,
- "./keypoints/orb-descriptor.glsl": 9108,
- "./keypoints/orb-orientation.glsl": 7137,
- "./keypoints/refine-scale.glsl": 9739,
- "./keypoints/score-findmax.glsl": 8231,
- "./keypoints/shuffle.glsl": 2518,
- "./keypoints/sort-keypoints.glsl": 8096,
- "./keypoints/subpixel-refinement.glsl": 5795,
- "./keypoints/transfer-flow.glsl": 3169,
- "./keypoints/transfer-orientation.glsl": 1337,
- "./keypoints/transfer-to-extra.glsl": 6187,
- "./keypoints/upload-keypoints.glsl": 477,
- "./pyramids/downsample2.glsl": 4050,
- "./pyramids/upsample2.glsl": 5545,
- "./transforms/additive-mix.glsl": 7113,
- "./transforms/resize.glsl": 1202,
- "./transforms/warp-perspective.glsl": 7971,
- "./utils/copy-components.glsl": 6122,
- "./utils/copy-raster.glsl": 371,
- "./utils/copy.glsl": 7307,
- "./utils/fill-components.glsl": 8614,
- "./utils/fill.glsl": 6271,
- "./utils/flip-y.vs.glsl": 3016,
- "./utils/scan-minmax2d.glsl": 3630,
- "./utils/sobel-derivatives.glsl": 8508,
- "./utils/sobel-derivatives.vs.glsl": 8073
- };
-
-
- function webpackContext(req) {
- var id = webpackContextResolve(req);
- return __nested_webpack_require_137422__(id);
- }
- function webpackContextResolve(req) {
- if(!__nested_webpack_require_137422__.o(map, req)) {
- var e = new Error("Cannot find module '" + req + "'");
- e.code = 'MODULE_NOT_FOUND';
- throw e;
- }
- return map[req];
- }
- webpackContext.keys = function webpackContextKeys() {
- return Object.keys(map);
- };
- webpackContext.resolve = webpackContextResolve;
- module.exports = webpackContext;
- webpackContext.id = 4606;
-
- /***/ }),
-
- /***/ 8211:
- /***/ ((module) => {
-
- module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 7360:
- /***/ ((module) => {
-
- module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 8191:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 4438:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
-
- /***/ }),
-
- /***/ 5867:
- /***/ ((module) => {
-
- module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
-
- /***/ }),
-
- /***/ 9252:
- /***/ ((module) => {
-
- module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 8609:
- /***/ ((module) => {
-
- module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
-
- /***/ }),
-
- /***/ 4672:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
-
- /***/ }),
-
- /***/ 9778:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((x) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
-
- /***/ }),
-
- /***/ 8710:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
-
- /***/ }),
-
- /***/ 2434:
- /***/ ((module) => {
-
- module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
-
- /***/ }),
-
- /***/ 439:
- /***/ ((module) => {
-
- module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
-
- /***/ }),
-
- /***/ 8545:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
-
- /***/ }),
-
- /***/ 6762:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
-
- /***/ }),
-
- /***/ 7639:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
-
- /***/ }),
-
- /***/ 431:
- /***/ ((module) => {
-
- module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
-
- /***/ }),
-
- /***/ 6822:
- /***/ ((module) => {
-
- module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
-
- /***/ }),
-
- /***/ 2728:
- /***/ ((module) => {
-
- module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
-
- /***/ }),
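-
- // pyramids.glsl (above): the level-of-detail is packed into a normalized channel by mapping
- // lod in [-LOG2_PYRAMID_MAX_SCALE, PYRAMID_MAX_LEVELS] onto [0, 1]. As an illustration only, if
- // the build-time constants were LOG2_PYRAMID_MAX_SCALE = 2 and PYRAMID_MAX_LEVELS = 8, then
- // encodeLod(3.0) = (2 + 3) / (2 + 8) = 0.5 and decodeLod(0.5) = 0.5 * 10 - 2 = 3.0.
- // Because of quantization, lods are compared with isSameLod() / isSameEncodedLod() using the
- // tolerance LOD_EPS = 0.0625 (1/16) rather than with ==.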
-
- /***/ 6823:
- /***/ ((module) => {
-
- module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
-
- /***/ }),
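-
- // subpixel.glsl (above): subpixelAtBI() is a hand-rolled bilinear interpolation. The four
- // neighbors are weighted by the fractional part of the position; e.g. for pos = (10.25, 20.5),
- // frc = (0.25, 0.5) and the weights of (pix00, pix10, pix01, pix11) are
- // (0.75*0.5, 0.25*0.5, 0.75*0.5, 0.25*0.5) = (0.375, 0.125, 0.375, 0.125), which sum to 1.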
-
- /***/ 1341:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 7833:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 2352:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
-
- /***/ }),
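-
- // The shader above warps keypoint positions by a 3x3 homography in homogeneous coordinates:
- // (x, y) -> H * (x, y, 1), followed by the perspective division pos3.xy / pos3.z. Only the
- // position pixel (address.offset == 0) is rewritten; bad keypoints are left untouched.
- // Example: the scaling homography H = diag(2, 2, 1) maps (100, 50) to (200, 100, 1) -> (200, 100).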
-
- /***/ 7541:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
-
- /***/ }),
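-
- // The shader above is one pass of a brute-force descriptor matcher: each keypoint compares its
- // descriptor against NUMBER_OF_KEYPOINTS_PER_PASS database keypoints per pass (passId selects the
- // chunk) and keeps the closest candidate seen so far. A candidate is only accepted if it also
- // ranks strictly after the match stored in encodedFilters (ties broken by database index), which
- // is what lets repeated runs extract the 1st, 2nd, ..., k-th nearest neighbor without ever
- // returning the same database entry twice.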
-
- /***/ 4868:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
-
- /***/ }),
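-
- // The shader above nullifies keypoints lying inside the requested border. The vectorized test is
- // equivalent to: x < borderLeft, or y < borderTop, or x > imageWidth - 1 - borderRight, or
- // y > imageHeight - 1 - borderBottom. For instance, with imageWidth = 640 and borderRight = 10,
- // any keypoint with x > 629 is replaced by a null keypoint.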
-
- /***/ 5591:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
-
- /***/ }),
-
- /***/ 191:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
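-
- // The shader above compares two encoded keypoint streams position-wise and keeps a keypoint of
- // stream A only if its counterpart in stream B lies within `threshold` pixels. The test uses the
- // squared distance, dot(delta, delta) <= threshold^2, to avoid a square root: with threshold = 3,
- // a displacement of (2, 2) passes (8 <= 9) while (3, 1) is discarded (10 > 9).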
-
- /***/ 5467:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
-
- /***/ }),
-
- /***/ 336:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
-
- /***/ }),
-
- /***/ 8968:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
-
- /***/ }),
-
- /***/ 1733:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
-
- /***/ }),
-
- /***/ 9674:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
-
- /***/ }),
-
- /***/ 2090:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
-
- /***/ }),
-
- /***/ 1855:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
-
- /***/ }),
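-
- // The shader above is the FAST-9,16 corner test at a given pyramid lod: a pixel is a corner when
- // at least 9 contiguous pixels on the 16-pixel Bresenham circle of radius 3 are all brighter than
- // center + t or all darker than center - t, with t = threshold / 255 (e.g. threshold = 20 gives
- // t ~ 0.078). The four compass points p0, p4, p8, p12 act as a cheap rejection test, and the
- // giant boolean expression enumerates the remaining 9-contiguous arcs. The corner score is the
- // mean absolute excess beyond the threshold over the 16 circle pixels, and it only overwrites the
- // previously stored score for this pixel if it is larger.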
-
- /***/ 4824:
- /***/ ((module) => {
-
- module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
-
- /***/ }),
-
- /***/ 2381:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
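-
- // The shader above filters keypoints by descriptor similarity: a keypoint of stream A is kept
- // only when the distance between its descriptor and the corresponding descriptor in stream B does
- // not exceed `threshold` (for binary descriptors this is a Hamming distance, i.e. a count of
- // differing bits); otherwise it is marked as discarded. As an example, for a 256-bit descriptor,
- // threshold = 32 allows at most 12.5% of the bits to differ.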
-
- /***/ 6060:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
-
- /***/ }),
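-
- // The shader above applies a relative quality cutoff: a corner survives only if its score is at
- // least `quality` times the maximum score read from the maxScore texture. For example, with
- // quality = 0.1 and a maximum response of 0.8, every corner scoring below 0.08 has its score
- // zeroed out (the corner pixel itself is kept, only its encoded score is cleared).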
-
- /***/ 9974:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
-
- /***/ }),
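-
- // The shader above computes a Shi-Tomasi style corner response: h = (Ixx, Ixy, Iyy) is the
- // Gaussian-weighted structure tensor accumulated over a WINDOW_SIZE x WINDOW_SIZE neighborhood,
- // and response = 0.5 * (h.x + h.z - sqrt((h.x - h.z)^2 + 4 * h.y^2)) is its smaller eigenvalue,
- // normalized by the window area; e.g. h = (2, 0, 1) has eigenvalues 2 and 1, so the raw response
- // is 1. The stored response is replaced only when the scale strength (absolute Laplacian) at the
- // current lod is at least that of the previous scale, or when nothing was stored yet, which acts
- // as a coarse scale selection.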
-
- /***/ 3047:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
-
- /***/ }),
-
- /***/ 3266:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
-
- /***/ }),
-
- /***/ 8018:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
-
- /***/ }),
-
- /***/ 3168:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
-
- /***/ }),
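-
- // The shader above runs one pyramid level of pyramidal Lucas-Kanade optical flow per keypoint.
- // It reads a (WINDOW_SIZE + 2)^2 patch from both frames, builds the 2x2 structure matrix
- // G = [[Ixx, Ixy], [Ixy, Iyy]] from Scharr-like 3/10/3 derivatives, and iterates
- // delta = G^-1 * b, where b is the mismatch between the shifted next-frame patch and the
- // previous-frame patch. The guess inherited from the coarser level is doubled (pyrGuess *= 2)
- // before refinement, and the flow is discarded at level 0 when G is nearly singular, its least
- // eigenvalue falls below discardThreshold * windowArea, or the tracked point leaves the image.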
-
- /***/ 3890:
- /***/ ((module) => {
-
- module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
-
- /***/ }),
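-
- // The multi-stage shader above builds the lookup table of corner locations consumed by the
- // keypoint encoder. Roughly: stage 1 converts the corner map into (x, y, count) / null entries,
- // and each later stage merges blocks of side blockSize into blocks of side 2 * blockSize, summing
- // the corner counts (s2b) and packing the surviving entries toward the start of each merged
- // block. The remaining branch (STAGE < 1) only renders a debug view of the table.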
-
- /***/ 8647:
- /***/ ((module) => {
-
- module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
-
- /***/ }),
-
- /***/ 4776:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
-
- /***/ }),
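-
- // The shader above matches descriptors with multi-probe locality-sensitive hashing. A descriptor
- // is hashed by sampling HASH_SIZE of its bits according to one of the precomputed bit sequences;
- // the SWAP tables are XOR masks used to probe nearby buckets, flipping 0 bits (LEVEL 0), up to 1
- // bit (LEVEL 1) or up to 2 bits (LEVEL 2) of the hash. For HASH_SIZE = 10 that means 1, 11 and 56
- // probes respectively (1, 1 + 10, 1 + 10 + C(10,2) = 56). Candidates found in the probed buckets
- // are ranked by descriptor distance under the same best/filter ordering as the brute-force
- // matcher, so k-NN retrieval works the same way here.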
-
- /***/ 2648:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
-
- /***/ }),
-
- /***/ 8825:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
-
- /***/ }),
-
- /***/ 5693:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
-
- /***/ }),
-
- /***/ 9280:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
-
- /***/ }),
-
- /***/ 9108:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-8,-13)
,\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
-
- /***/ }),
-
- /***/ 7137:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(10,-5)
,P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
-
- /***/ }),
-
- /***/ 9739:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
-
- /***/ }),
-
- /***/ 8231:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
-
- /***/ }),
-
- /***/ 2518:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 8096:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
-
- /***/ }),
-
- /***/ 5795:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
-
- /***/ }),
-
- /***/ 3169:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
-
- /***/ 1337:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
-
- /***/ }),
-
- /***/ 6187:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
-
- /***/ }),
-
- /***/ 477:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
-
- /***/ }),
-
- /***/ 4050:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
-
- /***/ }),
-
- /***/ 5545:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
-
- /***/ }),
-
- /***/ 7113:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 1202:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
-
- /***/ }),
-
- /***/ 7971:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
-
- /***/ }),
-
- /***/ 6122:
- /***/ ((module) => {
-
- module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
-
- /***/ }),
-
- /***/ 371:
- /***/ ((module) => {
-
- module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
-
- /***/ }),
-
- /***/ 7307:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
-
- /***/ }),
-
- /***/ 8614:
- /***/ ((module) => {
-
- module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
-
- /***/ }),
-
- /***/ 6271:
- /***/ ((module) => {
-
- module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
-
- /***/ }),
-
- /***/ 3016:
- /***/ ((module) => {
-
- module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
-
- /***/ }),
-
- /***/ 3630:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
-
- /***/ }),
-
- /***/ 8508:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
-
- /***/ }),
-
- /***/ 8073:
- /***/ ((module) => {
-
- module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
-
- /***/ }),
-
- /***/ 3575:
- /***/ ((module) => {
-
- module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
- f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
- f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
- BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
- AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
- CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
- Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
- AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
- dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
- TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
- X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
- MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
- ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
- PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
- CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
- AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
- gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
- AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
- QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
- AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
- gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
- IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
- gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
- hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
- nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
- AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
- EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
- IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
- AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
- IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
- gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
- AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
- IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
- AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
- AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
- IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
- AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
- QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
- gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
- IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
- OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
- IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
- AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
- APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
- IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
- AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
- IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
- CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
- hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
- IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
- AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
- BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
- QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
- ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
- AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
- BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
- iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
- IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
- AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
- gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
- QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
- QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
- ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
- gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
- NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
- AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
- gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
- IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
- a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
- KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
- QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
- CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
- ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
- QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
- CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
- AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
- QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
- ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
- BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
- AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
- KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
- EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
- DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
- BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
- Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
- DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
- AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
- QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
- IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
- QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
- QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
- IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
- CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
- KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
- IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
- ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
- KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
- AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
- DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
- QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
- QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
- ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
- EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
- SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
- KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
- gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
- ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
- ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
- IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
- IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
- IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
- DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
- A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
- akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
- DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
- u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
- AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
- IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
- IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
- aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
- QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
- KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
- bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
- IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
- IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
- IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
- An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
- DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
- KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
- QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
- BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
- QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
- KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
- ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
- GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
- QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
- ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
- B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
- DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
- BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
- bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
- IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
- DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
- IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
- QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
- FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
- DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
- AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
- AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
- QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
- AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
- EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
- SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
- Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
- ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
- IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
- fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
- IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
- gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
- gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
- CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
- IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
- ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
- DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
- KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
- gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
- ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
- AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
- gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
- aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
- SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
- CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
- CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
- CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
- CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
- AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
- ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
- gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
- QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
- AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
- lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
- kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
- 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
- ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
- NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
- gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
- BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
- AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
- QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
- GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
- C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
- DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
- QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
- IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
- DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
- f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
- gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
- gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
- IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
- AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
- aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
- bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
- IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
- AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
- ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
- gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
- AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
- IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
- BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
- l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
- gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
- AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
- AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
- aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
- ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
- aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
- AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
- FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
- IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
- oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
- AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
- IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
- IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
- 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
- oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
- IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
- PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
- APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
- oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
- GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
- IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
- mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
- BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
- AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
- oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
- oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
- FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
- oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
- IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
- JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
- ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
- AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
- DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
- IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
- ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
- CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
- QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
- IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
- IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
- ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
- AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
- CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
- dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
- KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
- AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
- HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
- BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
- AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
- gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
- QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
- AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
- BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
- IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
- kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
- BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
- IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
- AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
- AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
- QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
- CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
- lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
- dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
- IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
- IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
- C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
- IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
- AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
- KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
- Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
- ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
- IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
- EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
- IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
- IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
- QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
- IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
- AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
- ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
- CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
- AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
- QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
- kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
- GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
- ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
- OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
- BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
- AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
- IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
- A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
- dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
- KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
- IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
- FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
- GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
- PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
- E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
- IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
- IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
- KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
- Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
- ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
- gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
- BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
- EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
- gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
- AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
- IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
- AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
- lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
- b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
- ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
- IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
- gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
- ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
- KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
- AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
- dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
- NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
- ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
- DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
- IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
- IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
- GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
- BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
- ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
- gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
- IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
- FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
- ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
- QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
- DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
- dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
- IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
- IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
- IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
- AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
- D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
- EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
- QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
- IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
- IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
- KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
- QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
- GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
- AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
- ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
- DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
- AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
- IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
- zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
- ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
- AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
- IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
- AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
- QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
- B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
- DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
- gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
- KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
- AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
- KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
- HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
- IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
- IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
- HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
- QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
- gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
- QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
- QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
- ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
- ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
- ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
- EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
- KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
- X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
- ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
- QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
- dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
- AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
- AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
- AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
- KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
- koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
- CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
- CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
- IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
- aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
- ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
- IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
- EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
- A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
- A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
- KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
- AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
- lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
- ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
- IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
- aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
- iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
- DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
- AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
- AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
- IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
- lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
- QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
- IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
- Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
- BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
- A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
- ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
- IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
- taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
- NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
- gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
- Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
- DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
- aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
- ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
- C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
- dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
- IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
- dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
- NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
- YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
- bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
- AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
- b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
- QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
- AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
- dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
- AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
- dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
- NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
- ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
- YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
- aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
- MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
- dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
- ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
- cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
- AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
- AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
- OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
- MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
- MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
- cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
- aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
- LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
- ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
- bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
- dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
- MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
- YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
- YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
- cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
- b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
- AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
- NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
- bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
- aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
- `
-
- /***/ })
-
- /******/ });
- /************************************************************************/
- /******/ // The module cache
- /******/ var __webpack_module_cache__ = {};
- /******/
- /******/ // The require function
- /******/ function __nested_webpack_require_314174__(moduleId) {
- /******/ // Check if module is in cache
- /******/ var cachedModule = __webpack_module_cache__[moduleId];
- /******/ if (cachedModule !== undefined) {
- /******/ return cachedModule.exports;
- /******/ }
- /******/ // Create a new module (and put it into the cache)
- /******/ var module = __webpack_module_cache__[moduleId] = {
- /******/ // no module.id needed
- /******/ // no module.loaded needed
- /******/ exports: {}
- /******/ };
- /******/
- /******/ // Execute the module function
- /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_314174__);
- /******/
- /******/ // Return the exports of the module
- /******/ return module.exports;
- /******/ }
- /******/
- /************************************************************************/
- /******/ /* webpack/runtime/define property getters */
- /******/ (() => {
- /******/ // define getter functions for harmony exports
- /******/ __nested_webpack_require_314174__.d = (exports, definition) => {
- /******/ for(var key in definition) {
- /******/ if(__nested_webpack_require_314174__.o(definition, key) && !__nested_webpack_require_314174__.o(exports, key)) {
- /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
- /******/ }
- /******/ }
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/hasOwnProperty shorthand */
- /******/ (() => {
- /******/ __nested_webpack_require_314174__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
- /******/ })();
- /******/
- /******/ /* webpack/runtime/make namespace object */
- /******/ (() => {
- /******/ // define __esModule on exports
- /******/ __nested_webpack_require_314174__.r = (exports) => {
- /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
- /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
- /******/ }
- /******/ Object.defineProperty(exports, '__esModule', { value: true });
- /******/ };
- /******/ })();
- /******/
- /************************************************************************/
- var __nested_webpack_exports__ = {};
- // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
- (() => {
- "use strict";
-
- // EXPORTS
- __nested_webpack_require_314174__.d(__nested_webpack_exports__, {
- "default": () => (/* binding */ Speedy)
- });
-
- // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
- var speedy_gl = __nested_webpack_require_314174__(1001);
- // EXTERNAL MODULE: ./src/utils/utils.js
- var utils = __nested_webpack_require_314174__(9037);
- // EXTERNAL MODULE: ./src/core/settings.js
- var settings = __nested_webpack_require_314174__(2199);
- // EXTERNAL MODULE: ./src/core/speedy-promise.js
- var speedy_promise = __nested_webpack_require_314174__(9192);
- ;// CONCATENATED MODULE: ./src/utils/asap.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * asap.js
- * Schedule a function to run "as soon as possible"
- */
-
- /** callbacks */
- const callbacks = /** @type {Function[]} */[];
-
- /** arguments to be passed to the callbacks */
- const args = /** @type {any[][]} */[];
-
- /** asap key */
- const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
-
- // Register an event listener
- window.addEventListener('message', event => {
- if (event.source !== window || event.data !== ASAP_KEY) return;
- event.stopPropagation();
- if (callbacks.length == 0) return;
- const fn = callbacks.pop();
- const argArray = args.pop();
- fn.apply(undefined, argArray);
- }, true);
-
- /**
- * Schedule a function to run "as soon as possible"
- * @param {Function} fn callback
- * @param {any[]} params optional parameters
- */
- function asap(fn, ...params) {
- callbacks.unshift(fn);
- args.unshift(params);
- window.postMessage(ASAP_KEY, '*');
- }
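- // Usage sketch (illustrative only): schedule a callback with optional parameters.
- // asap() enqueues the callback and its arguments above and posts ASAP_KEY to this
- // window; the 'message' listener then dequeues and invokes it with those arguments.
- //
- //   asap((a, b) => console.log('ran asap:', a + b), 2, 3);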
- // EXTERNAL MODULE: ./src/utils/errors.js
- var utils_errors = __nested_webpack_require_314174__(8581);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture-reader.js
- * Reads data from textures
- */
-
-
-
-
-
-
-
-
-
- /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
- const DEFAULT_NUMBER_OF_BUFFERS = 2;
-
- /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
- const runOnNextFrame = navigator.userAgent.includes('Firefox') ?
- (fn, ...args) => setTimeout(fn, 10, ...args) : // RAF produces a warning on Firefox
- (fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args)); // reduce battery usage
-
- /**
- * Reads data from textures
- */
- class SpeedyTextureReader {
- /**
- * Constructor
- * @param {number} [numberOfBuffers]
- */
- constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS) {
- utils/* Utils */.A.assert(numberOfBuffers > 0);
-
- /** @type {boolean} is this object initialized? */
- this._initialized = false;
-
- /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
- this._pixelBuffer = new Array(numberOfBuffers).fill(null).map(() => new Uint8Array(0));
-
- /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
- this._pbo = new Array(numberOfBuffers).fill(null);
-
- /** @type {number} the index of the buffer that will be consumed in this frame */
- this._consumerIndex = 0;
-
- /** @type {number} the index of the buffer that will be produced next */
- this._producerIndex = numberOfBuffers - 1;
-
- /** @type {SpeedyPromise<void>[]} producer-consumer promises */
- this._promise = Array.from({
- length: numberOfBuffers
- }, () => speedy_promise/* SpeedyPromise */.i.resolve());
-
- /** @type {boolean[]} are the contents of the ith buffer being produced? */
- this._busy = new Array(numberOfBuffers).fill(false);
-
- /** @type {boolean[]} can the ith buffer be consumed? */
- this._ready = new Array(numberOfBuffers).fill(true);
- }
-
- /**
- * Initialize this object
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- this._allocatePBOs(gpu);
- gpu.subscribe(this._allocatePBOs, this, gpu);
- this._initialized = true;
- }
-
- /**
- * Release resources
- * @param {SpeedyGPU} gpu
- * @returns {null}
- */
- release(gpu) {
- gpu.unsubscribe(this._allocatePBOs, this);
- this._deallocatePBOs(gpu);
- this._initialized = false;
- return null;
- }
-
- /**
- * Read pixels from a texture, synchronously.
- * You may optionally specify an (x,y,width,height) sub-rectangle.
- * @param {SpeedyDrawableTexture} texture a texture with an FBO
- * @param {number} [x]
- * @param {number} [y]
- * @param {number} [width]
- * @param {number} [height]
- * @returns {Uint8Array} pixels in the RGBA format
- */
- readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height) {
- utils/* Utils */.A.assert(this._initialized);
- const gl = texture.gl;
- const fbo = texture.glFbo;
-
- // clamp values
- width = Math.max(0, Math.min(width, texture.width));
- height = Math.max(0, Math.min(height, texture.height));
- x = Math.max(0, Math.min(x, texture.width - width));
- y = Math.max(0, Math.min(y, texture.height - height));
-
- // buffer allocation
- const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
- this._reallocate(sizeofBuffer);
-
- // lost context?
- if (gl.isContextLost()) return this._pixelBuffer[0].subarray(0, sizeofBuffer);
-
- // read pixels
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // done!
- return this._pixelBuffer[0].subarray(0, sizeofBuffer);
- }
-
- /**
- * Read pixels from a texture, asynchronously, with PBOs.
- * You may optionally specify an (x,y,width,height) sub-rectangle.
- * @param {SpeedyDrawableTexture} texture a texture with an FBO
- * @param {number} [x]
- * @param {number} [y]
- * @param {number} [width]
- * @param {number} [height]
- * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
- * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
- */
- readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false) {
- utils/* Utils */.A.assert(this._initialized);
- const gl = texture.gl;
- const fbo = texture.glFbo;
-
- // clamp values
- width = Math.max(0, Math.min(width, texture.width));
- height = Math.max(0, Math.min(height, texture.height));
- x = Math.max(0, Math.min(x, texture.width - width));
- y = Math.max(0, Math.min(y, texture.height - height));
-
- // buffer allocation
- const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
- this._reallocate(sizeofBuffer);
-
- // lost context?
- if (gl.isContextLost()) return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
-
- // do not optimize?
- if (!useBufferedDownloads) {
- const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
- return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() => pixelBuffer);
- }
-
- // Hide latency with a Producer-Consumer mechanism
- const numberOfBuffers = this._pixelBuffer.length;
-
- // GPU needs to produce data
- const producerIndex = this._producerIndex;
- if (!this._busy[producerIndex]) {
- const pbo = this._pbo[producerIndex];
- const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
- this._producerIndex = (producerIndex + 1) % numberOfBuffers;
- this._ready[producerIndex] = false;
- this._busy[producerIndex] = true;
- //console.time("produce "+producerIndex);
- this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
- //console.timeEnd("produce "+producerIndex);
- this._busy[producerIndex] = false;
- this._ready[producerIndex] = true;
- });
- }
- //else console.log("skip",producerIndex);
- else /* skip frame */;
-
- // CPU needs to consume data
- const consumerIndex = this._consumerIndex;
- this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
- if (!this._ready[consumerIndex]) {
- //console.time("consume "+consumerIndex);
- return this._promise[consumerIndex].then(() => {
- //console.timeEnd("consume "+consumerIndex);
- this._ready[consumerIndex] = false;
- return this._pixelBuffer[consumerIndex];
- });
- }
-
- //console.log("NO WAIT "+consumerIndex);
- this._ready[consumerIndex] = false;
- return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
- }
-
- /**
- * Reallocate the pixel buffers, so that they can hold the required number of bytes
- * If the pixel buffers already have the required capacity, then nothing is done
- * @param {number} size in bytes
- */
- _reallocate(size) {
- // no need to reallocate
- if (size <= this._pixelBuffer[0].byteLength) return;
-
- // reallocate
- for (let i = 0; i < this._pixelBuffer.length; i++) {
- const newBuffer = new Uint8Array(size);
- //newBuffer.set(this._pixelBuffer[i]); // make this optional?
- this._pixelBuffer[i] = newBuffer;
- }
- }
-
- /**
- * Allocate PBOs
- * @param {SpeedyGPU} gpu
- */
- _allocatePBOs(gpu) {
- const gl = gpu.gl;
- for (let i = 0; i < this._pbo.length; i++) this._pbo[i] = gl.createBuffer();
- }
-
- /**
- * Deallocate PBOs
- * @param {SpeedyGPU} gpu
- */
- _deallocatePBOs(gpu) {
- const gl = gpu.gl;
- for (let i = this._pbo.length - 1; i >= 0; i--) {
- gl.deleteBuffer(this._pbo[i]);
- this._pbo[i] = null;
- }
- }
-
- /**
- * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
- * It's assumed that the target texture is in the RGBA8 format
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLBuffer} pbo
- * @param {Uint8Array} outputBuffer with size >= width * height * 4
- * @param {WebGLFramebuffer} fbo
- * @param {GLint} x
- * @param {GLint} y
- * @param {GLsizei} width
- * @param {GLsizei} height
- * @returns {SpeedyPromise<void>}
- */
- static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height) {
- /*
- When testing Speedy on Chrome (mobile) using about:tracing with the
- --enable-gpu-service-tracing flag, I found that A LOT of time is spent
- in TraceGLAPI::glMapBufferRange, which takes place just after
- GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
- Using multiple PBOs doesn't seem to impact Chrome too much. Performance
- is much better on Firefox. This suggests there is room for improvement.
- I do not yet clearly understand the cause of this lag on Chrome. It
- may be a CPU-GPU synchronization issue.
- EDIT: I have found that using gl.flush() aggressively greatly improves
- things. WebGL commands will be pushed frequently!
- See also:
- https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
- https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
- */
- const size = width * height * 4;
-
- // validate outputBuffer
- utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
-
- // read pixels into the PBO
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
- gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
-
- // create a fence
- const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
- gl.flush(); // make sure the sync command is read
-
- // wait for the commands to be processed by the GPU
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- // according to the WebGL2 spec sec 3.7.14 Sync objects,
- // "sync objects may only transition to the signaled state
- // when the user agent's event loop is not executing a task"
- // in other words, it won't be signaled in the same frame
- if (settings/* Settings */.w.gpuPollingMode != 'asap') runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
- else asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
- }).then(() => {
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
- gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
- }).catch(err => {
- throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
- }).finally(() => {
- gl.deleteSync(sync);
- });
- }
-
- /**
- * Waits for a sync object to become signaled
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLSync} sync
- * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
- * @param {Function} resolve
- * @param {Function} reject
- * @param {number} [pollInterval] in milliseconds
- * @param {number} [remainingAttempts] for timeout
- */
- static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000) {
- (function poll() {
- const status = gl.clientWaitSync(sync, flags, 0);
- if (remainingAttempts-- <= 0) {
- reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
- } else if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
- resolve();
- } else {
- //setTimeout(poll, pollInterval);
- if (settings/* Settings */.w.gpuPollingMode != 'asap') requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
- else asap(poll);
- }
- })();
- }
- }
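- // Usage sketch (illustrative only; `gpu` and `texture` stand for a SpeedyGPU and a
- // SpeedyDrawableTexture obtained elsewhere). With useBufferedDownloads = true, the
- // promise may resolve with pixels read back on a previous call, hiding the GPU-CPU
- // transfer latency behind the ring of PBOs when streaming:
- //
- //   const reader = new SpeedyTextureReader(2);
- //   reader.init(gpu);
- //   reader.readPixelsAsync(texture, 0, 0, texture.width, texture.height, true)
- //         .then(rgba => console.log('got', rgba.length, 'RGBA bytes'))
- //         .then(() => reader.release(gpu));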
- // EXTERNAL MODULE: ./src/utils/globals.js
- var globals = __nested_webpack_require_314174__(3816);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture.js
- * A wrapper around WebGLTexture
- */
-
-
-
-
-
-
-
- /**
- * Get a buffer filled with zeros
- * @param {number} size number of bytes
- * @returns {Uint8Array}
- */
- /*
- const zeros = (function() {
- let buffer = new Uint8Array(4);
-
- return function(size) {
- if(size > buffer.length)
- buffer = new Uint8Array(size);
-
- return buffer.subarray(0, size);
- }
- })();
- */
-
- /**
- * A wrapper around WebGLTexture
- */
- class SpeedyTexture {
- /**
- * Constructor
- * @param {WebGL2RenderingContext} gl
- * @param {number} width texture width in pixels
- * @param {number} height texture height in pixels
- * @param {number} [format]
- * @param {number} [internalFormat]
- * @param {number} [dataType]
- * @param {number} [filter]
- * @param {number} [wrap]
- */
- constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT) {
- /** @type {WebGL2RenderingContext} rendering context */
- this._gl = gl;
-
- /** @type {number} width of the texture */
- this._width = Math.max(1, width | 0);
-
- /** @type {number} height of the texture */
- this._height = Math.max(1, height | 0);
-
- /** @type {boolean} have we generated mipmaps for this texture? */
- this._hasMipmaps = false;
-
- /** @type {number} texture format */
- this._format = format;
-
- /** @type {number} internal format (usually a sized format) */
- this._internalFormat = internalFormat;
-
- /** @type {number} data type */
- this._dataType = dataType;
-
- /** @type {number} texture filtering (min & mag) */
- this._filter = filter;
-
- /** @type {number} texture wrapping */
- this._wrap = wrap;
-
- /** @type {WebGLTexture} internal texture object */
- this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
- }
-
- /**
- * Releases the texture
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // already released?
- if (this._glTexture == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
-
- // release resources
- this.discardMipmaps();
- gl.deleteTexture(this._glTexture);
- this._glTexture = null;
- this._width = this._height = 0;
-
- // done!
- return null;
- }
-
- /**
- * Upload pixel data to the texture. The texture will be resized if needed.
- * @param {TexImageSource} data
- * @param {number} [width] in pixels
- * @param {number} [height] in pixels
- * @return {SpeedyTexture} this
- */
- upload(data, width = this._width, height = this._height) {
- const gl = this._gl;
-
- // bugfix: if the media is a video, we can't really
- // upload it to the GPU unless it's ready
- if (data instanceof HTMLVideoElement) {
- if (data.readyState < 2) {
- // this may happen when the video loops (Firefox)
- // keep the previously uploaded texture
- //Utils.warning(`Trying to process a video that isn't ready yet`);
- return this;
- }
- }
- utils/* Utils */.A.assert(width > 0 && height > 0);
- this.discardMipmaps();
- this._width = width;
- this._height = height;
- this._internalFormat = gl.RGBA8;
- this._format = gl.RGBA;
- this._dataType = gl.UNSIGNED_BYTE;
- SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
- return this;
- }
-
- /**
- * Clear the texture
- * @returns {this}
- */
- clear() {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // clear texture data
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // no mipmaps
- this.discardMipmaps();
-
- // done!
- return this;
- }
-
- /**
- * Resize this texture. Its content will be lost!
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @returns {this}
- */
- resize(width, height) {
- const gl = this._gl;
-
- // no need to resize?
- if (this._width === width && this._height === height) return this;
-
- // validate size
- width |= 0;
- height |= 0;
- if (width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH) throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);
- else if (width < 1 || height < 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // update dimensions
- this._width = width;
- this._height = height;
-
- // resize
- // Note: this is fast on Chrome, but seems slow on Firefox
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // no mipmaps
- this.discardMipmaps();
-
- // done!
- return this;
- }
-
- /**
- * Generate mipmap
- * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
- * @returns {SpeedyTexture} this
- */
- generateMipmaps(mipmap = []) {
- const gl = this._gl;
-
- // nothing to do
- if (this._hasMipmaps) return this;
-
- // let the hardware compute all the levels of the pyramid, up to 1x1
- // we also specify the TEXTURE_MIN_FILTER to be used from now on
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
- gl.generateMipmap(gl.TEXTURE_2D);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // accept custom textures
- if (mipmap.length > 0) {
- // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
- const width = this.width,
- height = this.height;
- const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
- utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
-
- // verify the dimensions of each level
- for (let level = 1; level < mipmap.length; level++) {
- // use max(1, floor(size / 2^lod)), in accordance with
- // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
- const w = Math.max(1, width >>> level);
- const h = Math.max(1, height >>> level);
-
- // verify the dimensions of this level
- utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
-
- // copy to mipmap
- mipmap[level].copyTo(this, level);
- }
- }
-
- // done!
- this._hasMipmaps = true;
- return this;
- }
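- 
- /*
- * Worked example (not from the original source): for a 640x480 base texture,
- * the OpenGL ES 3.0 formulas used above give
- * numMipmaps = 1 + floor(log2(max(640, 480))) = 1 + 9 = 10
- * and the level sizes, computed as max(1, floor(size / 2^level)), are
- * 640x480, 320x240, 160x120, 80x60, 40x30, 20x15, 10x7, 5x3, 2x1, 1x1.
- * Hence a custom mipmap array passed to generateMipmaps() holds at most 10
- * entries, and mipmap[3], for instance, must be an 80x60 texture.
- */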
-
- /**
- * Invalidates previously generated mipmap, if any
- */
- discardMipmaps() {
- const gl = this._gl;
-
- // nothing to do
- if (!this._hasMipmaps) return;
-
- // reset the min filter
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // done!
- this._hasMipmaps = false;
- }
-
- /**
- * Does this texture have a mipmap?
- * @returns {boolean}
- */
- hasMipmaps() {
- return this._hasMipmaps;
- }
-
- /**
- * Has this texture been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._glTexture == null;
- }
-
- /**
- * The internal WebGLTexture
- * @returns {WebGLTexture}
- */
- get glTexture() {
- return this._glTexture;
- }
-
- /**
- * The width of the texture, in pixels
- * @returns {number}
- */
- get width() {
- return this._width;
- }
-
- /**
- * The height of the texture, in pixels
- * @returns {number}
- */
- get height() {
- return this._height;
- }
-
- /**
- * The WebGL Context
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._gl;
- }
-
- /**
- * Create a WebGL texture
- * @param {WebGL2RenderingContext} gl
- * @param {number} width in pixels
- * @param {number} height in pixels
- * @param {number} format usually gl.RGBA
- * @param {number} internalFormat usually gl.RGBA8
- * @param {number} dataType usually gl.UNSIGNED_BYTE
- * @param {number} filter usually gl.NEAREST or gl.LINEAR
- * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
- * @returns {WebGLTexture}
- */
- static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap) {
- utils/* Utils */.A.assert(width > 0 && height > 0);
-
- // create & bind texture
- const texture = gl.createTexture();
- gl.bindTexture(gl.TEXTURE_2D, texture);
-
- // setup
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
- //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
- gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
-
- // unbind & return
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
-
- /**
- * Upload pixel data to a WebGL texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLTexture} texture
- * @param {GLsizei} width texture width
- * @param {GLsizei} height texture height
- * @param {TexImageSource} pixels
- * @param {GLint} lod mipmap level-of-detail
- * @param {number} format
- * @param {number} internalFormat
- * @param {number} dataType
- * @returns {WebGLTexture} texture
- */
- static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType) {
- // Prefer calling _upload() before gl.useProgram() to avoid the
- // needless switching of GL programs internally. See also:
- // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
- gl.bindTexture(gl.TEXTURE_2D, texture);
-
- /*
- // slower than texImage2D, unlike the spec?
- gl.texSubImage2D(gl.TEXTURE_2D, // target
- lod, // mip level
- 0, // x-offset
- 0, // y-offset
- width, // texture width
- height, // texture height
- gl.RGBA, // source format
- gl.UNSIGNED_BYTE, // source type
- pixels); // source data
- */
-
- gl.texImage2D(gl.TEXTURE_2D, // target
- lod, // mip level
- internalFormat, // internal format
- width, // texture width
- height, // texture height
- 0, // border
- format, // source format
- dataType, // source type
- pixels); // source data
-
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
- }
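- 
- /*
- * Usage sketch (not part of the original source; assumes a valid
- * WebGL2RenderingContext gl and a loaded HTMLImageElement img):
- *
- * const texture = new SpeedyTexture(gl, img.width, img.height);
- * texture.upload(img); // copy the pixels to the GPU
- * texture.generateMipmaps(); // optional: build the pyramid
- * // ... use texture.glTexture in WebGL calls ...
- * texture.release(); // free the WebGLTexture when done
- */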
-
- /**
- * A SpeedyTexture with a framebuffer
- */
- class SpeedyDrawableTexture extends SpeedyTexture {
- /**
- * Constructor
- * @param {WebGL2RenderingContext} gl
- * @param {number} width texture width in pixels
- * @param {number} height texture height in pixels
- * @param {number} [format]
- * @param {number} [internalFormat]
- * @param {number} [dataType]
- * @param {number} [filter]
- * @param {number} [wrap]
- */
- constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined) {
- super(gl, width, height, format, internalFormat, dataType, filter, wrap);
-
- /** @type {WebGLFramebuffer} framebuffer */
- this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
- }
-
- /**
- * Releases the texture
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // already released?
- if (this._glFbo == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
-
- // release the framebuffer
- gl.deleteFramebuffer(this._glFbo);
- this._glFbo = null;
-
- // release the SpeedyTexture
- return super.release();
- }
-
- /**
- * The internal WebGLFramebuffer
- * @returns {WebGLFramebuffer}
- */
- get glFbo() {
- return this._glFbo;
- }
-
- /**
- * Copy this texture into another
- * (you may have to discard the mipmaps after calling this function)
- * @param {SpeedyTexture} texture target texture
- * @param {number} [lod] level-of-detail of the target texture
- */
- copyTo(texture, lod = 0) {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return;
-
- // compute texture size as max(1, floor(size / 2^lod)),
- // in accordance with the OpenGL ES 3.0 spec sec 3.8.10.4
- // (Mipmapping)
- const pot = 1 << (lod |= 0);
- const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
- const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
-
- // validate
- utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
-
- // copy to texture
- SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
- }
-
- /*
- * Resize this texture
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
- * @returns {this}
- */
- /*resize(width, height, preserveContent = false)
- {
- const gl = this._gl;
- // no need to preserve the content?
- if(!preserveContent)
- return super.resize(width, height);
- // no need to resize?
- if(this._width === width && this._height === height)
- return this;
- // validate size
- width |= 0; height |= 0;
- Utils.assert(width > 0 && height > 0);
- // context loss?
- if(gl.isContextLost())
- return this;
- // allocate new texture
- const newTexture = SpeedyTexture._createTexture(gl, width, height);
- // initialize the new texture with zeros to avoid a
- // warning when calling copyTexSubImage2D() on Firefox
- // this may not be very efficient?
- SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
- // copy the old texture to the new one
- const oldWidth = this._width, oldHeight = this._height;
- SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
- // bind FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
- // invalidate old data (is this needed?)
- gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
- // attach the new texture to the existing framebuffer
- gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
- gl.COLOR_ATTACHMENT0, // color buffer
- gl.TEXTURE_2D, // tex target
- newTexture, // texture
- 0); // mipmap level
- // unbind FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- // release the old texture and replace it
- gl.deleteTexture(this._glTexture);
- this._glTexture = newTexture;
- // update dimensions & discard mipmaps
- this.discardMipmaps();
- this._width = width;
- this._height = height;
- // done!
- return this;
- }
- */
-
- /**
- * Clear the texture
- * @returns {this}
- */
- clear() {
- //
- // When we pass null to texImage2D(), it seems that Firefox
- // doesn't clear the texture. Instead, it displays this warning:
- //
- // "WebGL warning: drawArraysInstanced:
- // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
- //
- // Here is a workaround:
- //
- return this.clearToColor(0, 0, 0, 0);
- }
-
- /**
- * Clear the texture to a color
- * @param {number} r red component, a value in [0,1]
- * @param {number} g green component, a value in [0,1]
- * @param {number} b blue component, a value in [0,1]
- * @param {number} a alpha component, a value in [0,1]
- * @returns {this}
- */
- clearToColor(r, g, b, a) {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // clamp parameters
- r = Math.max(0.0, Math.min(+r, 1.0));
- g = Math.max(0.0, Math.min(+g, 1.0));
- b = Math.max(0.0, Math.min(+b, 1.0));
- a = Math.max(0.0, Math.min(+a, 1.0));
-
- // discard mipmaps, if any
- this.discardMipmaps();
-
- // clear the texture
- gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
- gl.viewport(0, 0, this._width, this._height);
- gl.clearColor(r, g, b, a);
- gl.clear(gl.COLOR_BUFFER_BIT);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // done!
- return this;
- }
-
- /**
- * Inspect the pixels of the texture for debugging purposes
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTextureReader} [textureReader] optional texture reader
- * @returns {Uint8Array}
- */
- inspect(gpu, textureReader) {
- if (textureReader === undefined) {
- textureReader = new SpeedyTextureReader();
- textureReader.init(gpu);
- const pixels = textureReader.readPixelsSync(this);
- textureReader.release(gpu);
- return new Uint8Array(pixels); // copy the array
- } else {
- const pixels = textureReader.readPixelsSync(this);
- return new Uint8Array(pixels);
- }
- }
-
- /**
- * Inspect the pixels of the texture as unsigned 32-bit integers
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTextureReader} [textureReader] optional texture reader
- * @returns {Uint32Array}
- */
- inspect32(gpu, textureReader) {
- utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
- return new Uint32Array(this.inspect(gpu, textureReader).buffer);
- }
-
- /**
- * Create a FBO associated with an existing texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLTexture} texture
- * @returns {WebGLFramebuffer}
- */
- static _createFramebuffer(gl, texture) {
- const fbo = gl.createFramebuffer();
-
- // setup framebuffer
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
- gl.COLOR_ATTACHMENT0, // color buffer
- gl.TEXTURE_2D, // tex target
- texture, // texture
- 0); // mipmap level
-
- // check for errors
- const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
- if (status != gl.FRAMEBUFFER_COMPLETE) {
- const error = ['FRAMEBUFFER_UNSUPPORTED', 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS', 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'].find(err => gl[err] === status) || 'unknown error';
- throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
- }
-
- // unbind & return
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- return fbo;
- }
-
- /**
- * Copy data from a framebuffer to a texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLFramebuffer} fbo we'll read the data from this
- * @param {WebGLTexture} texture destination texture
- * @param {GLint} x xpos (where to start copying)
- * @param {GLint} y ypos (where to start copying)
- * @param {GLsizei} width width of the texture
- * @param {GLsizei} height height of the texture
- * @param {GLint} [lod] mipmap level-of-detail
- * @returns {WebGLTexture} texture
- */
- static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0) {
- //gl.activeTexture(gl.TEXTURE0);
- gl.bindTexture(gl.TEXTURE_2D, texture);
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.copyTexSubImage2D(gl.TEXTURE_2D, // target
- lod, // mipmap level
- 0, // xoffset
- 0, // yoffset
- x, // xpos (where to start copying)
- y, // ypos (where to start copying)
- width, // width of the texture
- height); // height of the texture
-
- /*
- gl.copyTexImage2D(
- gl.TEXTURE_2D, // target
- lod, // mipmap level
- gl.RGBA, // internal format
- x, // xpos (where to start copying)
- y, // ypos (where to start copying)
- width, // width of the texture
- height, // height of the texture
- 0 // border
- );
- */
-
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
- }
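- 
- /*
- * Usage sketch (not part of the original source; gl is assumed to be a valid
- * WebGL2RenderingContext):
- *
- * const drawable = new SpeedyDrawableTexture(gl, 256, 256);
- * drawable.clearToColor(0, 0, 0, 1); // opaque black
- *
- * // copy its contents into mip level 0 of another 256x256 texture
- * const target = new SpeedyTexture(gl, 256, 256);
- * drawable.copyTo(target);
- *
- * drawable.release();
- * target.release();
- */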
- // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
- var shader_declaration = __nested_webpack_require_314174__(9420);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program.js
- * SpeedyProgram class
- */
-
-
-
-
-
-
-
- /** @const {Object<string,string>} Map uniform type to a gl function */
- const UNIFORM_SETTERS = Object.freeze({
- 'sampler2D': 'uniform1i',
- 'isampler2D': 'uniform1i',
- 'usampler2D': 'uniform1i',
- 'float': 'uniform1f',
- 'int': 'uniform1i',
- 'uint': 'uniform1ui',
- 'bool': 'uniform1i',
- 'vec2': 'uniform2f',
- 'vec3': 'uniform3f',
- 'vec4': 'uniform4f',
- 'ivec2': 'uniform2i',
- 'ivec3': 'uniform3i',
- 'ivec4': 'uniform4i',
- 'uvec2': 'uniform2ui',
- 'uvec3': 'uniform3ui',
- 'uvec4': 'uniform4ui',
- 'bvec2': 'uniform2i',
- 'bvec3': 'uniform3i',
- 'bvec4': 'uniform4i',
- 'mat2': 'uniformMatrix2fv',
- 'mat3': 'uniformMatrix3fv',
- 'mat4': 'uniformMatrix4fv'
- });
-
- /**
- * @typedef {object} SpeedyProgramOptions
- * @property {boolean} [renderToTexture] render results to a texture?
- * @property {boolean} [pingpong] alternate output texture between calls
- */
-
- /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
-
- /**
- * A SpeedyProgram is a Function that runs GLSL code
- */
- class SpeedyProgram extends Function {
- /**
- * Creates a new SpeedyProgram
- * @param {WebGL2RenderingContext} gl WebGL context
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} [options] user options
- */
- constructor(gl, shaderdecl, options = {}) {
- super('...args', 'return this._self._call(...args)');
-
- /** @type {SpeedyProgram} this function bound to this function! */
- this._self = this.bind(this);
- this._self._init(gl, shaderdecl, options);
- return this._self;
- }
-
- /**
- * Initialize the SpeedyProgram
- * @param {WebGL2RenderingContext} gl WebGL context
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} options user options
- */
- _init(gl, shaderdecl, options) {
- // not a valid context?
- if (gl.isContextLost()) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
-
- // options object
- options = Object.assign({
- // default options
- renderToTexture: true,
- pingpong: false
- }, options);
-
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- /** @type {WebGLProgram} vertex shader + fragment shader */
- this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
-
- /** @type {ProgramGeometry} this is a quad */
- this._geometry = new ProgramGeometry(gl, {
- position: shaderdecl.locationOfAttributes.position,
- texCoord: shaderdecl.locationOfAttributes.texCoord
- });
-
- /** @type {string[]} names of the arguments of the SpeedyProgram */
- this._argnames = shaderdecl.arguments;
-
- /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
- this._argIsArray = new Array(this._argnames.length).fill(false);
-
- /** @type {UBOHelper} UBO helper (lazy instantiation) */
- this._ubo = null;
-
- /** @type {boolean} should we render to a texture? If false, we render to the canvas */
- this._renderToTexture = Boolean(options.renderToTexture);
-
- /** @type {number} width of the output */
- this._width = 1;
-
- /** @type {number} height of the output */
- this._height = 1;
-
- /** @type {[number,number]} cached object that stores the size of the output */
- this._size = [1, 1];
-
- /** @type {SpeedyDrawableTexture[]} output texture(s) */
- this._texture = new Array(options.pingpong ? 2 : 1).fill(null);
-
- /** @type {number} used for pingpong rendering */
- this._textureIndex = 0;
-
- /** @type {Map<string,UniformVariable>} uniform variables */
- this._uniform = new Map();
-
- /** @type {ShaderDeclaration} shader declaration */
- this._shaderdecl = shaderdecl;
-
- // autodetect uniforms
- gl.useProgram(this._program);
- for (const name of shaderdecl.uniforms) {
- const type = shaderdecl.uniformType(name);
- const location = gl.getUniformLocation(this._program, name);
- this._uniform.set(name, new UniformVariable(type, location));
- }
-
- // match arguments & uniforms
- for (let j = 0; j < this._argnames.length; j++) {
- const argname = this._argnames[j];
- if (!this._uniform.has(argname)) {
- this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
- if (!this._argIsArray[j]) throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
- }
- }
- }
-
- /**
- * Run the SpeedyProgram
- * @param {...SpeedyProgramUniformValue} args
- * @returns {SpeedyDrawableTexture}
- */
- _call(...args) {
- const gl = this._gl;
- const argnames = this._argnames;
- const texture = this._texture[this._textureIndex];
-
- // matching arguments?
- if (args.length != argnames.length) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
-
- // can't use the output texture as an input
- /*
- // slower method
- const flatArgs = Utils.flatten(args);
- for(let j = flatArgs.length - 1; j >= 0; j--) {
- if(flatArgs[j] === this._texture[this._textureIndex])
- throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
- }
- */
- for (let j = args.length - 1; j >= 0; j--) {
- if (args[j] === texture) throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
- // else if(Array.isArray(args[j])) ...
- // we don't support passing arrays of textures at the time of this writing
- }
-
- // context loss?
- if (gl.isContextLost()) return texture;
-
- // use program
- gl.useProgram(this._program);
-
- // bind the VAO
- gl.bindVertexArray(this._geometry.vao);
-
- // select the render target
- const fbo = this._renderToTexture ? texture.glFbo : null;
-
- // update texSize uniform (available in all fragment shaders)
- const texSize = this._uniform.get('texSize');
- this._size[0] = this._width;
- this._size[1] = this._height;
- texSize.setValue(gl, this._size);
-
- // set uniforms[i] to args[i]
- for (let i = 0, texNo = 0; i < args.length; i++) {
- const argname = argnames[i];
- if (!this._argIsArray[i]) {
- // uniform variable matches argument name
- const uniform = this._uniform.get(argname);
- texNo = uniform.setValue(gl, args[i], texNo);
- } else {
- // uniform array matches argument name
- const array = args[i];
- if (Array.isArray(array)) {
- if (this._uniform.has(indexedVariable(argname, array.length))) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
- for (let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++) texNo = uniform.setValue(gl, array[j], texNo);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
- }
- }
-
- // set Uniform Buffer Objects (if any)
- if (this._ubo !== null) this._ubo.update();
-
- // bind the FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
-
- // draw call
- gl.viewport(0, 0, this._width, this._height);
- gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
-
- // unbind the FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // unbind the VAO
- gl.bindVertexArray(null);
-
- // we've just changed the texture! discard the pyramid, if any
- if (texture != null) texture.discardMipmaps();
-
- // ping-pong rendering
- this._pingpong();
-
- // done!
- return texture;
- }
-
- /**
- * Set the output texture(s) and its (their) shape(s)
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @param {...SpeedyDrawableTexture|null} texture output texture(s)
- * @returns {SpeedyProgram} this
- */
- outputs(width, height, ...texture) {
- this._setOutputTexture(...texture);
- this._setOutputSize(width, height);
- return this;
- }
-
- /**
- * Set the size of the output
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @returns {SpeedyProgram} this
- */
- _setOutputSize(width, height) {
- utils/* Utils */.A.assert(width > 0 && height > 0);
-
- // update output size
- this._width = width | 0;
- this._height = height | 0;
-
- // resize the output texture(s)
- for (let i = 0; i < this._texture.length; i++) {
- if (this._texture[i] != null) this._texture[i].resize(this._width, this._height);
- }
-
- // done!
- return this;
- }
-
- /**
- * Use the provided texture(s) as output
- * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
- * @returns {SpeedyProgram} this
- */
- _setOutputTexture(...texture) {
- utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
-
- // update output texture(s)
- for (let i = 0; i < this._texture.length; i++) this._texture[i] = texture[i];
- this._textureIndex = 0;
-
- // done!
- return this;
- }
-
- /**
- * Clear the internal textures
- * @returns {SpeedyDrawableTexture}
- */
- clear() {
- const texture = this._texture[this._textureIndex];
-
- // clear internal textures
- for (let i = 0; i < this._texture.length; i++) this._texture[i].clear();
-
- // ping-pong rendering
- this._pingpong();
-
- // done!
- return texture;
- }
-
- /**
- * Set data using a Uniform Buffer Object
- * @param {string} blockName uniform block name
- * @param {ArrayBufferView} data
- * @returns {SpeedyProgram} this
- */
- setUBO(blockName, data) {
- if (this._ubo === null) this._ubo = new UBOHelper(this._gl, this._program);
- this._ubo.set(blockName, data);
- return this;
- }
-
- /**
- * Release the resources associated with this SpeedyProgram
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // Release UBOs (if any)
- if (this._ubo != null) this._ubo = this._ubo.release();
-
- // Unlink textures
- this._texture.fill(null);
-
- // Release geometry
- this._geometry = this._geometry.release();
-
- // Release program
- gl.deleteProgram(this._program);
- this._program = null;
-
- // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
- // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
- // will automatically be marked for deletion when the JS object is
- // destroyed (i.e., garbage collected)
-
- // done!
- return null;
- }
-
- /**
- * A constant #defined in the shader declaration
- * @param {string} name
- * @returns {number}
- */
- definedConstant(name) {
- return this._shaderdecl.definedConstant(name);
- }
-
- /**
- * Helper method for pingpong rendering: alternates
- * the texture index from 0 to 1 and vice-versa
- */
- _pingpong() {
- if (this._texture.length > 1) this._textureIndex = 1 - this._textureIndex;
- }
-
- /**
- * Compile and link GLSL shaders
- * @param {WebGL2RenderingContext} gl
- * @param {string} vertexShaderSource GLSL code of the vertex shader
- * @param {string} fragmentShaderSource GLSL code of the fragment shader
- * @returns {WebGLProgram}
- */
- static _compile(gl, vertexShaderSource, fragmentShaderSource) {
- const program = gl.createProgram();
- const vertexShader = gl.createShader(gl.VERTEX_SHADER);
- const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
-
- // compile vertex shader
- gl.shaderSource(vertexShader, vertexShaderSource);
- gl.compileShader(vertexShader);
- gl.attachShader(program, vertexShader);
-
- // compile fragment shader
- gl.shaderSource(fragmentShader, fragmentShaderSource);
- gl.compileShader(fragmentShader);
- gl.attachShader(program, fragmentShader);
-
- // link program
- gl.linkProgram(program);
- gl.validateProgram(program);
-
- // return on success
- if (gl.getProgramParameter(program, gl.LINK_STATUS)) return program;
-
- // collect the error logs & clean up
- const errors = [gl.getShaderInfoLog(fragmentShader), gl.getShaderInfoLog(vertexShader), gl.getProgramInfoLog(program)];
- gl.deleteProgram(program);
- gl.deleteShader(fragmentShader);
- gl.deleteShader(vertexShader);
-
- // display error
- const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
- const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
- const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
- const formattedSource = source.split('\n').map((line, no) => col(1 + no) + line).join('\n');
- throw new utils_errors/* GLError */.wB(`\n\n---------- ERROR ----------\n\n` + errors.filter(err => err).join('\n') + `\n\n---------- SOURCE CODE ----------\n\n` + formattedSource + '\n');
- }
- }
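- 
- /*
- * Usage sketch (not part of the original source): gl is a WebGL2 context and
- * shaderdecl is a ShaderDeclaration whose fragment shader takes a single
- * "image" argument; the names below are illustrative only.
- *
- * const myProgram = new SpeedyProgram(gl, shaderdecl); // renders to a texture
- * myProgram.outputs(640, 480, outputTexture); // outputTexture: SpeedyDrawableTexture
- * const result = myProgram(inputTexture); // run the GLSL code
- * // result is the output texture; with { pingpong: true }, two output
- * // textures must be provided and the program alternates between them
- * myProgram.release();
- */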
-
- // ============================================================================
- // HELPERS
- // ============================================================================
-
- /**
- * Configure and store the VAO and the VBOs
- * @param {WebGL2RenderingContext} gl
- * @param {LocationOfAttributes} location
- * @returns {ProgramGeometry}
- *
- * @typedef {Object} LocationOfAttributes
- * @property {number} position
- * @property {number} texCoord
- *
- * @typedef {Object} BufferOfAttributes
- * @property {WebGLBuffer} position
- * @property {WebGLBuffer} texCoord
- */
- function ProgramGeometry(gl, location) {
- /** @type {WebGLVertexArrayObject} Vertex Array Object */
- this.vao = gl.createVertexArray();
-
- /** @type {BufferOfAttributes} Vertex Buffer Objects */
- this.vbo = Object.freeze({
- position: gl.createBuffer(),
- texCoord: gl.createBuffer()
- });
-
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- // bind the VAO
- gl.bindVertexArray(this.vao);
-
- // set the position attribute
- gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
- // clip coordinates (CCW)
- -1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
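- // (the six vertices above form two CCW triangles covering the whole clip space:
- // (-1,-1),(1,-1),(-1,1) and (-1,1),(1,-1),(1,1))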
- gl.enableVertexAttribArray(location.position);
- gl.vertexAttribPointer(location.position, // attribute location
- 2, // 2 components per vertex (x,y)
- gl.FLOAT, // type
- false, // don't normalize
- 0, // default stride (tightly packed)
- 0); // offset
-
- // set the texCoord attribute
- gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
- // texture coordinates (CCW)
- 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);
- gl.enableVertexAttribArray(location.texCoord);
- gl.vertexAttribPointer(location.texCoord, // attribute location
- 2, // 2 components per vertex (x,y)
- gl.FLOAT, // type
- false, // don't normalize
- 0, // default stride (tightly packed)
- 0); // offset
-
- // unbind
- gl.bindBuffer(gl.ARRAY_BUFFER, null);
- gl.bindVertexArray(null);
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * Releases the internal resources
- * @returns {null}
- */
- ProgramGeometry.prototype.release = function () {
- const gl = this._gl;
- gl.deleteVertexArray(this.vao);
- gl.deleteBuffer(this.vbo.position);
- gl.deleteBuffer(this.vbo.texCoord);
- return null;
- };
-
- /**
- * Helper class for storing data in GLSL uniform variables
- * @param {string} type
- * @param {WebGLUniformLocation} location
- */
- function UniformVariable(type, location) {
- /** @type {string} GLSL data type */
- this.type = String(type);
- if (!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
-
- /** @type {WebGLUniformLocation} uniform location in a WebGL program */
- this.location = location;
-
- /** @type {string} setter function */
- this.setter = UNIFORM_SETTERS[this.type];
- const n = Number(this.setter.match(/^uniform(Matrix)?(\d)/)[2]) | 0;
-
- /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
- this.dim = this.type.startsWith('mat') ? 2 : this.type.indexOf('vec') >= 0 ? 1 : 0;
-
- /** @type {number} required number of scalars */
- this.length = this.dim == 2 ? n * n : n;
-
- /** @type {SpeedyProgramUniformValue|null} cached value */
- this._value = null;
- }
-
- /**
- * Set the value of a uniform variable
- * @param {WebGL2RenderingContext} gl
- * @param {SpeedyProgramUniformValue} value use column-major format for matrices
- * @param {number} [texNo] current texture index
- * @returns {number} new texture index
- */
- UniformVariable.prototype.setValue = function (gl, value, texNo = -1) {
- const setValue = /** @type {Function} */gl[this.setter];
-
- // check uniform type
- if (typeof value === 'object' && this.type.endsWith('sampler2D')) {
- // set texture
- if (texNo >= gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS) throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS}`);
- else if (Array.isArray(value)) throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);
- else if (value == null) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);
- else if (texNo < 0) throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`);
- const tex = value;
- gl.activeTexture(gl.TEXTURE0 + texNo);
- gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
- gl.uniform1i(this.location, texNo);
- texNo++;
- } else if (value === this._value && typeof value !== 'object') {
- // do not update the uniform if it hasn't changed
- // note that value may be an array whose entries may have been updated
- void 0;
- } else if (typeof value === 'number' || typeof value === 'boolean') {
- // set scalar value
- setValue.call(gl, this.location, value);
- } else if (Array.isArray(value)) {
- // set vector or matrix
- if (value.length === this.length) {
- if (this.dim == 2) setValue.call(gl, this.location, false, value); // matrix
- else setValue.call(gl, this.location, ...value); // vector
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
-
- // cache the value
- this._value = value;
-
- // done
- return texNo;
- };
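- 
- /*
- * Worked example (not part of the original source): a uniform declared as mat3
- * maps to the setter 'uniformMatrix3fv'; the regex above extracts n = 3, so
- * dim = 2 (matrix) and length = 9. setValue() then expects a 9-element array in
- * column-major order and issues gl.uniformMatrix3fv(location, false, value).
- * A vec2, by contrast, maps to 'uniform2f' (n = 2, dim = 1, length = 2) and is
- * issued as gl.uniform2f(location, value[0], value[1]).
- */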
-
- /**
- * @typedef {object} UBOStuff
- * @property {WebGLBuffer} buffer
- * @property {number} blockBindingIndex "global" binding index
- * @property {number} blockIndex UBO "location" in the program
- * @property {ArrayBufferView|null} data user-data
- */
-
- /**
- * A helper class for handling Uniform Buffer Objects (UBOs)
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLProgram} program
- */
- function UBOHelper(gl, program) {
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- /** @type {WebGLProgram} */
- this._program = program;
-
- /** @type {number} auto-increment counter */
- this._nextIndex = 0;
-
- /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
- this._ubo = Object.create(null);
- }
-
- /**
- * Set Uniform Buffer Object data
- * (the buffer will be uploaded when the program is executed)
- * @param {string} name uniform block name
- * @param {ArrayBufferView} data
- */
- UBOHelper.prototype.set = function (name, data) {
- const gl = this._gl;
-
- // create UBO entry
- if (this._ubo[name] === undefined) {
- this._ubo[name] = {
- buffer: gl.createBuffer(),
- blockBindingIndex: this._nextIndex++,
- blockIndex: -1,
- data: null
- };
- }
-
- // get UBO entry for the given block name
- const ubo = this._ubo[name];
-
- // read block index & assign binding point
- if (ubo.blockIndex < 0) {
- const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
- gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
- ubo.blockIndex = blockIndex;
- }
-
- // store the data - we'll upload it later
- ubo.data = data;
- };
-
- /**
- * Update UBO data
- * Called when we're using the appropriate WebGLProgram
- */
- UBOHelper.prototype.update = function () {
- const gl = this._gl;
- for (const name in this._ubo) {
- const ubo = this._ubo[name];
- gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
- gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
- gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
- gl.bindBuffer(gl.UNIFORM_BUFFER, null);
- }
- };
-
- /**
- * Release allocated buffers
- * @returns {null}
- */
- UBOHelper.prototype.release = function () {
- const gl = this._gl;
- for (const name in this._ubo) {
- const ubo = this._ubo[name];
- gl.deleteBuffer(ubo.buffer);
- ubo.data = null;
- }
- return null;
- };
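- 
- /*
- * Usage sketch (not part of the original source; block name and sizes are
- * illustrative only): if a fragment shader declares a uniform block such as
- *
- * layout(std140) uniform MyBlock { vec4 myData[4]; };
- *
- * then its data can be uploaded before running the program with
- *
- * myProgram.setUBO('MyBlock', new Float32Array(16)); // 4 x vec4 = 16 floats
- *
- * The contents of the typed array must follow the std140 layout rules of the
- * uniform block.
- */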
-
- /**
- * Generates an indexed variable name, as in variable[index]
- * @param {string} variable
- * @param {number} index
- * @returns {string} variable[index]
- */
- function indexedVariable(variable, index) {
- //return `${variable}[${index}]`; // no caching
-
- // is this cache lookup really faster than string concatenation?
- // what about memory consumption?
- const cache = indexedVariable.cache;
- let nameList = cache.get(variable);
- if (nameList === undefined) cache.set(variable, nameList = []);
- if (nameList[index] === undefined) nameList[index] = `${variable}[${index}]`;
- return nameList[index];
- }
-
- /** @type {Map<string,string[]>} cached argument names */
- indexedVariable.cache = new Map(); // Object.create(null)
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program-group.js
- * An abstract group of programs that run on the GPU
- */
-
-
-
-
-
- /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
-
- /**
- * @typedef {object} SpeedyProgramHelpers
- * @property {function(): SpeedyProgramOptions} usesPingpongRendering
- * @property {function(): SpeedyProgramOptions} rendersToCanvas
- */
-
- /** @const {SpeedyProgramHelpers} Program settings generator */
- const PROGRAM_HELPERS = Object.freeze({
- /**
- * Pingpong Rendering: the output texture of a
- * program cannot be used as an input to itself.
- * This is a convenient helper in these situations
- * @returns {SpeedyProgramOptions}
- */
- usesPingpongRendering() {
- return {
- pingpong: true
- };
- },
- /**
- * Render to canvas
- * Use it when we're supposed to see the texture
- * @returns {SpeedyProgramOptions}
- */
- rendersToCanvas() {
- return {
- renderToTexture: false
- };
- }
- });
-
- /**
- * SpeedyProgramGroup
- * A semantically correlated group
- * of programs that run on the GPU
- * @abstract
- */
- class SpeedyProgramGroup {
- /**
- * Class constructor
- * @protected
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- /** @type {SpeedyGPU} GPU-accelerated routines */
- this._gpu = gpu;
-
- /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
- this._programs = [];
- }
-
- /**
- * Declare a program
- * @protected
- * @param {string} name Program name
- * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
- * @param {SpeedyProgramOptions} [options] Program settings
- * @returns {this}
- */
- declare(name, builder, options = {}) {
- // lazy instantiation of kernels
- Object.defineProperty(this, name, {
- get: (() => {
- // Why cast a symbol to symbol?
- // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
- const key = /** @type {symbol} */Symbol(name);
- return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
- })()
- });
- return this;
- }
-
- /**
- * Neat helpers to be used when declaring programs
- * @returns {SpeedyProgramHelpers}
- */
- get program() {
- return PROGRAM_HELPERS;
- }
-
- /**
- * Releases all programs from this group
- * @returns {null}
- */
- release() {
- for (let i = 0; i < this._programs.length; i++) this._programs[i].release();
- return null;
- }
-
- /**
- * Spawn a SpeedyProgram
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} [options] Program settings
- * @returns {SpeedyProgram}
- */
- _createProgram(shaderdecl, options = {}) {
- const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
- this._programs.push(program);
- return program;
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * utils.js
- * GPU utilities
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Copy image
- const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
-
- // Copy keypoints
- const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
- 'TYPE': 1
- }).withArguments('image');
-
- // Copy 2D vectors
- const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
- 'TYPE': 2
- }).withArguments('image');
-
- // Flip y-axis for output
- const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
-
- // Fill image with a constant
- const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
-
- // Fill zero or more color components of the input image with a constant value
- const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
-
- // Copy the src component of src to zero or more color components of a copy of dest
- const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
-
- // Scan the entire image and find the minimum & maximum pixel intensity
- const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
-
- // Compute the partial derivatives of an image
- const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
-
- /**
- * SpeedyProgramGroupUtils
- * Utility operations
- */
- class SpeedyProgramGroupUtils extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // render to the canvas
- .declare('renderToCanvas', flipY, Object.assign({}, this.program.rendersToCanvas()))
-
- // copy image
- .declare('copy', copy)
-
- // copy keypoints
- .declare('copyKeypoints', copyKeypoints)
-
- // copy 2D vectors
- .declare('copy2DVectors', copy2DVectors)
-
- // Fill image with a constant
- .declare('fill', fill)
-
- // Fill zero or more color components of the input image with a constant value
- .declare('fillComponents', fillComponents)
-
- // Copy the src component of src to zero or more color components of a copy of dest
- .declare('copyComponents', copyComponents)
-
- // find minimum & maximum pixel intensity
- .declare('scanMinMax2D', scanMinMax2D, Object.assign({}, this.program.usesPingpongRendering()))
-
- // Compute the partial derivatives of an image
- .declare('sobelDerivatives', sobelDerivatives);
- }
- }
- // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
- var convolution = __nested_webpack_require_314174__(1672);
- ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * filters.js
- * Image filtering on the GPU
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Convert to greyscale
- const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl').withArguments('image');
-
- // Convolution
- const filters_convolution = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl').withDefines({
- 'KERNEL_SIZE_SQUARED': ksize * ksize
- }).withArguments('image', 'kernel'), obj), {});
-
- // Separable convolution
- const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
- 'KERNEL_SIZE': ksize,
- 'AXIS': 0
- }).withArguments('image', 'kernel'), obj), {});
- const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
- 'KERNEL_SIZE': ksize,
- 'AXIS': 1
- }).withArguments('image', 'kernel'), obj), {});
- // Median filter
- const median = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl').withDefines({
- 'KERNEL_SIZE': ksize
- }).withArguments('image'), obj), {});
-
- // Normalize image
- const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
- 'GREYSCALE': 1
- }).withArguments('minmax2d', 'minValue', 'maxValue');
- const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
- 'GREYSCALE': 0
- }).withArguments('minmax2dRGB', 'minValue', 'maxValue');
-
- // Nightvision
- const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
- 'GREYSCALE': 0
- }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
- const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
- 'GREYSCALE': 1
- }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
-
- //
- // Utilities
- //
-
- // Handy conversion for Gaussian filters
- // (symmetric kernel, approx. zero after 3*sigma)
- const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
-
- // Generate a 1D Gaussian kernel
- const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
-
- // Generate a 1D Box filter
- const box = ksize => new Array(ksize).fill(1.0 / ksize);
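- 
- // e.g. (not from the original source): box(5) yields [0.2, 0.2, 0.2, 0.2, 0.2],
- // and ksize2sigma(15) = max(1, 15/6) = 2.5, so gaussian(15) builds a 15-tap
- // Gaussian kernel with sigma = 2.5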
-
- /**
- * SpeedyProgramGroupFilters
- * Image filtering
- */
- class SpeedyProgramGroupFilters extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // convert to greyscale
- .declare('rgb2grey', rgb2grey)
-
- // median filters
- .declare('median3', median[3]) // 3x3 window
- .declare('median5', median[5]) // 5x5 window
- .declare('median7', median[7]) // 7x7 window
-
- // 2D convolution
- .declare('convolution3', filters_convolution[3]) // 3x3 kernel
- .declare('convolution5', filters_convolution[5]) // 5x5 kernel
- .declare('convolution7', filters_convolution[7]) // 7x7 kernel
-
- // 1D separable convolution
- .declare('convolution3x', convolutionX[3]) // 1x3 kernel
- .declare('convolution3y', convolutionY[3]) // 3x1 kernel
- .declare('convolution5x', convolutionX[5]) // 1x5 kernel
- .declare('convolution5y', convolutionY[5]) // 5x1 kernel
- .declare('convolution7x', convolutionX[7]).declare('convolution7y', convolutionY[7]).declare('convolution9x', convolutionX[9]).declare('convolution9y', convolutionY[9]).declare('convolution11x', convolutionX[11]).declare('convolution11y', convolutionY[11]).declare('convolution13x', convolutionX[13]).declare('convolution13y', convolutionY[13]).declare('convolution15x', convolutionX[15]).declare('convolution15y', convolutionY[15])
-
- // normalize image
- .declare('normalizeGreyscale', normalizeGreyscale).declare('normalizeColored', normalizeColored)
-
- // nightvision
- .declare('nightvision', nightvision).declare('nightvisionGreyscale', nightvisionGreyscale).declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255))).declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
-
- // gaussian: separable kernels
- // see also: http://dev.theomader.com/gaussian-kernel-calculator/
- .declare('gaussian3x', (0,convolution.convX)([0.25, 0.5, 0.25])) // sigma ~ 1.0
- .declare('gaussian3y', (0,convolution.convY)([0.25, 0.5, 0.25])).declare('gaussian5x', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])) // sigma ~ 1.0
- .declare('gaussian5y', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('gaussian7x', (0,convolution.convX)(gaussian(7))).declare('gaussian7y', (0,convolution.convY)(gaussian(7))).declare('gaussian9x', (0,convolution.convX)(gaussian(9))).declare('gaussian9y', (0,convolution.convY)(gaussian(9))).declare('gaussian11x', (0,convolution.convX)(gaussian(11))).declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
-
- // box filter: separable kernels
- .declare('box3x', (0,convolution.convX)(box(3))).declare('box3y', (0,convolution.convY)(box(3))).declare('box5x', (0,convolution.convX)(box(5))).declare('box5y', (0,convolution.convY)(box(5))).declare('box7x', (0,convolution.convX)(box(7))).declare('box7y', (0,convolution.convY)(box(7))).declare('box9x', (0,convolution.convX)(box(9))).declare('box9y', (0,convolution.convY)(box(9))).declare('box11x', (0,convolution.convX)(box(11))).declare('box11y', (0,convolution.convY)(box(11)));
- }
- }
- // EXTERNAL MODULE: ./src/core/speedy-namespace.js
- var speedy_namespace = __nested_webpack_require_314174__(6634);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-descriptordb.js
- * A database of binary descriptors in video memory
- */
-
-
-
-
-
-
- //
- // A database of binary descriptors is a texture that stores
- // a set of (descriptor: uint8_t[]) entries.
- //
-
- /** @type {number} we use RGBA8 textures to store the descriptors */
- const DESCRIPTORDB_BYTESPERPIXEL = 4;
-
- /** @type {number} texture size goes up to 16 MB */
- const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (the OpenGL ES 3.0 spec requires MAX_TEXTURE_SIZE to be at least 2048)
-
- /**
- * Utility for generating a database of binary descriptors in video memory
- */
- class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create a database of binary descriptors
- * @param {SpeedyTexture} texture output texture
- * @param {Uint8Array[]} descriptors binary descriptors
- * @param {number} descriptorSize in bytes, a multiple of 4
- * @returns {SpeedyTexture} texture
- */
- static create(texture, descriptors, descriptorSize) {
- utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
- const numberOfDescriptors = descriptors.length;
- const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
-
- // find an appropriate texture size
- const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
- const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
-
- // setup texture parameters
- const stride = 1 << log2stride;
- const width = stride,
- height = stride; // we use powers-of-two
-
- // are we within storage capacity?
- const capacity = width * height / pixelsPerDescriptor;
- if (numberOfDescriptors > capacity) throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
-
- // create texture data
- const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
- for (let i = 0; i < numberOfDescriptors; i++) {
- const byteOffset = i * descriptorSize;
- const descriptor = descriptors[i];
-
- // validate input
- utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
- utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
-
- // write data
- data.set(descriptor, byteOffset);
- }
-
- // log data for further study
- const MEGABYTE = 1048576;
- const totalSize = numberOfDescriptors * descriptorSize;
- utils/* Utils */.A.log(`Creating a ${width}x${height} database of ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors ` + `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`);
-
- // upload to the texture
- texture.resize(width, height);
- texture.upload(data);
- return texture;
- }
- }
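- 
- /*
- * Worked example (not from the original source): storing 1000 descriptors of
- * 32 bytes each gives pixelsPerDescriptor = 32/4 = 8. Then
- * n = log2(8 * 1000) / 2 ~ 6.48, so log2stride = ceil(n) = 7 and the texture
- * is 128x128 RGBA8, with a capacity of 128*128/8 = 2048 descriptors, which
- * comfortably holds the 1000 entries.
- */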
- ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-lsh.js
- * GPU-based LSH tables for fast matching of binary descriptors
- */
-
-
-
-
-
-
- /*
- * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
- * ------------------------------------------------
- *
- * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
- * Indices of keypoint descriptors are stored in several tables, each with many
- * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
- * size to match the keypoints.
- *
- * Buckets in video memory may get full. Wouldn't it be cool if we could use a
- * probabilistic approach to let us work within their storage capacity?
- *
- * Let there be n buckets in a table, each with storage capacity c (holding
- * up to c elements). Buckets are numbered from 0 to n-1.
- *
- * We pick uniformly a random bucket to store a new element in the table. Let
- * X be the chosen bucket. The probability that we'll store the new element in
- * any particular bucket k is:
- *
- * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
- *
- * On average, each new element stored in the table inserts 1/n of an element
- * in each bucket. If we add m new elements to the table, each bucket receives
- * m/n elements, on average(*).
- *
- * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
- * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
- * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
- *
- * Now let Yi be the number of elements inserted in bucket i in m additions to
- * the table. We model Yi as Poisson(m/n), since on average, m additions to
- * the table result in m/n new elements being inserted in bucket i. Buckets
- * are picked independently. Hence, for all i, the probability that we insert
- * q elements in bucket i in m additions to the table is:
- *
- * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
- *
- * Given that each bucket has storage capacity c, we require Yi <= c with a
- * high probability p (say, p = 0.99). This means that, in m additions, we
- * don't want to exceed the capacity c with high probability. So, let us find
- * a (large) value of m such that:
- *
- * P(Yi <= c) >= p
- *
- * Sounds good! We can find the largest matching m using binary search.
- *
- * I don't think we need to enforce a high probability that ALL buckets stay
- * within their capacity - n is large, we need to use the available space, and
- * we have multiple tables anyway.
- *
- * In practice, the assumption that buckets are picked uniformly doesn't hold:
- * keypoints that are nearby tend to have similar descriptors and buckets are
- * picked according to those descriptors. Still, this model works well enough
- * in practice and it is simple! That's what I like about it!
- *
- * ... now, how I actually do the matching is the theme of the next episode!
- */
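-
- /*
- * A minimal numerical sketch of the model above. The production code is
- * cumulativePoisson() and findTableCapacity(), defined further below; this helper
- * and its name are illustrative only.
- */
- function sketchTableCapacity(n, c, p) {
- // P(Y <= k) for Y ~ Poisson(lambda); fine for small k
- const poissonCDF = (lambda, k) => {
- let sum = 1, term = 1;
- for (let i = 1; i <= k; i++) sum += (term *= lambda / i);
- return sum * Math.exp(-lambda);
- };
- // binary search for the largest m in [1, n*c] with P(Yi <= c) >= p, where Yi ~ Poisson(m/n)
- let lo = 1, hi = n * c;
- while (lo < hi) {
- const m = Math.ceil((lo + hi) / 2); // bias upward so the search terminates
- if (poissonCDF(m / n, c) >= p) lo = m; else hi = m - 1;
- }
- return lo;
- }
- // e.g., sketchTableCapacity(1 << 15, 2, 0.99) lands near the value computed by
- // findTableCapacity(15, 2, 0.99) below (the capacity of a bucketCapacity = 2 table)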
-
- /** @type {number} Default number of tables in a LSH data structure */
- const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
-
- /** @type {number} Default number of bits of a hash */
- const LSH_DEFAULT_HASH_SIZE = 15;
-
- /** @type {number[]} Acceptable number of tables for a LSH data structure */
- const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
-
- /** @type {number[]} Acceptable values for hashSize, in bits */
- const LSH_ACCEPTABLE_HASH_SIZES = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20];
-
- /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
- const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32, 64];
-
- /**
- * @typedef {Object} LSHProfile LSH profile
- * @property {string} name name of the profile
- * @property {number} capacity maximum number of keypoints that can be stored in such a table
- * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
- * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
- * @property {number} bucketCapacity maximum number of entries of a bucket of a table
- */
-
- /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
- const generateLSHProfiles = (t, h, p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [{
- name: 'x-small',
- bucketCapacity: 1,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 1, p)
- }, {
- name: 'small',
- bucketCapacity: 2,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 2, p)
- }, {
- name: 'small-plus',
- bucketCapacity: 3,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 3, p)
- }, {
- name: 'medium',
- bucketCapacity: 4,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 4, p)
- }, {
- name: 'medium-plus',
- bucketCapacity: 5,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 5, p)
- }, {
- name: 'large',
- bucketCapacity: 6,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 6, p)
- }, {
- name: 'x-large',
- bucketCapacity: 8,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 8, p)
- }];
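-
- /*
- * How the profiles are meant to be consumed. This mirrors the selection done in the
- * SpeedyLSH constructor below; the helper name and its defaults are illustrative only.
- */
- function pickLSHProfile(descriptorCount, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
- const profiles = generateLSHProfiles(tableCount, hashSize, probability); // sorted by increasing capacity
- // pick the smallest profile that fits, or fall back to the largest one
- return profiles.find(profile => descriptorCount <= profile.capacity) || profiles[profiles.length - 1];
- }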
-
- //
- // LSH hash sequences: random bits in increasing order
- // We generate a few sequences (one for each table), each supporting up to LSH_SEQUENCE_MAXLEN hash bits
- // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
- //
-
- /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
- /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
- /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
-
- /** @type {number} maximum number of elements of a sequence */
- const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
-
- /** @type {number} number of sequences in a BitSequences object */
- const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
-
- /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
- const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i + 1) * LSH_SEQUENCE_MAXLEN).sort()), seq);
-
- /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
- const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray((i + 1) * LSH_SEQUENCE_MAXLEN - p, (i + 1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)), seq);
-
- /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
- const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p, o) => (p[o] = f(o), p), {}))(h => ({
- // for 256-bit descriptors
- 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
- // for 512-bit descriptors
- 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN))))
- }));
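-
- /*
- * Reading the flattened layout (illustrative helper, not used by the library):
- * table i owns the slice [i * LSH_SEQUENCE_MAXLEN, (i+1) * LSH_SEQUENCE_MAXLEN) of a
- * BitSequences array. After padding and sorting, its first hashSize entries are the
- * valid bit indices in ascending order; the remaining entries are the 0xBADCAFE padding.
- */
- function bitsOfTable(hashSize, descriptorSize, tableIndex) {
- const seq = LSH_SEQUENCES[hashSize][descriptorSize]; // e.g., hashSize = 15, descriptorSize = 32
- const offset = tableIndex * LSH_SEQUENCE_MAXLEN;
- return seq.subarray(offset, offset + hashSize); // the same slice that _hashCodes() reads below
- }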
-
- //
- // Misc
- //
-
- /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
- const LSH_BYTESPERPIXEL = 4;
-
- /** @type {function(number): number} next power of 2 */
- const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
-
- /**
- * GPU-based LSH tables for fast matching of binary descriptors
- */
- class SpeedyLSH {
- /**
- * Constructor
- * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
- * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
- * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
- * @param {number} [tableCount] number of LSH tables, preferably a power of two
- * @param {number} [hashSize] number of bits of a hash of a descriptor
- * @param {number} [probability] probability of no discard events happening in the theoretical model
- */
- constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
- const descriptorCount = descriptors.length;
- const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
- const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
-
- // validate input
- utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
- utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
- utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
- utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
- utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
-
- /** @type {LSHProfile} LSH profile */
- this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
-
- /** @type {number} descriptor size, in bytes */
- this._descriptorSize = descriptorSize;
-
- /** @type {number} number of descriptors */
- this._descriptorCount = descriptorCount;
-
- /** @type {BitSequences} bit sequences */
- this._sequences = this._pickSequences(this._descriptorSize);
-
- /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
- this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
-
- /** @type {SpeedyTexture} a storage of descriptors */
- this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- return this._descriptorSize;
- }
-
- /**
- * Number of descriptors stored in this LSH data structure
- * @returns {number}
- */
- get descriptorCount() {
- return this._descriptorCount;
- }
-
- /**
- * LSH bit sequences
- * @returns {BitSequences}
- */
- get sequences() {
- return this._sequences;
- }
-
- /**
- * Number of bits that make a hash
- * @returns {number}
- */
- get hashSize() {
- return this._profile.hashSize;
- }
-
- /**
- * Maximum number of descriptors that can be stored in a bucket of a table
- * @returns {number}
- */
- get bucketCapacity() {
- return this._profile.bucketCapacity;
- }
-
- /**
- * How many buckets per table do we have?
- * @returns {number}
- */
- get bucketsPerTable() {
- return 1 << this._profile.hashSize;
- }
-
- /**
- * Number of LSH tables
- * @returns {number}
- */
- get tableCount() {
- return this._profile.tableCount;
- }
-
- /**
- * Size of one LSH table, in bytes
- * @returns {number}
- */
- get tableSize() {
- return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
- }
-
- /**
- * Size of all LSH tables combined, in bytes
- * @returns {number}
- */
- get totalSize() {
- // actually, the total memory in VRAM may be a bit larger than
- // this value, depending on the actual size of the texture
- return this.tableCount * this.tableSize;
- }
-
- /**
- * LSH tables texture
- * @returns {SpeedyDrawableTexture}
- */
- get tables() {
- return this._tables;
- }
-
- /**
- * A collection of descriptors
- * @returns {SpeedyDrawableTexture}
- */
- get descriptorDB() {
- return this._descriptorDB;
- }
-
- /**
- * Pick the appropriate LSH sequences for a particular descriptor size
- * @param {number} descriptorSize in bytes
- * @returns {BitSequences}
- */
- _pickSequences(descriptorSize) {
- utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
- utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
- return LSH_SEQUENCES[this.hashSize][descriptorSize];
- }
-
- /**
- * Create LSH tables
- * @param {SpeedyTexture} texture output texture
- * @param {BitSequences} sequences bit sequences
- * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
- * @param {number} descriptorSize in bytes
- * @returns {SpeedyTexture} texture
- */
- _createStaticTables(texture, sequences, descriptors, descriptorSize) {
- const END_OF_LIST = 0xFFFFFFFF;
- const profileName = this._profile.name;
- const tableCapacity = this._profile.capacity;
- const tableCount = this.tableCount;
- const bucketsPerTable = this.bucketsPerTable;
- const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
- const hashSize = this.hashSize;
- const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
- const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
- const textureHeight = Math.ceil(numberOfPixels / textureWidth);
- const numberOfDescriptors = descriptors.length;
-
- // validate input
- utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
- utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
- utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
-
- // log
- const MEGABYTE = 1048576;
- utils/* Utils */.A.log(`Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` + `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` + `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB)`);
-
- // warn the user if there are too many descriptors
- if (numberOfDescriptors > tableCapacity) {
- const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
- utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
- }
-
- // create empty LSH tables
- const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
- const bytes = new Uint8Array(buffer).fill(0xFF);
- const data = new DataView(buffer);
-
- // shuffle the descriptors...
- // it seems like a good idea to handle collisions of similar descriptors,
- // which may be located next to each other in the array
- const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
-
- // for each descriptor
- // do everything in little-endian format!
- const numberOfDiscardedDescriptorsPerTable = new Array(tableCount).fill(0);
- for (let i = 0; i < numberOfDescriptors; i++) {
- const descriptorIndex = permutation[i]; //i;
- const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
-
- // for each table
- for (let table = 0; table < tableCount; table++) {
- // compute hash & memory addresses
- const hash = hashes[table];
- const tableByteOffset = table * bucketsPerTable * bucketSize;
- const bucketByteOffset = tableByteOffset + hash * bucketSize;
-
- // find the end of the list
- let index = END_OF_LIST;
- for (let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
- const byteOffset = bucketByteOffset + entryByteOffset;
- index = data.getUint32(byteOffset, true);
-
- // add the keypoint
- if (index == END_OF_LIST) {
- data.setUint32(byteOffset, descriptorIndex, true);
- break;
- }
- }
-
- // note: if the bucket is full, we just discard the entry :\
- // we give this event a probabilistic treatment (see above),
- // so it happens with low probability
- if (index != END_OF_LIST) numberOfDiscardedDescriptorsPerTable[table]++;
- }
- }
-
- // log data for further study
- const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
- const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
- utils/* Utils */.A.log(`When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` + `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` + `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` + `Minimum: ${Math.min(...profile).toFixed(2)}%. ` + `Table capacity: ${tableCapacity}.`);
-
- // upload the LSH tables to the GPU
- texture.resize(textureWidth, textureHeight);
- texture.upload(bytes);
- return texture;
- }
-
- /**
- * Pick bits from a binary descriptor
- * @param {Uint8Array} descriptor a single descriptor
- * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
- * @returns {number[]} hash code for each table
- */
- _hashCodes(descriptor, sequences) {
- const tableCount = this.tableCount;
- const hashSize = this.hashSize;
- const bucketsPerTable = this.bucketsPerTable;
- const hashes = new Array(tableCount);
- //const descriptorSize = descriptor.length;
-
- // just to be sure...
- utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN && sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount);
-
- // for each table
- for (let table = 0; table < tableCount; table++) {
- const offset = LSH_SEQUENCE_MAXLEN * table;
-
- // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
- let hash = 0;
- for (let i = 0; i < hashSize; i++) {
- let bit = sequences[offset + i]; // index of the bit to pick from the descriptor
- let b = bit >>> 3; // byte of the descriptor that holds the bit
- let m = 1 << (bit & 7); // mask of the bit within that byte
-
- //Utils.assert(b < descriptorSize);
- hash = hash << 1 | (descriptor[b] & m) != 0;
- }
-
- // validate & store
- utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
- hashes[table] = hash;
- }
-
- // done!
- return hashes;
- }
- }
-
- /**
- * Compute P(X <= k), where X ~ Poisson(lambda)
- * @param {number} lambda positive number
- * @param {number} k non-negative integer
- * @returns {number}
- */
- function cumulativePoisson(lambda, k) {
- const exp = Math.exp(-lambda);
- let sum = 1,
- fat = 1,
- pow = 1;
-
- // k should be small!!!
- for (let i = 1; i <= k; i++) sum += (pow *= lambda) / (fat *= i);
- return sum * exp;
- }
-
- /**
- * Find the maximum number of keypoint descriptors that a table can hold
- * @param {number} hashSize positive integer
- * @param {number} bucketCapacity positive integer
- * @param {number} [probability] probability of no discard events happening in the theoretical model
- * @return {number} optimal table capacity
- */
- function findTableCapacity(hashSize, bucketCapacity, probability = 0.99) {
- const n = 1 << hashSize; // number of buckets
- const c = bucketCapacity;
- const p = probability;
- let l = 1,
- r = n * c; // watch for overflow!
- let m = 0,
- pm = 0;
-
- // binary search
- while (l < r) {
- m = Math.floor((l + r) / 2);
- pm = cumulativePoisson(m / n, c);
- if (pm > p) l = m + 1; // equivalently: if (1 - pm < 1 - p)
- else r = m;
- }
- return m;
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * keypoints.js
- * Facade for various keypoint detection algorithms
- */
-
-
-
-
-
-
-
- // FAST corner detector
- const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl').withDefines({
- 'FAST_TYPE': 916
- }).withArguments('corners', 'pyramid', 'lod', 'threshold');
-
- // Harris corner detector
- const harris = [1, 3, 5, 7].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl').withDefines({
- 'WINDOW_SIZE': win
- }).withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian'), obj), {});
- const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl').withArguments('corners', 'iterationNumber');
- const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl').withArguments('corners', 'maxScore', 'quality');
-
- // Subpixel refinement
- const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 0
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 1
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 2
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 3
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
-
- // Scale refinement
- const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
- 'METHOD': 0
- }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
- 'METHOD': 1
- }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
-
- // Pixel allocation
- const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
- const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
- const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl').withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // ORB descriptors
- const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl').withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
- const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl').withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Non-maximum suppression
- const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
- 'MULTISCALE': 0
- }).withArguments('image', 'lodStep');
- const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
- 'MULTISCALE': 1
- }).withArguments('image', 'lodStep');
- const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl').withArguments('corners');
- const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
- 'USE_LAPLACIAN': 1
- }).withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
- const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
- 'USE_LAPLACIAN': 0
- }).withArguments('corners', 'pyramid', 'lodStep');
- const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl').withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
-
- // Keypoint tracking & optical-flow
- const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl').withDefines({
- 'WINDOW_SIZE': win
- }).withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {});
- const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl').withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Brute-force matching
- const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 0
- });
- const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 1
- });
- const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
- const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 32,
- 'NUMBER_OF_KEYPOINTS_PER_PASS': 16
- }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
- const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 64,
- 'NUMBER_OF_KEYPOINTS_PER_PASS': 8
- }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
-
- // LSH-based KNN matching
- const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 0
- });
- const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 1
- });
- const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => (obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => (obj[hashSize] = [0, 1, 2].reduce((obj, level) => (obj[level] = (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': descriptorSize,
- 'HASH_SIZE': hashSize,
- 'LEVEL': level,
- 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
- 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT
- }).withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {}), obj), {}), obj), {});
- const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
-
- // Keypoint sorting
- const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 1
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 2
- }).withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
- const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 3
- }).withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
-
- // Keypoint mixing
- const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 1
- }).withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
- const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 2
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
- const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 3
- }).withArguments('array', 'blockSize');
- const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 5
- }).withArguments('array');
- const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 4
- }).withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Keypoint encoding
- const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
- 'FS_OUTPUT_TYPE': 2,
- 'STAGE': 1
- }).withArguments('corners');
- const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl').withDefines({
- 'FS_OUTPUT_TYPE': 2,
- 'FS_USE_CUSTOM_PRECISION': 1,
- 'STAGE': 2
- }).withArguments('lookupTable', 'blockSize', 'width', 'height');
- const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
- 'STAGE': -1
- }).withArguments('lookupTable');
- const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl').withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
- const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl').withArguments('corners', 'imageSize');
- const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl').withDefines({
- 'MAX_ITERATIONS': 6
- }) // dependent texture reads :(
- .withArguments('offsetsImage', 'imageSize');
- const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl').withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl').withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl').withArguments();
- const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl').withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl').withDefines({
- // UBOs can hold at least 16KB of data;
- // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
- // according to the GL ES 3 reference.
- // Each keypoint uses 16 bytes (vec4)
- 'BUFFER_SIZE': 1024 //16384 / 16
- }).withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Geometric transformations
- const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl').withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Keypoint filters
- const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl').withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl').withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
- const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 32
- }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
- const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 64
- }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
-
- // Other utilities
- const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl').withDefines({
- 'PERMUTATION_MAXLEN': 2048
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl').withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
-
- /**
- * SpeedyProgramGroupKeypoints
- * Keypoint detection
- */
- class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- //
- // FAST corner detector
- //
- .declare('fast9_16', fast9_16, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Harris corner detector
- //
- .declare('harris1', harris[1], Object.assign({}, this.program.usesPingpongRendering())).declare('harris3', harris[3], Object.assign({}, this.program.usesPingpongRendering())).declare('harris5', harris[5], Object.assign({}, this.program.usesPingpongRendering())).declare('harris7', harris[7], Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreFindMax', harrisScoreFindMax, Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreCutoff', harrisScoreCutoff)
-
- //
- // Subpixel refinement
- //
- .declare('subpixelQuadratic1d', subpixelQuadratic1d).declare('subpixelTaylor2d', subpixelTaylor2d).declare('subpixelBicubic', subpixelBicubic).declare('subpixelBilinear', subpixelBilinear)
-
- //
- // Scale refinement
- //
- .declare('refineScaleLoG', refineScaleLoG).declare('refineScaleFAST916', refineScaleFAST916)
-
- //
- // Pixel allocation
- //
- .declare('allocateDescriptors', allocateDescriptors).declare('allocateExtra', allocateExtra).declare('transferToExtra', transferToExtra)
-
- //
- // ORB descriptors
- //
- .declare('orbDescriptor', orbDescriptor).declare('orbOrientation', orbOrientation)
-
- //
- // Non-maximum suppression
- //
- .declare('nonmax', nonMaxSuppression).declare('pyrnonmax', multiscaleNonMaxSuppression).declare('nonmaxSpace', nonmaxSpace).declare('nonmaxScale', nonmaxScale).declare('nonmaxScaleSimple', nonmaxScaleSimple).declare('laplacian', laplacian)
-
- //
- // LK optical-flow
- //
- .declare('lk21', lk[21], Object.assign({}, this.program.usesPingpongRendering())).declare('lk19', lk[19], Object.assign({}, this.program.usesPingpongRendering())).declare('lk17', lk[17], Object.assign({}, this.program.usesPingpongRendering())).declare('lk15', lk[15], Object.assign({}, this.program.usesPingpongRendering())).declare('lk13', lk[13], Object.assign({}, this.program.usesPingpongRendering())).declare('lk11', lk[11], Object.assign({}, this.program.usesPingpongRendering())).declare('lk9', lk[9], Object.assign({}, this.program.usesPingpongRendering())).declare('lk7', lk[7], Object.assign({}, this.program.usesPingpongRendering())).declare('lk5', lk[5], Object.assign({}, this.program.usesPingpongRendering())).declare('lk3', lk[3], Object.assign({}, this.program.usesPingpongRendering())).declare('transferFlow', transferFlow)
-
- //
- // Brute-force KNN matching
- //
- .declare('bfMatcherInitCandidates', bfMatcherInitCandidates).declare('bfMatcherInitFilters', bfMatcherInitFilters).declare('bfMatcherTransfer', bfMatcherTransfer, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher32', bfMatcher32, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher64', bfMatcher64, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // LSH-based KNN matching
- //
- .declare('lshKnnInitCandidates', lshKnnInitCandidates).declare('lshKnnInitFilters', lshKnnInitFilters).declare('lshKnnTransfer', lshKnnTransfer, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Keypoint sorting
- //
- .declare('sortCreatePermutation', sortCreatePermutation).declare('sortMergePermutation', sortMergePermutation, Object.assign({}, this.program.usesPingpongRendering())).declare('sortApplyPermutation', sortApplyPermutation)
-
- //
- // Keypoint mixing
- //
- .declare('mixKeypointsPreInit', mixKeypointsPreInit).declare('mixKeypointsInit', mixKeypointsInit).declare('mixKeypointsSort', mixKeypointsSort, Object.assign({}, this.program.usesPingpongRendering())).declare('mixKeypointsView', mixKeypointsView).declare('mixKeypointsApply', mixKeypointsApply)
-
- //
- // Keypoint encoders
- //
- .declare('encodeNullKeypoints', encodeNullKeypoints).declare('encodeKeypoints', encodeKeypoints).declare('initLookupTable', initLookupTable).declare('sortLookupTable', sortLookupTable, Object.assign({}, this.program.usesPingpongRendering())).declare('viewLookupTable', viewLookupTable).declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets).declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointPositions', encodeKeypointPositions, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointProperties', encodeKeypointProperties).declare('transferOrientation', transferOrientation).declare('uploadKeypoints', uploadKeypoints, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Geometric transformations
- //
- .declare('applyHomography', applyHomography)
-
- //
- // Keypoint filters
- //
- .declare('clipBorder', clipBorder).declare('distanceFilter', distanceFilter).declare('hammingDistanceFilter32', hammingDistanceFilter32).declare('hammingDistanceFilter64', hammingDistanceFilter64)
-
- //
- // Other utilities
- //
- .declare('shuffle', shuffle).declare('clip', clip);
-
- //
- // LSH-based KNN matching
- //
- for (const descriptorSize of Object.keys(lshKnn)) {
- for (const hashSize of Object.keys(lshKnn[descriptorSize])) {
- for (const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
- const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
- this.declare(name, lshKnn[descriptorSize][hashSize][level], Object.assign({}, this.program.usesPingpongRendering()));
- }
- }
- }
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pyramids.js
- * Image pyramids
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
- const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
-
- /**
- * SpeedyProgramGroupPyramids
- * Image pyramids
- */
- class SpeedyProgramGroupPyramids extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // upsampling & downsampling
- .declare('upsample2', upsample2).declare('downsample2', downsample2)
-
- // separable kernels for gaussian smoothing
- // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
- // pick a = 0.4 for gaussian approximation (sigma = 1); see the check after this class
- .declare('smoothX', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('smoothY', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
- /*
- .declare('reduce', conv2D([
- 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
- 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
- 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
- 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
- 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
- ]))
- */
-
- // smoothing for 2x image
- // same rules as above with sum(k) = 2
- // NOTE: this kernel would saturate the image, but we apply it
- // on a 2x upsampled version with lots of zero pixels
- .declare('smoothX2', (0,convolution.convX)([0.1, 0.5, 0.8, 0.5, 0.1])).declare('smoothY2', (0,convolution.convY)([0.1, 0.5, 0.8, 0.5, 0.1], 1.0 / 2.0));
- }
- }
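-
- /*
- * Quick check of the kernel constraints stated in the constructor above (illustrative
- * only; the helper name is made up). With a = 0.4, the rules a + 2c = 2b and
- * a + 2b + 2c = 1 give b = 0.25 and c = 0.05, which is exactly the
- * [0.05, 0.25, 0.4, 0.25, 0.05] kernel passed to convX/convY. The 2x kernel
- * [0.1, 0.5, 0.8, 0.5, 0.1] obeys the same rules, scaled so that sum(k) = 2.
- */
- function checkPyramidKernels() {
- const k1 = [0.05, 0.25, 0.4, 0.25, 0.05]; // a = k1[2], b = k1[1] = k1[3], c = k1[0] = k1[4]
- const k2 = [0.1, 0.5, 0.8, 0.5, 0.1];
- const sum = k => k.reduce((s, x) => s + x, 0);
- const ok1 = Math.abs(sum(k1) - 1) < 1e-9 && Math.abs(k1[2] + 2 * k1[0] - 2 * k1[1]) < 1e-9;
- const ok2 = Math.abs(sum(k2) - 2) < 1e-9 && Math.abs(k2[2] + 2 * k2[0] - 2 * k2[1]) < 1e-9;
- return ok1 && ok2;
- }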
- ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transforms.js
- * Geometric transformations
- */
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Perspective warp
- const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl').withArguments('image', 'inverseHomography');
-
- // Resize image
- const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
- 'INTERPOLATION_METHOD': 0 // Nearest neighbors
- }).withArguments('image');
- const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
- 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
- }).withArguments('image');
-
- // Additive mix (TODO create a new program group?)
- const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl').withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
-
- /**
- * SpeedyProgramGroupTransforms
- * Geometric transformations
- */
- class SpeedyProgramGroupTransforms extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this.declare('warpPerspective', warpPerspective).declare('resizeNearest', resizeNearest).declare('resizeBilinear', resizeBilinear).declare('additiveMix', additiveMix);
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program-center.js
- * An access point to all programs that run on the GPU
- */
-
-
-
-
-
-
-
-
-
- /**
- * An access point to all programs that run on the GPU
- * All program groups can be accessed via this class
- */
- class SpeedyProgramCenter {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu reference to SpeedyGPU
- */
- constructor(gpu) {
- // Note: we instantiate the program groups lazily
-
- /** @type {SpeedyGPU} reference to SpeedyGPU */
- this._gpu = gpu;
-
- /** @type {SpeedyProgramGroupFilters} image filters */
- this._filters = null;
-
- /** @type {SpeedyProgramGroupTransforms} geometric transformations */
- this._transforms = null;
-
- /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
- this._pyramids = null;
-
- /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
- this._keypoints = null;
-
- /** @type {SpeedyProgramGroupUtils} utility programs */
- this._utils = null;
- }
-
- /**
- * Image filters & convolutions
- * @returns {SpeedyProgramGroupFilters}
- */
- get filters() {
- return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
- }
-
- /**
- * Geometric transformations
- * @returns {SpeedyProgramGroupTransforms}
- */
- get transforms() {
- return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
- }
-
- /**
- * Image pyramids & scale-space
- * @returns {SpeedyProgramGroupPyramids}
- */
- get pyramids() {
- return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
- }
-
- /**
- * Keypoint detection & description
- * @returns {SpeedyProgramGroupKeypoints}
- */
- get keypoints() {
- return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
- }
-
- /**
- * Utility programs
- * @returns {SpeedyProgramGroupUtils}
- */
- get utils() {
- return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
- }
-
- /**
- * Release all programs from all groups. You'll
- * no longer be able to use any of them.
- * @returns {null}
- */
- release() {
- for (const key in this) {
- if (Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
- const group = this[key];
- if (group instanceof SpeedyProgramGroup) group.release();
- }
- }
- return null;
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture-pool.js
- * Texture pool
- */
-
-
-
-
-
-
- // Constants
- const DEFAULT_CAPACITY = 1024;
- const BUCKET = Symbol('Bucket');
-
- /*
-
- === Heuristics to figure out the capacity of a texture pool ===
-
- 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
-
- 2. Figure out the average texture size in your application (say, 640x360 pixels).
-
- 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
- uses 4 bytes (RGBA format).
-
- 4. Divide the maximum amount of VRAM by the average texture size in bytes
- (say, 72). That's the capacity of the pool.
-
- Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
-
- Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
-
- */
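-
- /*
- * The arithmetic from the heuristics above, as a tiny helper. Illustrative only:
- * the pool itself just takes a capacity number in its constructor.
- */
- function suggestPoolCapacity(vramBudgetInBytes, averageWidth, averageHeight) {
- const bytesPerTexture = averageWidth * averageHeight * 4; // 4 bytes per pixel (RGBA)
- return Math.floor(vramBudgetInBytes / bytesPerTexture);
- }
- // e.g., suggestPoolCapacity(64 * 1048576, 640, 360) === 72, matching the numbers above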
-
- /**
- * @typedef {number} TextureBucketIndex index of a bucket in a pool
- */
-
- /**
- * A bucket
- */
- class TextureBucket {
- /**
- * Constructor
- * @param {SpeedyDrawableTexture} texture managed texture
- * @param {TextureBucketIndex} index index of this bucket
- * @param {TextureBucketIndex} next index of the next bucket
- */
- constructor(texture, index, next) {
- /** @type {SpeedyDrawableTexture} managed texture */
- this.texture = texture;
-
- /** @type {TextureBucketIndex} index of this bucket */
- this.index = index;
-
- /** @type {TextureBucketIndex} index of the next bucket */
- this.next = next;
-
- /** @type {boolean} whether the texture is available or not */
- this.free = true;
- }
- }
-
- /**
- * Texture pool
- */
- class SpeedyTexturePool {
- /**
- * Constructor
- * @param {SpeedyGPU} gpu
- * @param {number} [capacity] number of textures in the pool
- */
- constructor(gpu, capacity = DEFAULT_CAPACITY) {
- utils/* Utils */.A.assert(capacity > 0);
-
- /** @type {TextureBucket[]} buckets */
- this._bucket = Array.from({
- length: capacity
- }, (_, i) => new TextureBucket(null, i, i - 1));
-
- /** @type {TextureBucketIndex} index of an available bucket */
- this._head = capacity - 1;
-
- /** @type {SpeedyGPU} GPU instance */
- this._gpu = gpu;
- }
-
- /**
- * Get a texture from the pool
- * @returns {SpeedyDrawableTexture}
- */
- allocate() {
- if (this._head < 0) throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
- const bucket = this._bucket[this._head];
- bucket.free = false;
- this._head = bucket.next;
- if (bucket.texture == null)
- // lazy instantiation
- bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
- return bucket.texture;
- }
-
- /**
- * Put a texture back in the pool
- * @param {SpeedyDrawableTexture} texture
- * @returns {null}
- */
- free(texture) {
- const bucket = texture[BUCKET];
- utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
- bucket.next = this._head;
- bucket.free = true;
- this._head = bucket.index;
- return null;
- }
-
- /**
- * Release the texture pool
- * @returns {null}
- */
- release() {
- for (let i = 0; i < this._bucket.length; i++) {
- if (this._bucket[i].texture != null) this._bucket[i].texture = this._bucket[i].texture.release();
- }
- return null;
- }
-
- /**
- * Create a texture with a reference to a bucket
- * @param {WebGL2RenderingContext} gl
- * @param {TextureBucket} bucket
- * @returns {SpeedyDrawableTexture}
- */
- static _createManagedTexture(gl, bucket) {
- const texture = new SpeedyDrawableTexture(gl, 1, 1);
- return Object.defineProperty(texture, BUCKET, {
- configurable: false,
- enumerable: false,
- writable: false,
- value: bucket
- });
- }
- }
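-
- /*
- * Typical usage of the pool (sketch: gpu stands for an existing SpeedyGPU instance
- * and the helper name is made up):
- */
- function withPooledTexture(gpu, fn) {
- const pool = new SpeedyTexturePool(gpu, 16); // or reuse an existing pool
- const texture = pool.allocate(); // lazily instantiates a SpeedyDrawableTexture on first use
- try {
- return fn(texture); // resize / render into the texture as needed
- } finally {
- pool.free(texture); // always give the texture back to the pool
- }
- }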
- // EXTERNAL MODULE: ./src/utils/types.js
- var types = __nested_webpack_require_314174__(6049);
- ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-media-source.js
- * Wrappers around <img>, <video>, <canvas>, etc.
- */
-
-
-
-
-
-
- /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
-
- /** Internal token for protected constructors */
- const PRIVATE_TOKEN = Symbol();
-
- /**
- * An abstract media source: a wrapper around native
- * elements such as: HTMLImageElement, HTMLVideoElement,
- * and so on
- * @abstract
- */
- class SpeedyMediaSource {
- /**
- * @protected Constructor
- * @param {symbol} token
- */
- constructor(token) {
- // the constructor is not public
- if (token !== PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
-
- /** @type {SpeedyMediaSourceNativeElement} underlying media object */
- this._data = null;
- }
-
- /**
- * Load a media source
- * @param {SpeedyMediaSourceNativeElement} wrappedObject
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(wrappedObject) {
- if (wrappedObject instanceof HTMLImageElement) return SpeedyImageMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLVideoElement) return SpeedyVideoMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLCanvasElement) return SpeedyCanvasMediaSource.load(wrappedObject);else if (typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas) return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageBitmap) return SpeedyBitmapMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageData) return SpeedyDataMediaSource.load(wrappedObject);else throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
- }
-
- /**
- * The underlying wrapped object
- * @returns {SpeedyMediaSourceNativeElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * Is the underlying media loaded?
- * @returns {boolean}
- */
- isLoaded() {
- return this._data !== null;
- }
-
- /**
- * The type of the underlying media source
- * @abstract
- * @returns {MediaType}
- */
- get type() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Media width, in pixels
- * @abstract
- * @returns {number}
- */
- get width() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Media height, in pixels
- * @abstract
- * @returns {number}
- */
- get height() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Clone this media source
- * @abstract
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Release resources associated with this object
- * @returns {null}
- */
- release() {
- return this._data = null;
- }
-
- /**
- * Load the underlying media
- * @abstract
- * @param {SpeedyMediaSourceNativeElement} element
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(element) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Wait for an event to be triggered in an element
- * @param {Element} element
- * @param {string} eventName
- * @param {number} [timeout] in ms
- * @returns {SpeedyPromise<Element>}
- */
- static _waitUntil(element, eventName, timeout = 30000) {
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
- const timer = setTimeout(() => {
- clear();
- reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
- }, timeout);
- function clear() {
- clearTimeout(timer);
- element.removeEventListener('error', handleError, false);
- element.removeEventListener(eventName, handleSuccess, false);
- }
- function handleError() {
- const hasError = element.error !== null && typeof element.error === 'object';
- const error = hasError ? element.error : {
- code: -1,
- message: ''
- };
- const info = `${error.message} (error code ${error.code})`;
- clear();
- reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
- }
- function handleSuccess() {
- clear();
- resolve(element);
- }
- element.addEventListener('error', handleError, false);
- element.addEventListener(eventName, handleSuccess, false);
- });
- }
- }
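-
- /*
- * Loading sketch (illustrative; the helper name is made up). SpeedyMediaSource.load()
- * picks the right wrapper for the element and resolves once the media is ready:
- */
- function describeMediaSource(element) {
- // element: any SpeedyMediaSourceNativeElement, e.g. an <img> that may still be loading
- return SpeedyMediaSource.load(element).then(source => ({
- type: source.type, // MediaType.Image, MediaType.Video, ...
- width: source.width,
- height: source.height
- }));
- }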
-
- /**
- * Image media source:
- * a wrapper around HTMLImageElement
- */
- class SpeedyImageMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLImageElement} image element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLImageElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Image;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.naturalWidth : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.naturalHeight : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newNode = /** @type {HTMLImageElement} */this._data.cloneNode(true);
- return SpeedyImageMediaSource.load(newNode);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLImageElement} image
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(image) {
- if (this.isLoaded()) this.release();
- if (image.complete && image.naturalWidth !== 0) {
- // already loaded?
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = image;
- resolve(this);
- });
- } else {
- return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
- this._data = image;
- return this;
- });
- }
- }
-
- /**
- * Load the underlying media
- * @param {HTMLImageElement} image
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(image) {
- return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
- }
- }
-
- /**
- * Video media source:
- * a wrapper around HTMLVideoElement
- */
- class SpeedyVideoMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLVideoElement} video element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLVideoElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Video;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- // Warning: videoWidth & videoHeight may change at any time,
- // so these dimensions must not be cached
- return this._data ? this._data.videoWidth : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.videoHeight : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newNode = /** @type {HTMLVideoElement} */this._data.cloneNode(true);
- return SpeedyVideoMediaSource.load(newNode);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(video) {
- if (this.isLoaded()) this.release();
- utils/* Utils */.A.log('Loading a video...');
- video.load();
- return SpeedyVideoMediaSource._waitUntilPlayable(video).then(() => {
- return SpeedyVideoMediaSource._handleAutoplay(video).then(() => {
- this._data = video;
- return this;
- });
- });
- }
-
- /**
- * Load the underlying media
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(video) {
- return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
- }
-
- /**
- * Handle browser quirks concerning autoplay
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<void>} gets rejected if we can't autoplay
- */
- static _handleAutoplay(video) {
- // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
- // Chrome policy: https://developer.chrome.com/blog/autoplay/
- // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
-
- // videos marked as autoplay may not play if not visible on-screen
- // videos marked as autoplay should be muted
- if (video.autoplay /*&& video.muted*/) {
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- const promise = video.play();
-
- // handle older browsers
- if (promise === undefined) {
- resolve();
- return;
- }
-
- // wrap promise
- promise.then(resolve, reject);
- });
- }
-
- // nothing to do
- return speedy_promise/* SpeedyPromise */.i.resolve();
- }
-
- /**
- * Wait for the input video to be playable
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<HTMLVideoElement>} resolves to the input video when it can be played
- */
- static _waitUntilPlayable(video) {
- const TIMEOUT = 30000,
- INTERVAL = 500;
- if (video.readyState >= 3) return speedy_promise/* SpeedyPromise */.i.resolve(video);
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- let ms = 0,
- t = setInterval(() => {
- //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
- if (video.readyState >= 3) {
- clearInterval(t);
- resolve(video);
- } else if ((ms += INTERVAL) >= TIMEOUT) {
- clearInterval(t);
- reject(new utils_errors/* TimeoutError */.MU('The video took too long to load'));
- }
- }, INTERVAL);
- });
- }
- }
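- /*
- * Usage sketch (illustrative only): _handleAutoplay() above follows the autoplay
- * policies linked in its comments. A video meant to start automatically should be
- * muted (and usually inline) before it is wrapped:
- *
- *   const video = document.createElement('video');
- *   video.src = 'movie.webm';  // hypothetical URL
- *   video.muted = true;        // required by most autoplay policies
- *   video.playsInline = true;  // avoid fullscreen takeover on mobile browsers
- *   video.autoplay = true;     // triggers the video.play() call in _handleAutoplay()
- *
- *   SpeedyVideoMediaSource.load(video).then(source => {
- *     console.log(source.width, source.height); // videoWidth x videoHeight
- *   });
- */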
-
- /**
- * Canvas media source:
- * a wrapper around HTMLCanvasElement
- */
- class SpeedyCanvasMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLCanvasElement} canvas element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLCanvasElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Canvas;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
- const newContext = newCanvas.getContext('2d');
- newContext.drawImage(this._data, 0, 0);
- return SpeedyCanvasMediaSource.load(newCanvas);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLCanvasElement} canvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(canvas) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = canvas;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {HTMLCanvasElement} canvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(canvas) {
- return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
- }
- }
-
- /**
- * OffscreenCanvas media source:
- * a wrapper around OffscreenCanvas
- */
- class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {OffscreenCanvas} offscreen canvas element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {OffscreenCanvas}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.OffscreenCanvas;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newCanvas = new OffscreenCanvas(this.width, this.height);
- const newContext = newCanvas.getContext('2d');
- newContext.drawImage(this._data, 0, 0);
- return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
- }
-
- /**
- * Load the underlying media
- * @param {OffscreenCanvas} offscreenCanvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(offscreenCanvas) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = offscreenCanvas;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {OffscreenCanvas} offscreenCanvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(offscreenCanvas) {
- return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
- }
- }
-
- /**
- * Bitmap media source:
- * a wrapper around ImageBitmap
- */
- class SpeedyBitmapMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {ImageBitmap} image bitmap */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {ImageBitmap}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Bitmap;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- createImageBitmap(this._data).then(newBitmap => {
- const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
- newSource._load(newBitmap).then(resolve, reject);
- }, reject);
- });
- }
-
- /**
- * Release resources associated with this object
- * @returns {null}
- */
- release() {
- if (this._data != null) this._data.close();
- return super.release();
- }
-
- /**
- * Load the underlying media
- * @param {ImageBitmap} bitmap
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(bitmap) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = bitmap;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {ImageBitmap} bitmap
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(bitmap) {
- return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
- }
- }
-
- /**
- * Data media source:
- * a wrapper around ImageData
- */
- class SpeedyDataMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {ImageData} image data */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {ImageData}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Data;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const imageDataCopy = new ImageData(new Uint8ClampedArray(this._data.data), this._data.width, this._data.height);
- return SpeedyDataMediaSource.load(imageDataCopy);
- }
-
- /**
- * Load the underlying media
- * @param {ImageData} imageData
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(imageData) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = imageData;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {ImageData} imageData
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(imageData) {
- return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
- }
- }
- // EXTERNAL MODULE: ./src/utils/observable.js
- var observable = __nested_webpack_require_314174__(3211);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-gpu.js
- * GPU-accelerated routines for Computer Vision
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * GPU-accelerated routines for Computer Vision
- */
- class SpeedyGPU extends observable/* Observable */.c {
- /**
- * Constructor
- */
- constructor() {
- super();
-
- /** @type {SpeedyGL} cached reference */
- this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
-
- /** @type {SpeedyProgramCenter} GPU-based programs */
- this._programs = new SpeedyProgramCenter(this);
-
- /** @type {SpeedyTexturePool} texture pool */
- this._texturePool = new SpeedyTexturePool(this);
-
- // recreate the state if necessary
- this._speedyGL.subscribe(this._reset, this);
- }
-
- /**
- * Access point to all GPU programs
- * @returns {SpeedyProgramCenter}
- */
- get programs() {
- return this._programs;
- }
-
- /**
- * The WebGL Rendering Context
- * Be careful not to cache this, as the WebGL Rendering Context may be lost!
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._speedyGL.gl;
- }
-
- /**
- * Internal canvas
- * @returns {HTMLCanvasElement}
- */
- get canvas() {
- return this._speedyGL.canvas;
- }
-
- /**
- * Texture pool
- * @returns {SpeedyTexturePool}
- */
- get texturePool() {
- return this._texturePool;
- }
-
- /**
- * Renders a texture to the canvas
- * @param {SpeedyTexture} texture
- * @returns {HTMLCanvasElement} returned for convenience
- */
- renderToCanvas(texture) {
- const width = texture.width;
- const height = texture.height;
- const canvas = this.canvas;
-
- // do we need to resize the canvas?
- if (width > canvas.width || height > canvas.height) {
- utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
- canvas.width = width;
- canvas.height = height;
- }
-
- // render
- this.programs.utils.renderToCanvas.outputs(width, height, null);
- this.programs.utils.renderToCanvas(texture);
-
- // done!
- return canvas;
- }
-
- /**
- * Upload an image to the GPU
- * @param {SpeedyMediaSource} source
- * @param {SpeedyTexture} outputTexture
- * @returns {SpeedyTexture} outputTexture
- */
- upload(source, outputTexture) {
- return outputTexture.upload(source.data, source.width, source.height);
- }
-
- /**
- * Releases resources
- * @returns {null}
- */
- release() {
- utils/* Utils */.A.assert(!this.isReleased());
-
- // release internal components
- this._programs = this._programs.release();
- this._texturePool = this._texturePool.release();
-
- // unsubscribe
- this._speedyGL.unsubscribe(this._reset);
- return null;
- }
-
- /**
- * Has this SpeedyGPU been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._programs == null;
- }
-
- /**
- * Lose & restore the WebGL context (useful for testing purposes)
- * @returns {SpeedyPromise<void>} resolves as soon as the context is restored
- */
- loseAndRestoreWebGLContext() {
- return this._speedyGL.loseAndRestoreContext().then(() => void 0);
- }
-
- /**
- * Reset the internal state
- * (called on context reset)
- */
- _reset() {
- if (this.isReleased()) return;
- this._programs = new SpeedyProgramCenter(this);
- this._texturePool = new SpeedyTexturePool(this);
- this._notify();
- }
- }
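- /*
- * Usage sketch (illustrative only; SpeedyGPU is an internal component). Assuming a
- * loaded SpeedyMediaSource `source` and a SpeedyTexture `texture` obtained elsewhere:
- *
- *   const gpu = new SpeedyGPU();
- *   gpu.upload(source, texture);  // copies source.data into the texture
- *   document.body.appendChild(gpu.renderToCanvas(texture)); // draws it on the internal canvas
- *   gpu.release();                // release programs & texture pool when done
- */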
- ;// CONCATENATED MODULE: ./src/core/speedy-size.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-size.js
- * Size of a rectangle
- */
-
- /**
- * Size of a rectangle
- */
- class SpeedySize {
- /**
- * Constructor
- * @param {number} width non-negative number
- * @param {number} height non-negative number
- */
- constructor(width, height) {
- /** @type {number} width */
- this._width = Math.max(0, +width);
-
- /** @type {number} height */
- this._height = Math.max(0, +height);
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * Width
- * @returns {number}
- */
- get width() {
- return this._width;
- }
-
- /**
- * Width
- * @param {number} value
- */
- set width(value) {
- this._width = Math.max(0, +value);
- }
-
- /**
- * Height
- * @returns {number}
- */
- get height() {
- return this._height;
- }
-
- /**
- * Height
- * @param {number} value
- */
- set height(value) {
- this._height = Math.max(0, +value);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedySize(${this.width}, ${this.height})`;
- }
-
- /**
- * Is this size equal to anotherSize?
- * @param {SpeedySize} anotherSize
- * @returns {boolean}
- */
- equals(anotherSize) {
- return this.width === anotherSize.width && this.height === anotherSize.height;
- }
-
- /**
- * The area of the rectangle
- * @returns {number}
- */
- area() {
- return this.width * this.height;
- }
- }
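- /*
- * Usage sketch (illustrative only):
- *
- *   const a = new SpeedySize(640, 480);
- *   const b = new SpeedySize(640, 480);
- *   console.log(a.toString());  // "SpeedySize(640, 480)"
- *   console.log(a.equals(b));   // true
- *   console.log(a.area());      // 307200
- */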
- ;// CONCATENATED MODULE: ./src/core/speedy-media.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-media.js
- * SpeedyMedia implementation
- */
-
-
-
-
-
-
-
-
-
-
- /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
-
- /**
- * @typedef {object} SpeedyMediaOptions
- * @property {ImageFormat} [format] default is RGBA
- */
-
- /** A helper used to keep the constructor of SpeedyMedia private */
- const speedy_media_PRIVATE_TOKEN = Symbol();
-
- /**
- * SpeedyMedia encapsulates a media element
- * (e.g., image, video, canvas)
- */
- class SpeedyMedia {
- /**
- * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
- * @param {symbol} token
- * @param {SpeedyMediaSource} source
- * @param {SpeedyMediaOptions} [options] options object
- */
- constructor(token, source, options = {}) {
- // private constructor
- if (token !== speedy_media_PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
-
- /** @type {SpeedyMediaSource} media source */
- this._source = source;
-
- /** @type {ImageFormat} format */
- this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
-
- /** @type {SpeedyMediaOptions} options */
- this._options = Object.freeze(Object.assign(Object.assign({}, options), {}, {
- format: this._format
- }));
-
- // validate
- if (!source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);else if (this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
- }
-
- /**
- * Load a media source
- * Will wait until the HTML media source is loaded
- * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
- * @param {SpeedyMediaOptions} [options] options object
- * @param {boolean} [log] show log message?
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static load(mediaSource, options = {}, log = true) {
- return SpeedyMediaSource.load(mediaSource).then(source => {
- utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
-
- // FIXME user could pass an invalid format in options if ImageFormat is made public
- const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
-
- // show log message
- if (log) utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
-
- // done!
- return media;
- });
- }
-
- /**
- * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
- * @returns {SpeedyMediaSourceNativeElement} the media element
- */
- get source() {
- return this._source ? this._source.data : null;
- }
-
- /**
- * The type of the media attached to this SpeedyMedia object
- * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
- */
- get type() {
- if (this.isReleased()) return 'unknown';
- switch (this._source.type) {
- case types/* MediaType */.zu.Image:
- return 'image';
- case types/* MediaType */.zu.Video:
- return 'video';
- case types/* MediaType */.zu.Canvas:
- return 'canvas';
- case types/* MediaType */.zu.OffscreenCanvas:
- return 'offscreen-canvas';
- case types/* MediaType */.zu.Bitmap:
- return 'bitmap';
- case types/* MediaType */.zu.Data:
- return 'data';
- default:
- // this shouldn't happen
- return 'unknown';
- }
- }
-
- /**
- * Gets the width of the media
- * @returns {number} media width
- */
- get width() {
- return this._source ? this._source.width : 0;
- }
-
- /**
- * Gets the height of the media
- * @returns {number} media height
- */
- get height() {
- return this._source ? this._source.height : 0;
- }
-
- /**
- * The size of this media, in pixels
- * @returns {SpeedySize}
- */
- get size() {
- return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
- }
-
- /**
- * Returns a read-only object featuring advanced options
- * related to this SpeedyMedia object
- * @returns {SpeedyMediaOptions}
- */
- get options() {
- return this._options;
- }
-
- /**
- * Releases resources associated with this media
- * @returns {null}
- */
- release() {
- if (!this.isReleased()) {
- utils/* Utils */.A.log('Releasing SpeedyMedia object...');
- this._source = this._source.release();
- }
- return null;
- }
-
- /**
- * Has this media been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._source == null;
- }
-
- /**
- * Clones the SpeedyMedia object
- * @returns {SpeedyPromise<SpeedyMedia>} resolves to a clone of this object
- */
- clone() {
- // has the media been released?
- if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
-
- // clone the object
- const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
-
- // done!
- return speedy_promise/* SpeedyPromise */.i.resolve(clone);
- }
-
- /**
- * Converts the media to an ImageBitmap
- * @returns {SpeedyPromise<ImageBitmap>}
- */
- toBitmap() {
- if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');else if (!this._source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');else if (this._source.type == types/* MediaType */.zu.Bitmap) return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);else return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
- }
- }
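- /*
- * Usage sketch (illustrative only; in the public API this is typically reached via
- * Speedy.load(), an assumption here). Using the static method shown above directly:
- *
- *   const img = document.querySelector('img'); // an already-loaded <img>
- *   SpeedyMedia.load(img).then(media => {
- *     console.log(media.type, media.width, media.height); // "image", naturalWidth, naturalHeight
- *     return media.toBitmap();
- *   }).then(bitmap => {
- *     bitmap.close(); // an ImageBitmap; close it when no longer needed
- *   });
- */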
- ;// CONCATENATED MODULE: ./src/core/speedy-platform.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-platform.js
- * Utilities to query information about the graphics driver
- */
-
-
-
-
- /**
- * Utilities to query information about the graphics driver. This information
- * may or may not be available, depending on the privacy settings of the web
- * browser. In addition, it may be more or less accurate in different browsers.
- */
- class SpeedyPlatform extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Renderer string of the graphics driver
- * @returns {string}
- */
- static get renderer() {
- return speedy_gl/* SpeedyGL */.c.instance.renderer;
- }
-
- /**
- * Vendor string of the graphics driver
- * @returns {string}
- */
- static get vendor() {
- return speedy_gl/* SpeedyGL */.c.instance.vendor;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-vector.js
- * Vectors
- */
-
- /**
- * 2D vector of floating-point numbers
- */
- class SpeedyVector2 {
- /**
- * Create a 2D vector
- * @param {number} x
- * @param {number} y
- */
- constructor(x, y) {
- /** @type {number} x coordinate */
- this._x = +x;
-
- /** @type {number} y coordinate */
- this._y = +y;
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * x-coordinate
- * @returns {number}
- */
- get x() {
- return this._x;
- }
-
- /**
- * x-coordinate
- * @param {number} value
- */
- set x(value) {
- this._x = +value;
- }
-
- /**
- * y-coordinate
- * @returns {number}
- */
- get y() {
- return this._y;
- }
-
- /**
- * y-coordinate
- * @param {number} value
- */
- set y(value) {
- this._y = +value;
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
- }
-
- /**
- * Is this vector equal to v?
- * @param {SpeedyVector2} v
- * @returns {boolean}
- */
- equals(v) {
- return this.x === v.x && this.y === v.y;
- }
-
- /**
- * Dot product between this vector and another vector
- * @param {SpeedyVector2} v another vector
- * @returns {number}
- */
- dot(v) {
- return this.x * v.x + this.y * v.y;
- }
-
- /**
- * The distance between this vector and another vector
- * @param {SpeedyVector2} v another vector
- * @returns {number}
- */
- distanceTo(v) {
- const dx = this.x - v.x;
- const dy = this.y - v.y;
- return Math.sqrt(dx * dx + dy * dy);
- }
-
- /**
- * Euclidean norm
- * @returns {number}
- */
- length() {
- return Math.sqrt(this.x * this.x + this.y * this.y);
- }
-
- /**
- * Returns a normalized version of this vector
- * @returns {SpeedyVector2}
- */
- normalized() {
- const len = this.length();
- if (len > 0.0) return new SpeedyVector2(this.x / len, this.y / len);else return new SpeedyVector2(0.0, 0.0);
- }
-
- /**
- * Returns a copy of this vector translated by offset
- * @param {SpeedyVector2} offset
- * @returns {SpeedyVector2}
- */
- plus(offset) {
- return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
- }
-
- /**
- * Returns a copy of this vector translated by -offset
- * @param {SpeedyVector2} offset
- * @returns {SpeedyVector2}
- */
- minus(offset) {
- return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
- }
-
- /**
- * Returns a copy of this vector scaled by a scalar
- * @param {number} scalar
- * @returns {SpeedyVector2}
- */
- times(scalar) {
- return new SpeedyVector2(this.x * scalar, this.y * scalar);
- }
- }
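- /*
- * Usage sketch (illustrative only):
- *
- *   const v = new SpeedyVector2(3, 4);
- *   const w = new SpeedyVector2(1, 0);
- *   console.log(v.length());                     // 5
- *   console.log(v.dot(w));                       // 3
- *   console.log(v.distanceTo(w));                // sqrt(20), approximately 4.472
- *   console.log(v.normalized().toString());      // "SpeedyVector2(0.60000, 0.80000)"
- *   console.log(v.plus(w.times(2)).toString());  // "SpeedyVector2(5.00000, 4.00000)"
- */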
- ;// CONCATENATED MODULE: ./src/core/speedy-point.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-point.js
- * Points in space
- */
-
-
-
- /**
- * 2D point
- */
- class SpeedyPoint2 {
- /**
- * Create a 2D point
- * @param {number} x
- * @param {number} y
- */
- constructor(x, y) {
- /** @type {number} x coordinate */
- this._x = +x;
-
- /** @type {number} y coordinate */
- this._y = +y;
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * x-coordinate
- * @returns {number}
- */
- get x() {
- return this._x;
- }
-
- /**
- * x-coordinate
- * @param {number} value
- */
- set x(value) {
- this._x = +value;
- }
-
- /**
- * y-coordinate
- * @returns {number}
- */
- get y() {
- return this._y;
- }
-
- /**
- * y-coordinate
- * @param {number} value
- */
- set y(value) {
- this._y = +value;
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
- }
-
- /**
- * Add a vector to this point
- * @param {SpeedyVector2} v
- * @returns {SpeedyPoint2}
- */
- plus(v) {
- return new SpeedyPoint2(this.x + v.x, this.y + v.y);
- }
-
- /**
- * Subtracts a point p from this point
- * @param {SpeedyPoint2} p
- * @returns {SpeedyVector2}
- */
- minus(p) {
- return new SpeedyVector2(this.x - p.x, this.y - p.y);
- }
-
- /**
- * Is this point equal to p?
- * @param {SpeedyPoint2} p
- * @returns {boolean}
- */
- equals(p) {
- return this.x === p.x && this.y === p.y;
- }
- }
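- /*
- * Usage sketch (illustrative only): points and vectors compose as documented above;
- * point plus vector yields a point, point minus point yields a vector:
- *
- *   const p = new SpeedyPoint2(10, 20);
- *   const q = p.plus(new SpeedyVector2(5, -5));  // SpeedyPoint2(15, 15)
- *   const d = q.minus(p);                        // SpeedyVector2(5, -5)
- *   console.log(d.length());                     // sqrt(50), approximately 7.071
- */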
- // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
- var speedy_matrix_expr = __nested_webpack_require_314174__(6306);
- // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
- var speedy_matrix_wasm = __nested_webpack_require_314174__(6465);
- // EXTERNAL MODULE: ./src/core/speedy-matrix.js
- var speedy_matrix = __nested_webpack_require_314174__(4188);
- ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-factory.js
- * A factory of matrices
- */
-
-
-
-
-
-
-
-
- /**
- * Matrix routines
- */
- class SpeedyMatrixFactory extends Function {
- /**
- * Constructor
- */
- constructor() {
- // This factory can be invoked as a function
- super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
- return this.bind(this);
- }
-
- /**
- * @private
- *
- * Create a new matrix of the specified size, filled with the specified entries
- * @param {number} rows
- * @param {number} [columns]
- * @param {number[]} [entries] in column-major format
- * @returns {SpeedyMatrix}
- */
- _create(rows, columns = rows, entries = []) {
- return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
- }
-
- /**
- * @private
- *
- * Evaluate an expression synchronously and store the result in a new matrix
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix}
- */
- _from(expr) {
- return speedy_matrix.SpeedyMatrix.From(expr);
- }
-
- /**
- * Create a new matrix filled with zeros with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Zeros(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
- }
-
- /**
- * Create a new matrix filled with ones with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Ones(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
- }
-
- /**
- * Create an identity matrix with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Eye(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
- }
-
- /**
- * Returns a promise that resolves immediately if the WebAssembly routines
- * are ready to be used, or as soon as they do become ready
- * @returns {SpeedyPromise<void>}
- */
- ready() {
- return speedy_matrix.SpeedyMatrix.ready();
- }
-
- /**
- * QR decomposition
- * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
- * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
- * @param {SpeedyMatrix} mat is m x n, input
- * @param {object} [options]
- * @param {'reduced'|'full'} [options.mode]
- * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
- */
- qr(Q, R, mat, {
- mode = 'reduced'
- } = {}) {
- const A = mat,
- m = mat.rows,
- n = mat.columns;
-
- // validate shapes & mode
- if (mode == 'reduced') {
- if (Q.rows != m || Q.columns != n || R.rows != n || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
- } else if (mode == 'full') {
- if (Q.rows != m || Q.columns != m || R.rows != m || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
- const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
- const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
-
- // run the WASM routine
- if (mode == 'reduced') wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);else wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
-
- // done!
- return [Q, R];
- });
- }
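- /*
- * Usage sketch for qr() (illustrative only; entries are given in column-major order,
- * and the factory is assumed to be exposed as Speedy.Matrix in the public API).
- * Reduced QR decomposition of a 3x2 matrix:
- *
- *   const factory = new SpeedyMatrixFactory();
- *   const A = factory(3, 2, [1, 1, 0,  0, 1, 1]); // columns (1,1,0) and (0,1,1)
- *   const Q = factory.Zeros(3, 2);
- *   const R = factory.Zeros(2, 2);
- *   factory.qr(Q, R, A, { mode: 'reduced' }).then(([q, r]) => {
- *     // q is 3x2 with orthonormal columns, r is 2x2 upper triangular, and q * r ~ A
- *   });
- */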
-
- /**
- * Solve a possibly overdetermined system of linear
- * equations Ax = b for x using ordinary least squares
- * @param {SpeedyMatrix} solution n x 1, output
- * @param {SpeedyMatrix} A m x n, m >= n, input
- * @param {SpeedyMatrix} b m x 1, input
- * @param {object} [options]
- * @param {'qr'} [options.method] method of resolution
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
- */
- ols(solution, A, b, {
- method = 'qr'
- } = {}) {
- const m = A.rows,
- n = A.columns;
- const x = solution;
-
- // validate shapes
- if (m < n || n == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
- const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
- const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
-
- // run the WASM routine
- switch (method) {
- case 'qr':
- wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
- }
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
-
- // done!
- return solution;
- });
- }
-
- /**
- * Solve a system of linear equations Ax = b for x
- * @param {SpeedyMatrix} solution m x 1, output
- * @param {SpeedyMatrix} A m x m, input
- * @param {SpeedyMatrix} b m x 1, input
- * @param {object} [options]
- * @param {'qr'} [options.method] method of resolution
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
- */
- solve(solution, A, b, {
- method = 'qr'
- } = {}) {
- const m = A.rows,
- n = A.columns;
- const x = solution;
-
- // validate shapes
- if (m != n) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over or underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // select method
- switch (method) {
- case 'qr':
- return this.ols(x, A, b, {
- method
- });
-
- /*case 'lu':
- break;*/
-
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
- }
- });
- }
-
- /**
- * Compute a perspective transformation using 4 correspondences of points
- * @param {SpeedyMatrix} homography 3x3 output - homography matrix
- * @param {SpeedyMatrix} src 2x4 input points - source coordinates
- * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
- */
- perspective(homography, src, dest) {
- // validate shapes
- if (src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
-
- // done!
- return homography;
- });
- }
-
- /**
- * Compute a perspective transformation using n >= 4 correspondences of points
- * @param {SpeedyMatrix} homography 3x3 output - homography matrix
- * @param {SpeedyMatrix} src 2 x n input points - source coordinates
- * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
- * @param {object} [options]
- * @param {'default'|'pransac'} [options.method] method of computation
- * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
- * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
- * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
- * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
- */
- findHomography(homography, src, dest, {
- method = 'default',
- mask = null,
- reprojectionError = 3,
- numberOfHypotheses = 512,
- bundleSize = 128
- } = {}) {
- // validate shapes
- if (src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
- const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- switch (method) {
- case 'pransac':
- utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
- wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
- break;
- case 'default':
- case 'dlt':
- // obsolete
- wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
- }
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
-
- // deallocate matrices
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
-
- // done!
- return homography;
- });
- }
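- /*
- * Usage sketch for findHomography() (illustrative only): robustly estimate a
- * homography from n point correspondences with the 'pransac' method, also obtaining
- * an inliers mask. Point matrices are 2 x n, one point per column; srcPoints and
- * dstPoints are assumed to be arrays of {x, y} pairs gathered elsewhere:
- *
- *   const factory = new SpeedyMatrixFactory();
- *   const n = srcPoints.length;
- *   const src = factory(2, n, srcPoints.flatMap(p => [p.x, p.y]));
- *   const dst = factory(2, n, dstPoints.flatMap(p => [p.x, p.y]));
- *   const H = factory.Zeros(3, 3);
- *   const mask = factory.Zeros(1, n);
- *   factory.findHomography(H, src, dst, {
- *     method: 'pransac',
- *     mask: mask,
- *     reprojectionError: 3 // pixels
- *   }).then(H => {
- *     // H is the 3x3 homography; the i-th entry of mask is 1 for inliers, 0 for outliers
- *   });
- */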
-
- /**
- * Apply a perspective transformation to a set of 2D points
- * @param {SpeedyMatrix} dest 2 x n output matrix
- * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
- * @param {SpeedyMatrix} transform 3x3 homography matrix
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
- */
- applyPerspectiveTransform(dest, src, transform) {
- // validate shapes
- if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 3 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
-
- // run the WASM routine
- wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return dest;
- });
- }
-
- /**
- * Compute an affine transform using 3 correspondences of points
- * @param {SpeedyMatrix} transform 2x3 output - affine transform
- * @param {SpeedyMatrix} src 2x3 input points - source coordinates
- * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to transform
- */
- affine(transform, src, dest) {
- // validate shapes
- if (src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return transform;
- });
- }
-
- /**
- * Compute an affine transformation using n >= 3 correspondences of points
- * @param {SpeedyMatrix} transform 2x3 output - affine transform
- * @param {SpeedyMatrix} src 2 x n input points - source coordinates
- * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
- * @param {object} [options]
- * @param {'default'|'pransac'} [options.method] method of computation
- * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
- * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
- * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
- * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
- */
- findAffineTransform(transform, src, dest, {
- method = 'default',
- mask = null,
- reprojectionError = 3,
- numberOfHypotheses = 512,
- bundleSize = 128
- } = {}) {
- // validate shapes
- if (src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
- const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- switch (method) {
- case 'pransac':
- utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
- wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
- break;
- case 'default':
- wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
- }
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
-
- // deallocate matrices
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return transform;
- });
- }
-
- /**
- * Apply an affine transformation to a set of 2D points
- * @param {SpeedyMatrix} dest 2 x n output matrix
- * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
- * @param {SpeedyMatrix} transform 2x3 affine transform
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
- */
- applyAffineTransform(dest, src, transform) {
- // validate shapes
- if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
-
- // run the WASM routine
- wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return dest;
- });
- }
- }
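- /*
- * Usage sketch for perspective() and applyPerspectiveTransform() (illustrative only):
- * map the corners of a 100x100 square onto an arbitrary quadrilateral, then warp
- * another point with the resulting homography:
- *
- *   const factory = new SpeedyMatrixFactory();
- *   const src = factory(2, 4, [0, 0,  100, 0,  100, 100,  0, 100]);
- *   const dst = factory(2, 4, [10, 5,  120, 15,  110, 130,  0, 95]);
- *   const H = factory.Zeros(3, 3);
- *   const warped = factory.Zeros(2, 1);
- *   factory.perspective(H, src, dst)
- *     .then(() => factory.applyPerspectiveTransform(warped, factory(2, 1, [50, 50]), H))
- *     .then(() => {
- *       // warped now holds the image of the point (50, 50) under H
- *     });
- */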
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-message.js
- * A message that is shared between nodes of a pipeline
- */
-
-
-
-
-
-
-
-
- /**
- * Types of messages
- * @enum {Symbol}
- */
- const SpeedyPipelineMessageType = Object.freeze({
- Nothing: Symbol('Nothing'),
- Image: Symbol('Image'),
- Keypoints: Symbol('Keypoints'),
- Vector2: Symbol('Vector2'),
- LSHTables: Symbol('LSHTables'),
- KeypointMatches: Symbol('KeypointMatches')
- });
-
- /**
- * Diagnostic data
- * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
- */
-
- /**
- * A message that is shared between nodes of a pipeline
- * @abstract
- */
- class SpeedyPipelineMessage {
- /**
- * Constructor
- * @param {SpeedyPipelineMessageType} type message type
- */
- constructor(type) {
- /** @type {SpeedyPipelineMessageType} message type */
- this._type = type;
- }
-
- /**
- * Message type
- * @returns {SpeedyPipelineMessageType}
- */
- get type() {
- return this._type;
- }
-
- /**
- * Checks if the type of this message is equal to parameter type
- * @param {SpeedyPipelineMessageType} type
- * @returns {boolean}
- */
- hasType(type) {
- return this._type === type;
- }
-
- /**
- * Is this an empty message?
- * @returns {boolean}
- */
- isEmpty() {
- return this.hasType(SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this.type);
- return `message of type ${type}`;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Set parameters
- * @abstract
- * @param {...any} args
- * @returns {SpeedyPipelineMessage} this message
- */
- set(...args) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Create a message of the specified type
- * @param {SpeedyPipelineMessageType} type
- * @returns {SpeedyPipelineMessage}
- */
- static create(type) {
- return createMessage(type);
- }
- }
-
- /**
- * An empty message carrying nothing
- */
- class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Set parameters
- * @returns {SpeedyPipelineMessage} this message
- */
- set() {
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name
- };
- }
- }
-
- /**
- * A message transporting an image
- */
- class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Image);
-
- /** @type {SpeedyDrawableTexture} the image we carry */
- this._image = null;
-
- /** @type {ImageFormat} image format */
- this._format = types/* ImageFormat */.f5.RGBA;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} image the image we carry
- * @param {ImageFormat} [format] image format
- * @returns {SpeedyPipelineMessage} this message
- */
- set(image, format = types/* ImageFormat */.f5.RGBA) {
- // set parameters
- this._image = image;
- this._format = format;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- const formatName = Object.keys(types/* ImageFormat */.f5).find(format => types/* ImageFormat */.f5[format] === this.format);
- return {
- type: this.constructor.name,
- format: String(formatName),
- imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
- image: this.image ? '<image data>' /* possibly MBs of data */ : '',
- hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
- };
- }
-
- /**
- * The image we carry
- * @returns {SpeedyDrawableTexture}
- */
- get image() {
- return this._image;
- }
-
- /**
- * Image format
- * @returns {ImageFormat}
- */
- get format() {
- return this._format;
- }
- }
-
- /**
- * A message transporting keypoints
- */
- class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Keypoints);
-
- /** @type {SpeedyDrawableTexture} encoded keypoints */
- this._encodedKeypoints = null;
-
- /** @type {number} descriptor size in bytes */
- this._descriptorSize = 0;
-
- /** @type {number} extra size in bytes */
- this._extraSize = 0;
-
- /** @type {number} encoder length */
- this._encoderLength = 1;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderLength positive integer
- * @returns {SpeedyPipelineMessage} this message
- */
- set(encodedKeypoints, descriptorSize, extraSize, encoderLength) {
- // set parameters
- this._encodedKeypoints = encodedKeypoints;
- this._descriptorSize = descriptorSize | 0;
- this._extraSize = extraSize | 0;
- this._encoderLength = encoderLength | 0;
-
- // validate
- utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
- utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
- utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- descriptorSize: this.descriptorSize,
- extraSize: this.extraSize,
- encoderLength: this.encoderLength,
- encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
- encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * Encoded keypoints
- * @returns {SpeedyDrawableTexture}
- */
- get encodedKeypoints() {
- return this._encodedKeypoints;
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- return this._descriptorSize;
- }
-
- /**
- * Extra size, in bytes
- * @returns {number}
- */
- get extraSize() {
- return this._extraSize;
- }
-
- /**
- * Encoder length
- * @returns {number}
- */
- get encoderLength() {
- return this._encoderLength;
- }
- }
-
- /**
- * A message transporting a set of 2D vectors
- */
- class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Vector2);
-
- /** @type {SpeedyDrawableTexture} the set of vectors */
- this._vectors = null;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} vectors the set of vectors
- * @returns {SpeedyPipelineMessage} this message
- */
- set(vectors) {
- // set parameters
- this._vectors = vectors;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
- vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * The set of vectors
- * @returns {SpeedyDrawableTexture}
- */
- get vectors() {
- return this._vectors;
- }
- }
-
- /**
- * A message transporting LSH tables
- */
- class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.LSHTables);
-
- /** @type {SpeedyLSH} LSH data structure */
- this._lsh = null;
- }
-
- /**
- * Set parameters
- * @param {SpeedyLSH} lsh
- * @returns {SpeedyPipelineMessage} this message
- */
- set(lsh) {
- // set parameters
- this._lsh = lsh;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- lsh: '<LSH tables>'
- };
- }
-
- /**
- * LSH data structure
- * @returns {SpeedyLSH}
- */
- get lsh() {
- return this._lsh;
- }
- }
-
- /**
- * A message transporting a set of keypoint matches
- */
- class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.KeypointMatches);
-
- /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
- this._encodedMatches = null;
-
- /** @type {number} number of matches per keypoint */
- this._matchesPerKeypoint = 1;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} encodedMatches
- * @param {number} matchesPerKeypoint
- * @returns {SpeedyPipelineMessage} this message
- */
- set(encodedMatches, matchesPerKeypoint) {
- // set parameters
- this._encodedMatches = encodedMatches;
- this._matchesPerKeypoint = matchesPerKeypoint | 0;
-
- // validate
- utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- matchesPerKeypoint: this.matchesPerKeypoint,
- encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
- encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * The matches
- * @returns {SpeedyDrawableTexture}
- */
- get encodedMatches() {
- return this._encodedMatches;
- }
-
- /**
- * Number of matches per keypoint
- * @returns {number}
- */
- get matchesPerKeypoint() {
- return this._matchesPerKeypoint;
- }
- }
-
- //
- // Utilities
- //
-
- /** Map message type to message class */
- const MESSAGE_CLASS = Object.freeze({
- [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
- [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
- [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
- [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
- [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
- [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches
- });
-
- /**
- * Create a message of the specified type
- * @param {SpeedyPipelineMessageType} type
- * @returns {SpeedyPipelineMessage}
- */
- function createMessage(type) {
- // note: a Symbol can't be used as an index type in TypeScript (error TS2538),
- // hence the cast below; this is equivalent to Reflect.construct(MESSAGE_CLASS[type], [])
- return new MESSAGE_CLASS[/** @type {any} */(type)]();
- }
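-
- // Illustrative usage sketch (kept as a comment; not executed by this bundle):
- // how a message object is created and filled by the pipeline internals.
- // `someTexture` is a placeholder for a real SpeedyDrawableTexture.
- //
- //   const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image);
- //   msg.set(someTexture);                         // format defaults to ImageFormat.RGBA
- //   msg.hasType(SpeedyPipelineMessageType.Image); // true
- //   msg.isEmpty();                                // false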
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-portspec.js
- * Specification (requirements) of a port of a node of a pipeline
- */
-
-
-
-
- /**
- * A message constraint is a message validation predicate
- * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
- */
-
- /**
- * A validation predicate that validates all messages
- * @type {SpeedyPipelineMessageConstraint}
- */
- const always = message => true;
-
- /**
- * Specification (requirements) of a port of a node of a pipeline
- */
- class SpeedyPipelinePortSpec {
- /**
- * Constructor
- * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
- * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
- */
- constructor(expectedMessageType, messageConstraint = always) {
- /** @type {SpeedyPipelineMessageType} expected message type */
- this._expectedMessageType = expectedMessageType;
-
- /** @type {SpeedyPipelineMessageConstraint} message validation function */
- this._isValidMessage = typeof messageConstraint === 'function' ? messageConstraint : always;
-
- // expect a valid type
- utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Checks if two specs have the same expected type
- * @param {SpeedyPipelinePortSpec} spec
- * @returns {boolean}
- */
- isCompatibleWith(spec) {
- return this._expectedMessageType == spec._expectedMessageType;
- }
-
- /**
- * Is the given message accepted by a port that abides by this specification?
- * @param {SpeedyPipelineMessage} message
- * @returns {boolean}
- */
- accepts(message) {
- return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this._expectedMessageType);
- return `Port expects ${type} satisfying ${this._isValidMessage}`;
- }
-
- /**
- * Expected message type
- * @returns {SpeedyPipelineMessageType}
- */
- get expectedMessageType() {
- return this._expectedMessageType;
- }
- }
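-
- // Illustrative sketch (comment only): a port spec that accepts image messages
- // carrying a square texture. The constraint receives the message itself and
- // returns a boolean. `someImageMessage` is a placeholder.
- //
- //   const spec = new SpeedyPipelinePortSpec(
- //     SpeedyPipelineMessageType.Image,
- //     message => message.image != null && message.image.width === message.image.height
- //   );
- //   spec.accepts(someImageMessage); // true only for square images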
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-port.js
- * Port of a node of a pipeline
- */
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_INPUT_PORT_NAME = 'in';
- const DEFAULT_OUTPUT_PORT_NAME = 'out';
- const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
- const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
-
- /**
- * Diagnostic data
- * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
- */
-
- /**
- * Port of a node of a pipeline
- * @abstract
- */
- class SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- /** @type {string} the name of this port */
- this._name = String(name);
-
- /** @type {SpeedyPipelinePortSpec} the specification of this port */
- this._spec = spec;
-
- /** @type {SpeedyPipelineNode} the node to which this port belongs */
- this._node = node;
-
- /** @type {SpeedyPipelineMessage} the message located in this port */
- this._message = EMPTY_MESSAGE;
-
- // check if we've got an acceptable port name
- utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
- }
-
- /**
- * The name of this port
- * @returns {string}
- */
- get name() {
- return this._name;
- }
-
- /**
- * The node to which this port belongs
- * @returns {SpeedyPipelineNode}
- */
- get node() {
- return this._node;
- }
-
- /**
- * Connect this port to another
- * @abstract
- * @param {SpeedyPipelinePort} port
- */
- connectTo(port) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this an input port?
- * @abstract
- * @returns {boolean}
- */
- isInputPort() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this an output port?
- * @returns {boolean}
- */
- isOutputPort() {
- return !this.isInputPort();
- }
-
- /**
- * Clear the message stored in this port
- */
- clearMessage() {
- this._message = EMPTY_MESSAGE;
- }
-
- /**
- * Is there a valid message located in this port?
- * @returns {boolean}
- */
- hasMessage() {
- return !this._message.isEmpty();
- }
-
- /**
- * Read the message that is in this port
- * @returns {SpeedyPipelineMessage}
- */
- read() {
- if (this._message.isEmpty()) throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
- return this._message;
- }
-
- /**
- * Write a message to this port
- * @param {SpeedyPipelineMessage} message
- */
- write(message) {
- throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
- }
-
- /**
- * Inspect this port for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
- */
- inspect(gpu) {
- return this._message.inspect(gpu);
- }
-
- /**
- * Default port name
- * @abstract
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
- }
-
- /**
- * Output port
- */
- class SpeedyPipelineOutputPort extends SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- super(name, spec, node);
-
- /** @type {SpeedyPipelineMessage} cached message */
- this._cachedMessage = null;
- }
-
- /**
- * Connect this port to another
- * @param {SpeedyPipelineInputPort} port
- */
- connectTo(port) {
- if (!port.isInputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
- port.connectTo(this);
- }
-
- /**
- * Is this an input port?
- * @returns {boolean}
- */
- isInputPort() {
- return false;
- }
-
- /**
- * Write a message to this port
- * @param {SpeedyPipelineMessage} message
- */
- write(message) {
- if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
- this._message = message;
- }
-
- /**
- * Write a message to this port using a cached message object
- * @param {...any} args to be passed to SpeedyPipelineMessage.set()
- */
- swrite(...args) {
- if (this._cachedMessage == null) this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
- this.write(this._cachedMessage.set(...args));
- }
-
- /**
- * Default port name
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- return DEFAULT_OUTPUT_PORT_NAME;
- }
- }
-
- /**
- * Input port
- */
- class SpeedyPipelineInputPort extends SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- super(name, spec, node);
-
- /** @type {SpeedyPipelineOutputPort|null} incoming link */
- this._incomingLink = null;
- }
-
- /**
- * Incoming link
- * @returns {SpeedyPipelineOutputPort|null}
- */
- get incomingLink() {
- return this._incomingLink;
- }
-
- /**
- * Connect this port to another
- * @param {SpeedyPipelineOutputPort} port
- */
- connectTo(port) {
- if (!port.isOutputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);else if (!this._spec.isCompatibleWith(port._spec)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
- this._incomingLink = port;
- }
-
- /**
- * Unlink this port
- */
- disconnect() {
- this._incomingLink = null;
- }
-
- /**
- * Is this an input port?
- * @returns {boolean}
- */
- isInputPort() {
- return true;
- }
-
- /**
- * Receive a message using the incoming link
- * @param {string} [nodeName]
- * @returns {SpeedyPipelineMessage}
- */
- pullMessage(nodeName = '') {
- const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
- if (this._incomingLink == null) throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
- const message = this._incomingLink.read();
- if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
- return this._message = message;
- }
-
- /**
- * Default port name
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- return DEFAULT_INPUT_PORT_NAME;
- }
- }
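-
- // Illustrative sketch (comment only): the message flow between two connected
- // ports. `outPort` and `inPort` stand for an output port and an input port of
- // two already-built nodes with compatible (Image) specs; `texture` and
- // `format` are placeholders.
- //
- //   outPort.connectTo(inPort);       // equivalent to inPort.connectTo(outPort)
- //   outPort.swrite(texture, format); // write via a cached message object
- //   inPort.pullMessage('MyNode');    // read the message from the incoming link
- //   inPort.read();                   // returns the pulled message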
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-portbuilder.js
- * Builder of a port of a node of a pipeline
- */
-
-
-
-
-
-
-
- /**
- * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
- */
-
- /**
- * Builder of a port of a node of a pipeline
- */
- class SpeedyPipelinePortBuilder {
- /**
- * Constructor
- * @param {typeof SpeedyPipelinePort} portClass input or output?
- * @param {string} portName
- */
- constructor(portClass, portName) {
- /** @type {typeof SpeedyPipelinePort} input or output? */
- this._class = portClass;
-
- /** @type {string} port name */
- this._name = String(portName);
-
- /** @type {SpeedyPipelineMessageType} accepted message type */
- this._type = SpeedyPipelineMessageType.Nothing;
-
- /** @type {SpeedyPipelineMessageConstraint} message validation function */
- this._messageConstraint = undefined;
- }
-
- /**
- * Declare that the new port expects a certain type of message
- * @param {SpeedyPipelineMessageType} type expected type
- * @returns {SpeedyPipelinePortBuilder} this builder
- */
- expects(type) {
- utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
- utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
- this._type = type;
- return this;
- }
-
- /**
- * Declare that the new port expects messages satisfying a constraint
- * @param {SpeedyPipelineMessageConstraint} constraint
- * @returns {SpeedyPipelinePortBuilder} this builder
- */
- satisfying(constraint) {
- utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
- utils/* Utils */.A.assert(this._messageConstraint === undefined);
- utils/* Utils */.A.assert(typeof constraint === 'function');
- this._messageConstraint = constraint;
- return this;
- }
-
- /**
- * Build a port
- * @param {SpeedyPipelineNode} node the node to which the new port will belong
- * @returns {SpeedyPipelinePort}
- */
- build(node) {
- const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
- return Reflect.construct(this._class, [this._name, spec, node]);
- }
- }
-
- /**
- * Creates a builder for an input port
- * @param {string} [portName]
- * @returns {SpeedyPipelinePortBuilder}
- */
- function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
- return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
- }
-
- /**
- * Creates a builder for an output port
- * @param {string} [portName]
- * @returns {SpeedyPipelinePortBuilder}
- */
- function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
- return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
- }
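-
- // Illustrative sketch (comment only): declaring the ports of a node with the
- // builders above. build() is normally invoked by SpeedyPipelineNode itself.
- //
- //   const portBuilders = [
- //     InputPort('in').expects(SpeedyPipelineMessageType.Image),
- //     OutputPort('out').expects(SpeedyPipelineMessageType.Keypoints)
- //       .satisfying(message => message.encoderLength > 0)
- //   ];
- //   // each builder later yields a SpeedyPipelinePort via builder.build(node)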
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-node.js
- * Node of a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
- /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
-
- /** Generate a random name for a node */
- const generateRandomName = () => Math.random().toString(16).slice(2);
-
- /** Create an empty input port dictionary */
- const createInputPortDictionary = () => ( /** @type {InputPortDictionary} */Object.create(null));
-
- /** Create an empty output port dictionary */
- const createOutputPortDictionary = () => ( /** @type {OutputPortDictionary} */Object.create(null));
-
- /**
- * Map an array of input ports to an InputPortDictionary whose keys are their names
- * @param {SpeedyPipelineInputPort[]} ports
- * @returns {InputPortDictionary}
- */
- function InputPortDictionary(ports) {
- return ports.reduce((dict, port) => (dict[port.name] = port, dict), createInputPortDictionary());
- }
-
- /**
- * Map an array of output ports to an OutputPortDictionary whose keys are their names
- * @param {SpeedyPipelineOutputPort[]} ports
- * @returns {OutputPortDictionary}
- */
- function OutputPortDictionary(ports) {
- return ports.reduce((dict, port) => (dict[port.name] = port, dict), createOutputPortDictionary());
- }
-
- /** A flag used for debugging purposes */
- let _texView = false;
-
- /**
- * Node of a pipeline
- * @abstract
- */
- class SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = generateRandomName(), texCount = 0, portBuilders = []) {
- /** @type {string} the name of this node */
- this._name = String(name);
-
- /** @type {SpeedyDrawableTexture[]} work texture(s) */
- this._tex = new Array(texCount).fill(null);
-
- // build the ports
- const ports = portBuilders.map(builder => builder.build(this));
- const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ports.filter(port => port.isInputPort());
- const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ports.filter(port => port.isOutputPort());
-
- /** @type {InputPortDictionary} input ports */
- this._inputPorts = InputPortDictionary(inputPorts);
-
- /** @type {OutputPortDictionary} output ports */
- this._outputPorts = OutputPortDictionary(outputPorts);
-
- // validate
- if (this._name.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);else if (portBuilders.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
- }
-
- /**
- * The name of this node
- * @returns {string}
- */
- get name() {
- return this._name;
- }
-
- /**
- * Name and type of this node
- * @returns {string}
- */
- get fullName() {
- return `${this.constructor.name}[${this.name}]`;
- }
-
- /**
- * Find input port by name
- * @param {string} [portName]
- * @returns {SpeedyPipelineInputPort}
- */
- input(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
- if (portName in this._inputPorts) return this._inputPorts[portName];
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
- }
-
- /**
- * Find output port by name
- * @param {string} [portName]
- * @returns {SpeedyPipelineOutputPort}
- */
- output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
- if (portName in this._outputPorts) return this._outputPorts[portName];
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
- }
-
- /**
- * Get data from the input ports and execute
- * the task that this node is supposed to!
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- execute(gpu) {
- let portName;
-
- // clear output ports
- for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
-
- // let the input ports receive what is due
- for (portName in this._inputPorts) this._inputPorts[portName].pullMessage(this.fullName);
-
- // run the task
- const runTask = this._run(gpu);
- if (typeof runTask === 'undefined') return void this._finishExecution(gpu);else return runTask.then(() => this._finishExecution(gpu));
- }
-
- /**
- * Finish the execution of this node;
- * to be called after execute()
- * @param {SpeedyGPU} gpu
- */
- _finishExecution(gpu) {
- // ensure that no output ports are empty
- for (const portName in this._outputPorts) {
- utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
- }
-
- // log diagnostic data for this node / pipeline
- if (settings/* Settings */.w.logging === 'diagnostic') {
- utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
-
- // Inspecting the data has performance implications.
- // It is for diagnostic purposes only, not meant to be done in production!
-
- for (const portName in this._inputPorts) utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
- for (const portName in this._outputPorts) utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
- }
- }
-
- /**
- * Run the specific task of this node
- * @abstract
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- gpu.subscribe(this._allocateWorkTextures, this, gpu);
- this._allocateWorkTextures(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._deallocateWorkTextures(gpu);
- gpu.unsubscribe(this._allocateWorkTextures, this);
- }
-
- /**
- * Clear all ports
- */
- clearPorts() {
- let portName;
- for (portName in this._inputPorts) this._inputPorts[portName].clearMessage();
- for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
- }
-
- /**
- * Find all nodes that feed input to this node
- * @returns {SpeedyPipelineNode[]}
- */
- inputNodes() {
- const nodes = [];
- for (const portName in this._inputPorts) {
- const port = this._inputPorts[portName];
- if (port.incomingLink != null) nodes.push(port.incomingLink.node);
- }
- return nodes;
- }
-
- /**
- * Is this a source of the pipeline?
- * @returns {boolean}
- */
- isSource() {
- return false;
- }
-
- /**
- * Is this a sink of the pipeline?
- * @returns {boolean}
- */
- isSink() {
- return false;
-
- // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
- //return Object.keys(this._outputPorts).length == 0;
- }
-
- /**
- * Allocate work texture(s)
- * @param {SpeedyGPU} gpu
- */
- _allocateWorkTextures(gpu) {
- for (let j = 0; j < this._tex.length; j++) this._tex[j] = gpu.texturePool.allocate();
- }
-
- /**
- * Deallocate work texture(s)
- * @param {SpeedyGPU} gpu
- */
- _deallocateWorkTextures(gpu) {
- for (let j = this._tex.length - 1; j >= 0; j--) this._tex[j] = gpu.texturePool.free(this._tex[j]);
- }
-
- /**
- * Visually inspect a texture for debugging purposes
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} texture
- */
- _visualize(gpu, texture) {
- const canvas = gpu.renderToCanvas(texture);
- if (!_texView) {
- document.body.appendChild(canvas);
- _texView = true;
- }
- }
- }
-
- /**
- * Source node (a node with no input ports)
- * @abstract
- */
- class SpeedyPipelineSourceNode extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
- utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
- }
-
- /**
- * Is this a source of the pipeline?
- * @returns {boolean}
- */
- isSource() {
- return true;
- }
- }
-
- /**
- * Sink node (a node with no output ports)
- * @abstract
- */
- class SpeedyPipelineSinkNode extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
- utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
- }
-
- /**
- * Export data from this node to the user
- * @abstract
- * @returns {SpeedyPromise<any>}
- */
- export() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this a sink of the pipeline?
- * @returns {boolean}
- */
- isSink() {
- return true;
- }
- }
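-
- // Illustrative sketch (comment only): the minimal shape of a concrete node.
- // A subclass declares its ports via builders and implements _run(), reading
- // from its input ports and writing to its output ports. This hypothetical
- // node simply forwards its input image.
- //
- //   class MyPassThroughNode extends SpeedyPipelineNode {
- //     constructor(name = undefined) {
- //       super(name, 0, [
- //         InputPort().expects(SpeedyPipelineMessageType.Image),
- //         OutputPort().expects(SpeedyPipelineMessageType.Image)
- //       ]);
- //     }
- //     _run(gpu) {
- //       const { image, format } = this.input().read();
- //       this.output().swrite(image, format);
- //     }
- //   }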
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint-match.js
- * A match between two keypoint descriptors
- */
-
-
-
- // Constants
- const MATCH_NOT_FOUND = -1;
-
- /**
- * A match between two keypoint descriptors
- */
- class SpeedyKeypointMatch {
- /**
- * Constructor
- * @param {number} index index of the stored keypoint, a non-negative integer
- * @param {number} distance a measure of the quality of the match, a non-negative number
- */
- constructor(index, distance) {
- const isValid = distance < globals.MATCH_MAX_DISTANCE;
-
- /** @type {number} index of the stored keypoint */
- this._index = isValid ? index | 0 : MATCH_NOT_FOUND;
-
- /** @type {number} a measure of the quality of the match */
- this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * The index of the stored keypoint
- * @returns {number}
- */
- get index() {
- return this._index;
- }
-
- /**
- * A measure of the quality of the match (lower values indicate better matches)
- * @returns {number}
- */
- get distance() {
- return this._distance;
- }
-
- /**
- * A string representation of the keypoint match
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypointMatch(${this.index},${this.distance})`;
- }
- }
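-
- // Illustrative sketch (comment only): a match whose distance is below
- // globals.MATCH_MAX_DISTANCE keeps its index; otherwise the index becomes
- // MATCH_NOT_FOUND (-1) and the distance becomes +Infinity. `smallDistance`
- // and `hugeDistance` are placeholders on either side of that threshold.
- //
- //   const good = new SpeedyKeypointMatch(7, smallDistance); // good.index === 7
- //   const bad = new SpeedyKeypointMatch(7, hugeDistance);   // bad.index === -1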
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint.js
- * Keypoint class
- */
-
-
-
-
-
-
- /**
- * Represents a keypoint
- */
- class SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null) {
- /** @type {SpeedyPoint2} keypoint position */
- this._position = new SpeedyPoint2(+x, +y);
-
- /** @type {number} level of detail */
- this._lod = +lod;
-
- /** @type {number} rotation in radians */
- this._rotation = +rotation;
-
- /** @type {number} a cornerness measure */
- this._score = +score;
-
- /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
- this._descriptor = descriptor;
- }
-
- /**
- * Converts this keypoint to a descriptive string
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypoint(${this.x},${this.y})`;
- }
-
- /**
- * The position of this keypoint
- * @returns {SpeedyPoint2}
- */
- get position() {
- return this._position;
- }
-
- /**
- * The x-position of this keypoint
- * @returns {number}
- */
- get x() {
- return this._position.x;
- }
-
- /**
- * The x-position of this keypoint
- * @param {number} value
- */
- set x(value) {
- this._position.x = +value;
- }
-
- /**
- * The y-position of this keypoint
- * @returns {number}
- */
- get y() {
- return this._position.y;
- }
-
- /**
- * The y-position of this keypoint
- * @param {number} value
- */
- set y(value) {
- this._position.y = +value;
- }
-
- /**
- * The pyramid level-of-detail from which this keypoint was extracted
- * @returns {number}
- */
- get lod() {
- return this._lod;
- }
-
- /**
- * Scale: 2^lod
- * @returns {number}
- */
- get scale() {
- return Math.pow(2, this._lod);
- }
-
- /**
- * The orientation of the keypoint, in radians
- * @returns {number} Angle in radians
- */
- get rotation() {
- return this._rotation;
- }
-
- /**
- * Score: a cornerness measure
- * @returns {number} Score
- */
- get score() {
- return this._score;
- }
-
- /**
- * Keypoint descriptor
- * @return {SpeedyKeypointDescriptor|null}
- */
- get descriptor() {
- return this._descriptor;
- }
- }
-
- /**
- * Represents a tracked keypoint
- */
- class SpeedyTrackedKeypoint extends SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- * @param {SpeedyVector2} [flow] flow vector
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0, 0)) {
- super(x, y, lod, rotation, score, descriptor);
-
- /** @type {SpeedyVector2} flow vector */
- this._flow = flow;
- }
-
- /**
- * Flow vector
- * @returns {SpeedyVector2}
- */
- get flow() {
- return this._flow;
- }
- }
-
- /**
- * Represents a matched keypoint
- */
- class SpeedyMatchedKeypoint extends SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = []) {
- super(x, y, lod, rotation, score, descriptor);
-
- /** @type {SpeedyKeypointMatch[]} keypoint matches */
- this._matches = matches;
- }
-
- /**
- * Keypoint matches
- * @returns {SpeedyKeypointMatch[]}
- */
- get matches() {
- return this._matches;
- }
- }
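-
- // Illustrative sketch (comment only): keypoint accessors. The scale is
- // derived from the pyramid level-of-detail as 2^lod, e.g. lod = 1 gives
- // scale = 2.
- //
- //   const keypoint = new SpeedyKeypoint(320, 240, 1.0, 0.0, 85.5);
- //   keypoint.position;   // SpeedyPoint2(320, 240)
- //   keypoint.scale;      // 2
- //   keypoint.toString(); // "SpeedyKeypoint(320,240)"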
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline.js
- * A pipeline is a network of nodes in which data flows to a sink
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A dictionary indexed by the names of the sink nodes
- * @typedef {Object<string,any>} SpeedyPipelineOutput
- */
-
- /** @type {SpeedyGPU} shared GPU programs & textures */
- let gpu = null;
-
- /** @type {number} gpu reference count */
- let referenceCount = 0;
-
- /**
- * A pipeline is a network of nodes in which data flows to a sink
- */
- class SpeedyPipeline {
- /**
- * Constructor
- */
- constructor() {
- /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
- this._nodes = [];
-
- /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
- this._sequence = [];
-
- /** @type {boolean} are we running the pipeline at this moment? */
- this._busy = false;
- }
-
- /**
- * Find a node by its name
- * @template {SpeedyPipelineNode} T
- * @param {string} name
- * @returns {T|null}
- */
- node(name) {
- for (let i = 0, n = this._nodes.length; i < n; i++) {
- if (this._nodes[i].name === name) return this._nodes[i];
- }
- return null;
- }
-
- /**
- * Initialize the pipeline
- * @param {...SpeedyPipelineNode} nodes
- * @returns {SpeedyPipeline} this pipeline
- */
- init(...nodes) {
- // validate
- if (this._nodes.length > 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);else if (nodes.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
-
- // create a GPU instance and increase the reference count
- if (0 == referenceCount++) {
- utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
- gpu = new SpeedyGPU();
- }
-
- // add nodes to the network
- for (let i = 0; i < nodes.length; i++) {
- const node = nodes[i];
- if (!this._nodes.includes(node)) this._nodes.push(node);
- }
-
- // generate the sequence of nodes
- this._sequence = SpeedyPipeline._tsort(this._nodes);
- SpeedyPipeline._validateSequence(this._sequence);
-
- // initialize nodes
- for (let i = 0; i < this._sequence.length; i++) this._sequence[i].init(gpu);
-
- // done!
- return this;
- }
-
- /**
- * Release the resources associated with this pipeline
- * @returns {null}
- */
- release() {
- if (this._nodes.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
-
- // release nodes
- for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].release(gpu);
- this._sequence.length = 0;
- this._nodes.length = 0;
-
- // decrease reference count and release GPU if necessary
- if (0 == --referenceCount) gpu = gpu.release();
-
- // done!
- return null;
- }
-
- /**
- * Run the pipeline
- * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
- */
- run() {
- utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
-
- // is the pipeline busy?
- if (this._busy) {
- // if so, we need to wait 'til it finishes
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- setTimeout(() => this.run().then(resolve, reject), 0);
- });
- } else {
- // the pipeline is now busy and won't accept concurrent tasks
- // (we allocate textures using a single pool)
- this._busy = true;
- }
-
- // find the sinks
- const sinks = /** @type {SpeedyPipelineSinkNode[]} */this._sequence.filter(node => node.isSink());
-
- // create output template
- const template = SpeedyPipeline._createOutputTemplate(sinks);
-
- // diagnostic log
- if (settings/* Settings */.w.logging === 'diagnostic') utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
-
- // run the pipeline
- return SpeedyPipeline._runSequence(this._sequence).then(() =>
- // export results
- speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
- // aggregate results by the names of the sinks
- results.reduce((obj, val, idx) => (obj[sinks[idx].name] = val, obj), template))).finally(() => {
- // clear all ports
- for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].clearPorts();
-
- // the pipeline is no longer busy
- this._busy = false;
-
- // diagnostic log
- if (settings/* Settings */.w.logging === 'diagnostic') {
- utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
- Object.keys(template).forEach(entry => {
- utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
- });
- }
- }).turbocharge();
- }
-
- /**
- * @internal
- *
- * GPU instance
- * @returns {SpeedyGPU}
- */
- get _gpu() {
- return gpu;
- }
-
- /**
- * Execute the tasks of a sequence of nodes
- * @param {SpeedyPipelineNode[]} sequence sequence of nodes
- * @param {number} [i] in [0,n)
- * @param {number} [n] number of nodes
- * @returns {SpeedyPromise<void>}
- */
- static _runSequence(sequence, i = 0, n = sequence.length) {
- for (; i < n; i++) {
- const runTask = sequence[i].execute(gpu);
-
- // this call greatly improves performance when downloading pixel data using PBOs
- gpu.gl.flush();
- if (typeof runTask !== 'undefined') return runTask.then(() => SpeedyPipeline._runSequence(sequence, i + 1, n));
- }
- return speedy_promise/* SpeedyPromise */.i.resolve();
- }
-
- /**
- * Topological sorting
- * @param {SpeedyPipelineNode[]} nodes
- * @returns {SpeedyPipelineNode[]}
- */
- static _tsort(nodes) {
- /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
-
- const outlinks = SpeedyPipeline._outlinks(nodes);
- const stack = nodes.map(node => ( /** @type {StackNode} */[node, false]));
- const trash = new Set();
- const sorted = new Array(nodes.length);
- let j = sorted.length;
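- // iterative DFS: each node is pushed twice. The second pop ([node, true])
- // only happens after its outgoing neighbors have been processed, and sorted[]
- // is filled from the end, so every node ends up before the nodes it links to.
- // A neighbor that was already visited (in trash) but not yet placed in
- // sorted[] indicates a back edge, i.e., a cycle.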
- while (stack.length > 0) {
- const [node, done] = stack.pop();
- if (!done) {
- if (!trash.has(node)) {
- const outnodes = outlinks.get(node);
- trash.add(node);
- stack.push([node, true]);
- stack.push(...outnodes.map(node => ( /** @type {StackNode} */[node, false])));
- if (outnodes.some(node => trash.has(node) && !sorted.includes(node))) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
- }
- } else sorted[--j] = node;
- }
- return sorted;
- }
-
- /**
- * Figure out the outgoing links of all nodes
- * @param {SpeedyPipelineNode[]} nodes
- * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
- */
- static _outlinks(nodes) {
- const outlinks = new Map();
- for (let k = 0; k < nodes.length; k++) outlinks.set(nodes[k], []);
- for (let i = 0; i < nodes.length; i++) {
- const to = nodes[i];
- const inputs = to.inputNodes();
- for (let j = 0; j < inputs.length; j++) {
- const from = inputs[j];
- const links = outlinks.get(from);
- if (!links) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
- if (!links.includes(to)) links.push(to);
- }
- }
- return outlinks;
- }
-
- /**
- * Generate the output template by aggregating the names of the sinks
- * @param {SpeedyPipelineNode[]} [sinks]
- * @returns {SpeedyPipelineOutput}
- */
- static _createOutputTemplate(sinks = []) {
- const template = Object.create(null);
- for (let i = sinks.length - 1; i >= 0; i--) template[sinks[i].name] = null;
- return template;
- }
-
- /**
- * Validate a sequence of nodes
- * @param {SpeedyPipelineNode[]} sequence
- */
- static _validateSequence(sequence) {
- if (sequence.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);else if (!sequence[0].isSource()) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);else if (!sequence.find(node => node.isSink())) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
- }
- }
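-
- // Illustrative sketch (comment only): wiring and running a pipeline with the
- // node types defined below. `source` and `sink` stand for already-constructed
- // source/sink nodes (e.g. the image source and image sink that follow).
- //
- //   source.output().connectTo(sink.input());
- //   const pipeline = new SpeedyPipeline().init(source, sink);
- //   pipeline.run().then(result => {
- //     // results are keyed by sink names, e.g. result.image for a sink named "image"
- //   });
- //   // later: pipeline.release();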
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * source.js
- * Gets an image into a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
-
- /**
- * Gets an image into a pipeline
- */
- class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, UPLOAD_BUFFER_SIZE, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMedia|null} source media */
- this._media = null;
-
- /** @type {number} texture index */
- this._textureIndex = 0;
- }
-
- /**
- * Source media
- * @returns {SpeedyMedia|null}
- */
- get media() {
- return this._media;
- }
-
- /**
- * Source media
- * @param {SpeedyMedia|null} media
- */
- set media(media) {
- if (media !== null && !(media instanceof SpeedyMedia)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
- this._media = media;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._media == null) throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
-
- // use round-robin to mitigate WebGL's implicit synchronization
- // and maybe minimize texture upload times
- this._textureIndex = (this._textureIndex + 1) % this._tex.length;
-
- // upload texture
- const outputTexture = this._tex[this._textureIndex];
- gpu.upload(this._media._source, outputTexture);
- this.output().swrite(outputTexture, this._media._format);
- }
- }
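-
- // Illustrative sketch (comment only): a source node needs a SpeedyMedia
- // before the pipeline runs. `media` stands for a SpeedyMedia previously
- // loaded from an image, video, canvas or bitmap.
- //
- //   const source = new SpeedyPipelineNodeImageSource('mySource');
- //   source.media = media; // throws unless media is a SpeedyMedia (or null)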
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets an image out of a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
-
- /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
- const DEFAULT_MEDIA_TYPE = "bitmap";
-
- /**
- * Gets an image out of a pipeline
- */
- class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'image') {
- super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
- this._mediaType = DEFAULT_MEDIA_TYPE;
-
- /** @type {ImageBitmap} output bitmap */
- this._bitmap = null;
-
- /** @type {ImageData} output pixel data */
- this._data = null;
-
- /** @type {ImageFormat} output format */
- this._format = types/* ImageFormat */.f5.RGBA;
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader(1);
- }
-
- /**
- * The media type that is exported from this node
- * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
- */
- get mediaType() {
- return this._mediaType;
- }
-
- /**
- * The media type that is exported from this node
- * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
- */
- set mediaType(value) {
- if (value != 'bitmap' && value != 'data') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
- this._mediaType = value;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- export() {
- const bitmapOrData = this._mediaType != 'data' ? this._bitmap : this._data;
- utils/* Utils */.A.assert(bitmapOrData != null);
- return SpeedyMedia.load(bitmapOrData, {
- format: this._format
- }, false);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- if (this._mediaType != 'data') {
- /* Create an ImageBitmap (default) */
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- const canvas = gpu.renderToCanvas(image);
- createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
- this._bitmap = bitmap;
- this._format = format;
- this._data = null;
- resolve();
- });
- });
- } else {
- /* Create an ImageData */
- return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
- const dataArray = new Uint8ClampedArray(pixels.buffer);
- this._data = new ImageData(dataArray, image.width, image.height);
- this._format = format;
- this._bitmap = null;
- });
- }
- }
- }
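-
- // Illustrative sketch (comment only): the sink exports either an ImageBitmap
- // (default) or raw pixel data, depending on mediaType.
- //
- //   const sink = new SpeedyPipelineNodeImageSink(); // named "image" by default
- //   sink.mediaType = 'data'; // export ImageData instead of an ImageBitmap
- //   // after pipeline.run(), the exported media appears as result.image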
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * multiplexer.js
- * Image multiplexer
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {string[]} the names of the input ports indexed by their number */
- const INPUT_PORT = ['in0', 'in1'];
-
- /**
- * Image multiplexer
- */
- class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [...INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image)), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} which port should be linked to the output? */
- this._port = 0;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @returns {number}
- */
- get port() {
- return this._port;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @param {number} port
- */
- set port(port) {
- if (port < 0 || port >= INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
- this._port = port | 0;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const message = this.input(INPUT_PORT[this._port]).read();
- this.output().write(message);
- }
- }
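- 
- /*
-  * Usage sketch (illustration only): selecting which input an image multiplexer
-  * forwards. Assumes a multiplexer created via the factory defined later in this
-  * file (SpeedyPipelineImageFactory.Multiplexer).
-  *
-  *     const mux = SpeedyPipelineImageFactory.Multiplexer('mux');
-  *     mux.port = 1;   // forward the message arriving at 'in1'
-  *     mux.port = 2;   // throws IllegalArgumentError: only 'in0' and 'in1' exist
-  */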
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * buffer.js
- * Image Buffer
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Image Buffer: a node with memory.
- * At time t, it outputs the image received at time t-1
- */
- class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} current page: 0 or 1 */
- this._pageIndex = 0;
-
- /** @type {boolean} first run? */
- this._initialized = false;
-
- /** @type {ImageFormat} previous image format */
- this._previousFormat = types/* ImageFormat */.f5.RGBA;
-
- /** @type {boolean} frozen buffer? */
- this._frozen = false;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @returns {boolean}
- */
- get frozen() {
- return this._frozen;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @param {boolean} value
- */
- set frozen(value) {
- this._frozen = Boolean(value);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const previousFormat = this._previousFormat;
- const page = this._tex;
- const previousInputTexture = page[1 - this._pageIndex];
- const outputTexture = page[this._pageIndex];
-
- // can't store pyramids
- if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
-
- // bufferize
- if (!this._frozen || !this._initialized) {
- // store input
- this._previousFormat = format;
- previousInputTexture.resize(image.width, image.height);
- image.copyTo(previousInputTexture);
-
- // page flipping
- this._pageIndex = 1 - this._pageIndex;
- }
-
- // first run?
- if (!this._initialized) {
- this._initialized = true;
- this.output().swrite(previousInputTexture, format);
- return;
- }
-
- // done!
- this.output().swrite(outputTexture, previousFormat);
- }
- }
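- 
- /*
-  * Timeline sketch (illustration only): how the image buffer's two pages behave.
-  *
-  *     run #1: input A is stored; A is also emitted (nothing older exists yet)
-  *     run #2: input B is stored; A (the previous input) is emitted
-  *     run #3: input C is stored; B is emitted
-  *     ...
-  *
-  * With frozen = true, incoming frames are discarded and the last buffered frame
-  * keeps being emitted, effectively lengthening the buffering time.
-  */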
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pyramid.js
- * Generate pyramid
- */
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; //14; // supposing image size <= 8K = 2^13 (downto 1)
- const MAX_TEXTURES = 2 * MAX_LEVELS; //MAX_LEVELS;
-
- /**
- * Generate pyramid
- */
- class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, MAX_TEXTURES + 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const outputTexture = this._tex[0];
- const pyramids = gpu.programs.pyramids;
- let width = image.width,
- height = image.height;
-
- // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
- const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
-
- // get work textures
- const mip = new Array(MAX_TEXTURES + 1);
- for (let i = MAX_TEXTURES; i >= 1; i--) mip[i - 1] = this._tex[i];
-
- // get a copy of the input image
- mip[0].resize(width, height);
- image.copyTo(mip[0]);
-
- // generate gaussian pyramid
- const numLevels = Math.min(mipLevels, MAX_LEVELS);
- for (let level = 1; level < numLevels; level++) {
- // use max(1, floor(size / 2^lod)), in accordance to
- // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
- const halfWidth = Math.max(1, width >>> 1);
- const halfHeight = Math.max(1, height >>> 1);
-
- // reduce operation
- const tmp = level - 1 + MAX_LEVELS;
- pyramids.smoothX.outputs(width, height, mip[tmp])(mip[level - 1]);
- pyramids.smoothY.outputs(width, height, mip[level - 1])(mip[tmp]);
- pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level])(mip[level - 1]);
- /*
- (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
- (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
- */
-
- // flush
- gpu.gl.flush();
-
- // next level
- width = halfWidth;
- height = halfHeight;
-
- /*
- // debug: view pyramid
- const view = mip[level-1];
- const canvas = gpu.renderToCanvas(view);
- if(!window._ww) document.body.appendChild(canvas);
- window._ww = 1;
- */
- }
-
- // copy to output & set mipmap
- outputTexture.resize(image.width, image.height);
- outputTexture.clear();
- image.copyTo(outputTexture);
- outputTexture.generateMipmaps(mip.slice(0, numLevels));
-
- // done!
- this.output().swrite(outputTexture, format);
- }
- }
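- 
- /*
-  * Worked example (illustration only): for a 640x480 input, the OpenGL ES 3.0
-  * rule gives mipLevels = 1 + floor(log2(max(640, 480))) = 1 + 9 = 10, so up to
-  * min(10, MAX_LEVELS) levels are generated. Each level blurs the previous one
-  * (smoothX + smoothY) and then halves it with downsample2, using
-  * max(1, floor(size / 2)) per axis: 640x480 -> 320x240 -> 160x120 -> 80x60 -> ...
-  */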
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * mixer.js
- * Image Mixer
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Image Mixer
- */
- class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in0').expects(SpeedyPipelineMessageType.Image), InputPort('in1').expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} alpha coefficient (applied to image0) */
- this._alpha = 0.5;
-
- /** @type {number} beta coefficient (applied to image1) */
- this._beta = 0.5;
-
- /** @type {number} gamma coefficient (brightness control) */
- this._gamma = 0.0;
- }
-
- /**
- * Alpha coefficient (applied to image0)
- * @returns {number}
- */
- get alpha() {
- return this._alpha;
- }
-
- /**
- * Alpha coefficient (applied to image0)
- * @param {number} value
- */
- set alpha(value) {
- this._alpha = +value;
- }
-
- /**
- * Beta coefficient (applied to image1)
- * @returns {number}
- */
- get beta() {
- return this._beta;
- }
-
- /**
- * Beta coefficient (applied to image1)
- * @param {number} value
- */
- set beta(value) {
- this._beta = +value;
- }
-
- /**
- * Gamma coefficient (brightness control)
- * @returns {number}
- */
- get gamma() {
- return this._gamma;
- }
-
- /**
- * Gamma coefficient (brightness control)
- * @param {number} value
- */
- set gamma(value) {
- this._gamma = +value;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const in0 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in0').read();
- const in1 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in1').read();
- const image0 = in0.image,
- image1 = in1.image;
- const format0 = in0.format,
- format1 = in1.format;
- const width = Math.max(image0.width, image1.width);
- const height = Math.max(image0.height, image1.height);
- const alpha = this._alpha,
- beta = this._beta,
- gamma = this._gamma;
- const outputTexture = this._tex[0];
- if (format0 != format1) throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
- gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
- gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
- this.output().swrite(outputTexture, format0);
- }
- }
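- 
- /*
-  * Usage sketch (illustration only): judging by the alpha/beta/gamma docs above,
-  * the additiveMix program presumably outputs alpha*image0 + beta*image1 + gamma
-  * per pixel. The defaults (alpha = beta = 0.5, gamma = 0) give a 50/50 blend:
-  *
-  *     const mixer = SpeedyPipelineImageFactory.Mixer('blend');
-  *     mixer.alpha = 0.75;  // weight of the image arriving at 'in0'
-  *     mixer.beta  = 0.25;  // weight of the image arriving at 'in1'
-  *     mixer.gamma = 0.0;   // extra brightness
-  *
-  * Both inputs must share the same ImageFormat, otherwise a NotSupportedError is
-  * thrown.
-  */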
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * portal.js
- * Image Portals
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A sink of an Image Portal
- * This is not a pipeline sink - it doesn't export any data!
- */
- class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {ImageFormat} stored image format */
- this._format = types/* ImageFormat */.f5.RGBA;
-
- /** @type {boolean} is this node initialized? */
- this._initialized = false;
- }
-
- /**
- * Stored image
- * @returns {SpeedyTexture}
- */
- get image() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._tex[0];
- }
-
- /**
- * Stored image format
- * @returns {ImageFormat}
- */
- get format() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._format;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._tex[0].resize(1, 1).clear(); // initial texture
- this._format = types/* ImageFormat */.f5.RGBA;
- this._initialized = true;
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const tex = this._tex[0];
-
- // can't store pyramids
- if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
-
- // copy input
- this._format = format;
- tex.resize(image.width, image.height);
- image.copyTo(tex);
- }
- }
-
- /**
- * A source of an Image Portal
- */
- class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
- this._source = null;
- }
-
- /**
- * Data source
- * @returns {SpeedyPipelineNodeImagePortalSink|null}
- */
- get source() {
- return this._source;
- }
-
- /**
- * Data source
- * @param {SpeedyPipelineNodeImagePortalSink|null} node
- */
- set source(node) {
- if (node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
- this._source = node;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
- this.output().swrite(this._source.image, this._source.format);
- }
- }
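- 
- /*
-  * Wiring sketch (illustration only): an image portal carries an image from a
-  * sink node to a source node without a direct pipeline link between them, e.g.
-  * from one run to the next.
-  *
-  *     const portalSink = SpeedyPipelineImagePortalFactory.Sink('storage');
-  *     const portalSource = SpeedyPipelineImagePortalFactory.Source('reader');
-  *     portalSource.source = portalSink; // must be an image portal sink (or null)
-  *
-  * Reading a sink that has not been initialized raises an IllegalOperationError
-  * ("holds no data"); running a source with no sink assigned raises
-  * IllegalOperationError ("has no source").
-  */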
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * image-factory.js
- * Image-related nodes
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Portal nodes
- */
- class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create an image portal source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePortalSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeImagePortalSource(name);
- }
-
- /**
- * Create an image portal sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePortalSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeImagePortalSink(name);
- }
- }
-
- /**
- * Image nodes
- */
- class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create an image source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeImageSource(name);
- }
-
- /**
- * Create an image sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeImageSink(name);
- }
-
- /**
- * Create an image multiplexer
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageMultiplexer}
- */
- static Multiplexer(name = undefined) {
- return new SpeedyPipelineNodeImageMultiplexer(name);
- }
-
- /**
- * Create an image buffer
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageBuffer}
- */
- static Buffer(name = undefined) {
- return new SpeedyPipelineNodeImageBuffer(name);
- }
-
- /**
- * Image Pyramid
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePyramid}
- */
- static Pyramid(name = undefined) {
- return new SpeedyPipelineNodeImagePyramid(name);
- }
-
- /**
- * Image Mixer (blending)
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageMixer}
- */
- static Mixer(name = undefined) {
- return new SpeedyPipelineNodeImageMixer(name);
- }
-
- /**
- * Image Portals
- * @returns {typeof SpeedyPipelineImagePortalFactory}
- */
- static get Portal() {
- return SpeedyPipelineImagePortalFactory;
- }
- }
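- 
- /*
-  * Usage sketch (illustration only): these static factories are thin wrappers
-  * around the node constructors above, so the two lines below are equivalent:
-  *
-  *     const sink1 = SpeedyPipelineImageFactory.Sink('out');
-  *     const sink2 = new SpeedyPipelineNodeImageSink('out');
-  *
-  * Portal nodes are reached through the nested namespace, e.g.
-  * SpeedyPipelineImageFactory.Portal.Sink('storage').
-  */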
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * greyscale.js
- * Convert an image to greyscale
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Convert an image to greyscale
- */
- class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const filters = gpu.programs.filters;
- filters.rgb2grey.outputs(width, height, outputTexture);
- filters.rgb2grey(image);
- this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * gaussian-blur.js
- * Gaussian Blur
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
- * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
- * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
- * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
- */
- const DEFAULT_KERNEL = Object.freeze({
- 3: [0.27901008925473514, 0.44197982149052983, 0.27901008925473514],
- // 1D convolution (sigma = 1)
- 5: [0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021],
- // 1D convolution (separable kernel)
- 7: [0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274],
- 9: [0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988],
- 11: [0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346],
- 13: [0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363],
- 15: [0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383]
- //3: [ 0.25, 0.5, 0.25 ],
- //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
- });
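- 
- /*
-  * Where these numbers appear to come from (illustration only): integrating a
-  * Gaussian of the heuristic sigma over each 1-pixel bin and renormalizing.
-  * For ksize = 5, sigma = max(1, (5-1)/4) = 1; the bin areas are
-  * Phi(0.5)-Phi(-0.5) = 0.3829, Phi(1.5)-Phi(0.5) = 0.2417 and
-  * Phi(2.5)-Phi(1.5) = 0.0606, summing to 0.9875; dividing by that sum gives
-  * 0.3877, 0.2448 and 0.0614, matching the 5-tap entry above.
-  */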
-
- /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
- const DEFAULT_SIGMA = new SpeedyVector2(0, 0);
-
- /** convolution programs (x-axis) */
- const CONVOLUTION_X = Object.freeze({
- 3: 'convolution3x',
- 5: 'convolution5x',
- 7: 'convolution7x',
- 9: 'convolution9x',
- 11: 'convolution11x',
- 13: 'convolution13x',
- 15: 'convolution15x'
- });
-
- /** convolution programs (y-axis) */
- const CONVOLUTION_Y = Object.freeze({
- 3: 'convolution3y',
- 5: 'convolution5y',
- 7: 'convolution7y',
- 9: 'convolution9y',
- 11: 'convolution11y',
- 13: 'convolution13y',
- 15: 'convolution15y'
- });
-
- /**
- * @typedef {object} SeparableConvolutionKernel
- * @property {number[]} x
- * @property {number[]} y
- */
-
- /**
- * Gaussian Blur
- */
- class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel */
- this._kernelSize = new SpeedySize(5, 5);
-
- /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
- this._sigma = DEFAULT_SIGMA;
-
- /** @type {SeparableConvolutionKernel} convolution kernel */
- this._kernel = {
- x: DEFAULT_KERNEL[this._kernelSize.width],
- y: DEFAULT_KERNEL[this._kernelSize.height]
- };
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const kw = kernelSize.width,
- kh = kernelSize.height;
- if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
- this._kernelSize = kernelSize;
- this._updateKernel();
- }
-
- /**
- * Sigma of the Gaussian kernel
- * @returns {SpeedyVector2}
- */
- get sigma() {
- return this._sigma;
- }
-
- /**
- * Sigma of the Gaussian kernel
- * @param {SpeedyVector2} sigma
- */
- set sigma(sigma) {
- utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
- utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
- this._sigma = sigma;
- this._updateKernel();
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const kernX = this._kernel.x;
- const kernY = this._kernel.y;
- const convX = CONVOLUTION_X[this._kernelSize.width];
- const convY = CONVOLUTION_Y[this._kernelSize.height];
- const tex = this._tex[0];
- const outputTexture = this._tex[1];
- gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
- gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Update the internal kernel to match
- * sigma and kernelSize
- */
- _updateKernel() {
- if (this._sigma.x == DEFAULT_SIGMA.x) this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];else this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
- if (this._sigma.y == DEFAULT_SIGMA.y) this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];else this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
- }
- }
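- 
- /*
-  * Usage sketch (illustration only): configuring the blur with the helper
-  * classes used above (SpeedySize, SpeedyVector2) and the filter factory defined
-  * later in this file.
-  *
-  *     const blur = SpeedyPipelineFilterFactory.GaussianBlur('blur');
-  *     blur.kernelSize = new SpeedySize(9, 9);   // odd sizes from 3x3 to 15x15
-  *     blur.sigma = new SpeedyVector2(2, 2);     // custom sigma per axis
-  *     blur.sigma = new SpeedyVector2(0, 0);     // back to the default rule
-  */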
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * simple-blur.js
- * Simple Blur (Box Filter)
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** 1D convolution filters */
- const BOX_FILTER = Object.freeze({
- 3: new Array(3).fill(1 / 3),
- 5: new Array(5).fill(1 / 5),
- 7: new Array(7).fill(1 / 7),
- 9: new Array(9).fill(1 / 9),
- 11: new Array(11).fill(1 / 11),
- 13: new Array(13).fill(1 / 13),
- 15: new Array(15).fill(1 / 15)
- });
-
- /** convolution programs (x-axis) */
- const simple_blur_CONVOLUTION_X = Object.freeze({
- 3: 'convolution3x',
- 5: 'convolution5x',
- 7: 'convolution7x',
- 9: 'convolution9x',
- 11: 'convolution11x',
- 13: 'convolution13x',
- 15: 'convolution15x'
- });
-
- /** convolution programs (y-axis) */
- const simple_blur_CONVOLUTION_Y = Object.freeze({
- 3: 'convolution3y',
- 5: 'convolution5y',
- 7: 'convolution7y',
- 9: 'convolution9y',
- 11: 'convolution11y',
- 13: 'convolution13y',
- 15: 'convolution15y'
- });
-
- /**
- * @typedef {object} SeparableConvolutionKernel
- * @property {number[]} x
- * @property {number[]} y
- */
-
- /**
- * Simple Blur (Box Filter)
- */
- class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel */
- this._kernelSize = new SpeedySize(5, 5);
-
- /** @type {SeparableConvolutionKernel} convolution kernel */
- this._kernel = {
- x: BOX_FILTER[this._kernelSize.width],
- y: BOX_FILTER[this._kernelSize.height]
- };
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const kw = kernelSize.width,
- kh = kernelSize.height;
- if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
- this._kernelSize = kernelSize;
- this._kernel.x = BOX_FILTER[this._kernelSize.width];
- this._kernel.y = BOX_FILTER[this._kernelSize.height];
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const kernX = this._kernel.x;
- const kernY = this._kernel.y;
- const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
- const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
- const tex = this._tex[0];
- const outputTexture = this._tex[1];
- gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
- gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
- this.output().swrite(outputTexture, format);
- }
- }
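- 
- /*
-  * Note (illustration only): like the Gaussian blur above, the box filter is
-  * separable and applied as a horizontal 1D pass followed by a vertical 1D pass,
-  * so a k x k blur costs O(k) texture reads per pixel instead of O(k^2).
-  */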
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * median-blur.js
- * Median Blur
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Median programs
- const MEDIAN = {
- 3: 'median3',
- 5: 'median5',
- 7: 'median7'
- };
-
- /**
- * Median Blur
- */
- class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel (assumed to be square) */
- this._kernelSize = new SpeedySize(5, 5);
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const ksize = kernelSize.width;
- if (!(ksize == 3 || ksize == 5 || ksize == 7)) throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);else if (kernelSize.width != kernelSize.height) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
- this._kernelSize = kernelSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const ksize = this._kernelSize.width;
- const med = MEDIAN[ksize];
- const outputTexture = this._tex[0];
- gpu.programs.filters[med].outputs(width, height, outputTexture)(image);
- this.output().swrite(outputTexture, format);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * convolution.js
- * Image convolution
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- // 2D convolution programs
- const CONVOLUTION = {
- 3: 'convolution3',
- 5: 'convolution5',
- 7: 'convolution7'
- };
-
- /**
- * Image convolution
- */
- class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMatrix} convolution kernel (square matrix) */
- this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
- }
-
- /**
- * Convolution kernel
- * @returns {SpeedyMatrix}
- */
- get kernel() {
- return this._kernel;
- }
-
- /**
- * Convolution kernel
- * @param {SpeedyMatrix} kernel
- */
- set kernel(kernel) {
- if (kernel.rows != kernel.columns) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);else if (!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7)) throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
- this._kernel = kernel;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const ksize = this._kernel.rows;
- const conv = CONVOLUTION[ksize];
- const kernel = this._kernel.read();
- gpu.programs.filters[conv].outputs(width, height, outputTexture)(image, kernel);
- this.output().swrite(outputTexture, format);
- }
- }
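- 
- /*
-  * Usage sketch (illustration only): a 3x3 sharpening kernel. A symmetric kernel
-  * is used here, so the storage order of SpeedyMatrix.Create() does not matter.
-  *
-  *     const conv = SpeedyPipelineFilterFactory.Convolution('sharpen');
-  *     conv.kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [
-  *         0, -1,  0,
-  *        -1,  5, -1,
-  *         0, -1,  0
-  *     ]);
-  *
-  * Only square 3x3, 5x5 and 7x7 kernels are accepted.
-  */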
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * nightvision.js
- * Nightvision filter
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
- */
-
- /**
- * Nightvision filter: "see in the dark"
- */
- class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.RGBA || msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} a value typically in [0,1]: larger number => higher contrast */
- this._gain = 0.5;
-
- /** @type {number} a value typically in [0,1]: controls brightness */
- this._offset = 0.5;
-
- /** @type {number} gain decay, a value in [0,1] */
- this._decay = 0.0;
-
- /** @type {NightvisionQualityLevel} quality level */
- this._quality = 'medium';
- }
-
- /**
- * Gain, a value typically in [0,1]: larger number => higher contrast
- * @returns {number}
- */
- get gain() {
- return this._gain;
- }
-
- /**
- * Gain, a value typically in [0,1]: larger number => higher contrast
- * @param {number} gain
- */
- set gain(gain) {
- this._gain = +gain;
- }
-
- /**
- * Offset, a value typically in [0,1] that controls the brightness
- * @returns {number}
- */
- get offset() {
- return this._offset;
- }
-
- /**
- * Offset, a value typically in [0,1] that controls the brightness
- * @param {number} offset
- */
- set offset(offset) {
- this._offset = +offset;
- }
-
- /**
- * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
- * @returns {number}
- */
- get decay() {
- return this._decay;
- }
-
- /**
- * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
- * @param {number} decay
- */
- set decay(decay) {
- this._decay = Math.max(0.0, Math.min(+decay, 1.0));
- }
-
- /**
- * Quality level of the filter
- * @returns {NightvisionQualityLevel}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Quality level of the filter
- * @param {NightvisionQualityLevel} quality
- */
- set quality(quality) {
- if (quality === 'high' || quality === 'medium' || quality === 'low') this._quality = quality;else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const gain = this._gain;
- const offset = this._offset;
- const decay = this._decay;
- const quality = this._quality;
- const filters = gpu.programs.filters;
- const tmp = this._tex[0];
- const illuminationMap = this._tex[1];
- const outputTexture = this._tex[2];
-
- // compute illumination map
- if (quality == 'medium') {
- filters.illuminationMapX.outputs(width, height, tmp);
- filters.illuminationMapY.outputs(width, height, illuminationMap);
- filters.illuminationMapX(image);
- filters.illuminationMapY(tmp);
- } else if (quality == 'high') {
- filters.illuminationMapHiX.outputs(width, height, tmp);
- filters.illuminationMapHiY.outputs(width, height, illuminationMap);
- filters.illuminationMapHiX(image);
- filters.illuminationMapHiY(tmp);
- } else if (quality == 'low') {
- filters.illuminationMapLoX.outputs(width, height, tmp);
- filters.illuminationMapLoY.outputs(width, height, illuminationMap);
- filters.illuminationMapLoX(image);
- filters.illuminationMapLoY(tmp);
- }
-
- // run nightvision
- if (format === types/* ImageFormat */.f5.GREY) {
- filters.nightvisionGreyscale.outputs(width, height, outputTexture);
- filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
- } else if (format === types/* ImageFormat */.f5.RGBA) {
- filters.nightvision.outputs(width, height, outputTexture);
- filters.nightvision(image, illuminationMap, gain, offset, decay);
- }
-
- // done!
- this.output().swrite(outputTexture, format);
- }
- }
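- 
- /*
-  * Usage sketch (illustration only): the filter accepts RGBA or GREY input and
-  * preserves the input format on output.
-  *
-  *     const nv = SpeedyPipelineFilterFactory.Nightvision('nv');
-  *     nv.gain = 0.6;        // typically in [0,1]; larger => more contrast
-  *     nv.offset = 0.5;      // typically in [0,1]; brightness
-  *     nv.decay = 0.0;       // in [0,1]; gain decay from the image center
-  *     nv.quality = 'low';   // 'low' | 'medium' | 'high'
-  */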
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * normalize.js
- * Normalize image to a range
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Normalize image to a range
- */
- class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} a value in [0,255] */
- this._minValue = 0;
-
- /** @type {number} a value in [0,255] */
- this._maxValue = 255;
- }
-
- /**
- * Minimum intensity in the output image, a value in [0,255]
- * @returns {number}
- */
- get minValue() {
- return this._minValue;
- }
-
- /**
- * Minimum intensity in the output image, a value in [0,255]
- * @param {number} minValue
- */
- set minValue(minValue) {
- this._minValue = Math.max(0, Math.min(+minValue, 255));
- }
-
- /**
- * Maximum intensity in the output image, a value in [0,255]
- * @returns {number}
- */
- get maxValue() {
- return this._maxValue;
- }
-
- /**
- * Maximum intensity in the output image, a value in [0,255]
- * @param {number} maxValue
- */
- set maxValue(maxValue) {
- this._maxValue = Math.max(0, Math.min(+maxValue, 255));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[3];
- let minValue = this._minValue;
- let maxValue = this._maxValue;
- if (minValue > maxValue) minValue = maxValue = (minValue + maxValue) / 2;
- const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
- gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
- gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Scan a single component in all pixels of the image and find the min & max intensities
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTexture} image input image
- * @param {PixelComponent} pixelComponent a single PixelComponent flag
- * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
- */
- _scanMinMax(gpu, image, pixelComponent) {
- const tex = this._tex;
- const program = gpu.programs.utils;
- const width = image.width,
- height = image.height;
- const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
- utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
- program.copyComponents.outputs(width, height, tex[2]);
- program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
- let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
- for (let i = 0; i < numIterations; i++) texture = program.scanMinMax2D(texture, i);
- return texture;
- }
- }
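- 
- /*
-  * Worked example (illustration only): _scanMinMax() runs
-  * ceil(log2(max(width, height))) reduction passes, e.g. 10 passes for a
-  * 640x480 image since ceil(log2(640)) = 10. The resulting (max, min) map then
-  * lets normalizeGreyscale remap intensities so that the darkest pixel becomes
-  * minValue and the brightest becomes maxValue; if minValue > maxValue, both are
-  * collapsed to their average before the remap.
-  */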
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * filter-factory.js
- * Image filters
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Image filters
- */
- class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Convert image to greyscale
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeGreyscale}
- */
- static Greyscale(name = undefined) {
- return new SpeedyPipelineNodeGreyscale(name);
- }
-
- /**
- * Gaussian Blur
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeGaussianBlur}
- */
- static GaussianBlur(name = undefined) {
- return new SpeedyPipelineNodeGaussianBlur(name);
- }
-
- /**
- * Simple Blur (Box Filter)
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeSimpleBlur}
- */
- static SimpleBlur(name = undefined) {
- return new SpeedyPipelineNodeSimpleBlur(name);
- }
-
- /**
- * Median Blur
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeMedianBlur}
- */
- static MedianBlur(name = undefined) {
- return new SpeedyPipelineNodeMedianBlur(name);
- }
-
- /**
- * Image Convolution
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeConvolution}
- */
- static Convolution(name = undefined) {
- return new SpeedyPipelineNodeConvolution(name);
- }
-
- /**
- * Nightvision
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeNightvision}
- */
- static Nightvision(name = undefined) {
- return new SpeedyPipelineNodeNightvision(name);
- }
-
- /**
- * Normalize image
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeNormalize}
- */
- static Normalize(name = undefined) {
- return new SpeedyPipelineNodeNormalize(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * perspective-warp.js
- * Warp an image using a perspective transformation
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Used when an invalid matrix is provided
- const SINGULAR_MATRIX = [0, 0, 0, 0, 0, 0, 0, 0, 1];
-
- /**
- * Warp an image using a perspective transformation
- */
- class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMatrix} perspective transformation */
- this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
- }
-
- /**
- * Perspective transform, a 3x3 homography matrix
- * @returns {SpeedyMatrix}
- */
- get transform() {
- return this._transform;
- }
-
- /**
- * Perspective transform, a 3x3 homography matrix
- * @param {SpeedyMatrix} transform
- */
- set transform(transform) {
- if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
- this._transform = transform;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const homography = this._transform.read();
- const inverseHomography = this._inverse3(homography);
- const isValidHomography = !Number.isNaN(inverseHomography[0]);
- gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
- gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
- * @param {number[]} mat 3x3 matrix in column-major format
- * @param {number} [eps] epsilon
- * @returns {number[]} 3x3 inverse matrix in column-major format
- */
- _inverse3(mat, eps = 1e-6) {
- // read the entries of the matrix
- const a11 = mat[0];
- const a21 = mat[1];
- const a31 = mat[2];
- const a12 = mat[3];
- const a22 = mat[4];
- const a32 = mat[5];
- const a13 = mat[6];
- const a23 = mat[7];
- const a33 = mat[8];
-
- // compute cofactors
- const b1 = a33 * a22 - a32 * a23; // b11
- const b2 = a33 * a12 - a32 * a13; // b21
- const b3 = a23 * a12 - a22 * a13; // b31
-
- // compute the determinant
- const det = a11 * b1 - a21 * b2 + a31 * b3;
-
- // set up the inverse
- if (!(Math.abs(det) < eps)) {
- const d = 1.0 / det;
- mat[0] = b1 * d;
- mat[1] = -(a33 * a21 - a31 * a23) * d;
- mat[2] = (a32 * a21 - a31 * a22) * d;
- mat[3] = -b2 * d;
- mat[4] = (a33 * a11 - a31 * a13) * d;
- mat[5] = -(a32 * a11 - a31 * a12) * d;
- mat[6] = b3 * d;
- mat[7] = -(a23 * a11 - a21 * a13) * d;
- mat[8] = (a22 * a11 - a21 * a12) * d;
- } else mat.fill(Number.NaN, 0, 9);
-
- // done!
- return mat;
- }
- }
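- 
- /*
-  * Worked example (illustration only): _inverse3() on a pure translation.
-  * In column-major order, translating by (tx, ty) is [1,0,0, 0,1,0, tx,ty,1];
-  * its determinant is 1 and the routine rewrites the array in place to
-  * [1,0,0, 0,1,0, -tx,-ty,1], i.e. translation by (-tx, -ty). The inverse is
-  * what gets passed to warpPerspective, presumably so the shader can map output
-  * pixels back to input pixels.
-  */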
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * resize.js
- * Resize image
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
-
- /**
- * Resize image
- */
- class SpeedyPipelineNodeResize extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the output image, in pixels */
- this._size = new SpeedySize(0, 0);
-
- /** @type {SpeedyVector2} size of the output relative to the size of the input */
- this._scale = new SpeedyVector2(1, 1);
-
- /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
- this._method = 'bilinear';
- }
-
- /**
- * Size of the output image, in pixels (use 0 to use scale)
- * @returns {SpeedySize}
- */
- get size() {
- return this._size;
- }
-
- /**
- * Size of the output image, in pixels (use 0 to use scale)
- * @param {SpeedySize} size
- */
- set size(size) {
- this._size = size;
- }
-
- /**
- * Size of the output image relative to the size of the input image
- * @returns {SpeedyVector2}
- */
- get scale() {
- return this._scale;
- }
-
- /**
- * Size of the output image relative to the size of the input image
- * @param {SpeedyVector2} scale
- */
- set scale(scale) {
- this._scale = scale;
- }
-
- /**
- * Interpolation method
- * @returns {SpeedyPipelineNodeResizeMethod}
- */
- get method() {
- return this._method;
- }
-
- /**
- * Interpolation method
- * @param {SpeedyPipelineNodeResizeMethod} method
- */
- set method(method) {
- if (method !== 'nearest' && method !== 'bilinear') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
- this._method = method;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const method = this._method;
- const newWidth = this._size.width || Math.max(1, this._scale.x * width);
- const newHeight = this._size.height || Math.max(1, this._scale.y * height);
- if (method == 'bilinear') {
- gpu.programs.transforms.resizeBilinear.outputs(newWidth, newHeight, outputTexture)(image);
- } else if (method == 'nearest') {
- gpu.programs.transforms.resizeNearest.outputs(newWidth, newHeight, outputTexture)(image);
- }
- this.output().swrite(outputTexture, format);
- }
- }
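- 
- /*
-  * Worked example (illustration only): a zero size component falls back to the
-  * scale vector. With size = (0, 0) and scale = (0.5, 0.5), a 640x480 input is
-  * resized to max(1, 0.5*640) x max(1, 0.5*480) = 320x240; with size = (320, 0)
-  * and scale = (1, 0.25), the output is 320x120.
-  */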
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transform-factory.js
- * Image transforms
- */
-
-
-
-
-
- /**
- * Image transforms
- */
- class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Resize image
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeResize}
- */
- static Resize(name = undefined) {
- return new SpeedyPipelineNodeResize(name);
- }
-
- /**
- * Warp an image using a perspective transformation
- * @param {string} [name]
- * @returns {SpeedyPipelineNodePerspectiveWarp}
- */
- static PerspectiveWarp(name = undefined) {
- return new SpeedyPipelineNodePerspectiveWarp(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * detector.js
- * Abstract keypoint detectors
- */
-
-
-
-
-
-
-
-
-
-
- // Constants
- const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
- const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
- const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
- const NUMBER_OF_RGBA16_TEXTURES = 2;
-
- // legacy constants
- const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
- const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
- const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
-
- /**
- * Abstract keypoint detector
- * @abstract
- */
- class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = 0, portBuilders = undefined) {
- super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
-
- /** @type {number} encoder capacity */
- this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
-
- /** @type {GLint} auxiliary storage */
- this._oldWrapS = 0;
-
- /** @type {SpeedyDrawableTexture[]} textures with 8 bytes per pixel (RGBA16UI) */
- this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
- }
-
- /**
- * Initialize this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- // initialize
- super.init(gpu);
-
- // encodeKeypointSkipOffsets() relies on this
- this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
-
- // allocate RGBA16 textures
- this._allocateTex16(gpu);
- gpu.subscribe(this._allocateTex16, this, gpu);
- }
-
- /**
- * Release this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- // deallocate RGBA16 textures
- gpu.unsubscribe(this._allocateTex16, this);
- this._deallocateTex16(gpu);
-
- // we need to restore the texture parameter because textures come from a pool!
- this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
-
- // release
- super.release(gpu);
- }
-
- /**
- * Set a parameter of the special texture
- * @param {GLenum} pname
- * @param {GLint} param new value
- * @returns {GLint|undefined} old value of the parameter, or undefined if there are no internal textures (legacy path)
- */
- _setupSpecialTexture(pname, param) {
- if (NUMBER_OF_INTERNAL_TEXTURES == 0) return;
-
- // legacy code
- const texture = this._tex[this._tex.length - 1];
- const gl = texture.gl;
- gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
- const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
- gl.texParameteri(gl.TEXTURE_2D, pname, param);
- gl.bindTexture(gl.TEXTURE_2D, null);
- return oldval;
- }
-
- /**
- * We can encode up to this many keypoints. If you find a
- * tight bound for this, download times will be faster.
- * @returns {number}
- */
- get capacity() {
- return this._capacity;
- }
-
- /**
- * We can encode up to this many keypoints. If you find a
- * tight bound for this, download times will be faster.
- * @param {number} capacity
- */
- set capacity(capacity) {
- this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
- }
-
- /**
- * Create a tiny texture with encoded keypoints out of
- * an encoded corners texture
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTexture} corners input
- * @param {SpeedyDrawableTexture} encodedKeypoints output
- * @param {number} [descriptorSize] in bytes
- * @param {number} [extraSize] in bytes
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const encoderCapacity = this._capacity;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
- const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
- const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
- //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
- const maxSize = Math.max(width, height);
- const keypoints = gpu.programs.keypoints;
-
- // prepare programs
- keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
- keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
- keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
-
- // compute lookup table
- let lookupTable = keypoints.initLookupTable(corners);
- for (let b = 1; b < maxSize; b *= 2) lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
-
- /*
- // debug: view texture
- const lookupView = (keypoints.viewLookupTable.outputs(
- width, height, this._tex[0]
- ))(lookupTable);
- const canvas = gpu.renderToCanvas(lookupView);
- if(!this._ww) document.body.appendChild(canvas);
- this._ww = 1;
- */
-
- // encode keypoints
- return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
- }
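- 
- /*
- // worked example of the lookup-table dimensions above (illustrative only):
- // a 640x480 corners texture has 307200 pixels; ceil(log2(307200)) = 19 and
- // 19 >>> 1 = 9, so the table is 512 pixels wide and ceil(307200 / 512) = 600
- // pixels tall; the sort loop then runs with b = 1, 2, 4, ..., 512 (b < 600).
- const cornersArea = 640 * 480; // 307200
- const lutWidth = 1 << (Math.ceil(Math.log2(cornersArea)) >>> 1); // 512
- const lutHeight = Math.ceil(cornersArea / lutWidth); // 600
- */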
- _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const capacity = this._capacity;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const width = corners.width,
- height = corners.height;
- const imageSize = [width, height];
- const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
- const keypoints = gpu.programs.keypoints;
- const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
-
- // prepare programs
- keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
- keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
- keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
- keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
-
- // copy the input corners to a special texture
- // that is needed by encodeKeypointSkipOffsets()
- corners = gpu.programs.utils.copy.outputs(width, height, specialTexture)(corners);
-
- // encode skip offsets
- let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
- for (let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) {
- // to boost performance
- // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
- // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
- offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
- }
-
- /*
- // debug: view corners
- let cornerview = offsets;
- const canvas = gpu.renderToCanvas(cornerview);
- if(!window._ww) document.body.appendChild(canvas);
- window._ww = 1;
- */
-
- // encode keypoint positions
- let encodedKps = tex[3].clear();
- for (let j = 0; j < ENCODER_PASSES; j++) encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
-
- // encode keypoint properties
- return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Create a tiny texture with zero encoded keypoints
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} encodedKeypoints output texture
- * @param {number} [descriptorSize] in bytes
- * @param {number} [extraSize] in bytes
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const capacity = 0;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const keypoints = gpu.programs.keypoints;
- keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
- return keypoints.encodeNullKeypoints();
- }
-
- /**
- * Allocate RGBA16 textures
- * @param {SpeedyGPU} gpu
- */
- _allocateTex16(gpu) {
- const gl = gpu.gl;
-
- // RGBA16UI is color renderable according to the OpenGL ES 3 spec
- for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
- }
-
- /**
- * Deallocate RGBA16 textures
- * @param {SpeedyGPU} gpu
- */
- _deallocateTex16(gpu) {
- for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = this._tex16[i].release();
- }
-
- /**
- * Compute the length of the keypoint encoder, given its capacity
- * @param {number} encoderCapacity how many keypoints can we fit?
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- */
- static encoderLength(encoderCapacity, descriptorSize, extraSize) {
- const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
- const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
- return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
- }
-
- /**
- * The maximum number of keypoints we can store using
- * a particular configuration of a keypoint encoder
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderLength
- */
- static encoderCapacity(descriptorSize, extraSize, encoderLength) {
- const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
- const numberOfPixels = encoderLength * encoderLength;
- return Math.floor(numberOfPixels / pixelsPerKeypoint);
- }
- }
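- 
- /*
- // worked example of encoderLength() / encoderCapacity(), assuming an 8-byte
- // keypoint header (globals.MIN_KEYPOINT_SIZE = 8, matching the header decoded
- // by the sinks below) and a small globals.MIN_ENCODER_LENGTH:
- // capacity = 800, descriptorSize = 32, extraSize = 0
- // => pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10
- // => numberOfPixels = 8000, encoderLength = ceil(sqrt(8000)) = 90
- // and, in the other direction, encoderCapacity(32, 0, 90) = floor(8100 / 10) = 810
- const len = SpeedyPipelineNodeKeypointDetector.encoderLength(800, 32, 0); // 90
- const cap = SpeedyPipelineNodeKeypointDetector.encoderCapacity(32, 0, len); // 810
- */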
-
- /**
- * Abstract scale-space keypoint detector
- * @abstract
- */
- class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
-
- /** @type {number} number of pyramid levels */
- this._levels = 1;
-
- /** @type {number} scale factor between two pyramid levels */
- this._scaleFactor = DEFAULT_SCALE_FACTOR;
- }
-
- /**
- * Number of pyramid levels
- * @returns {number}
- */
- get levels() {
- return this._levels;
- }
-
- /**
- * Number of pyramid levels
- * @param {number} levels
- */
- set levels(levels) {
- this._levels = Math.max(1, levels | 0);
- }
-
- /**
- * Scale factor between two pyramid levels
- * @returns {number}
- */
- get scaleFactor() {
- return this._scaleFactor;
- }
-
- /**
- * Scale factor between two pyramid levels
- * @param {number} scaleFactor should be greater than 1
- */
- set scaleFactor(scaleFactor) {
- this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
- }
- }
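- 
- /*
- // illustrative sketch (detector stands for any concrete subclass instance):
- // the setters above clamp their input instead of rejecting it.
- detector.levels = 0; // stored as 1 (at least one pyramid level)
- detector.scaleFactor = 3; // stored as 2.0 (clamped to [1.0, 2.0])
- detector.scaleFactor = 1.2; // stored as 1.2
- */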
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * source.js
- * Gets keypoints into the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
- const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
- const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
-
- /**
- * Gets keypoints into the pipeline
- */
- class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
- this._keypoints = [];
-
- /** @type {Float32Array} upload buffer (UBO) */
- this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
-
- /** @type {number} maximum number of keypoints */
- this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
- }
-
- /**
- * Keypoints to be uploaded
- * @returns {SpeedyKeypoint[]}
- */
- get keypoints() {
- return this._keypoints;
- }
-
- /**
- * Keypoints to be uploaded
- * @param {SpeedyKeypoint[]} keypoints
- */
- set keypoints(keypoints) {
- if (!Array.isArray(keypoints)) throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
- this._keypoints = keypoints;
- }
-
- /**
- * The maximum number of keypoints we'll accept.
- * This should be a tight bound for better performance.
- * @returns {number}
- */
- get capacity() {
- return this._capacity;
- }
-
- /**
- * The maximum number of keypoints we'll accept.
- * This should be a tight bound for better performance.
- * @param {number} capacity
- */
- set capacity(capacity) {
- this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- // Orientation, descriptors and extra bytes will be lost
- const descriptorSize = 0,
- extraSize = 0;
- const keypoints = this._keypoints;
- const maxKeypoints = this._capacity;
- const numKeypoints = Math.min(keypoints.length, maxKeypoints);
- const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
- const buffer = this._buffer;
- const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
-
- uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
- let startIndex = 0,
- encodedKeypoints = uploadKeypoints.clear();
- for (let i = 0; i < numPasses; i++) {
- const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
- const endIndex = startIndex + n;
- uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
- encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
- startIndex = endIndex;
- }
- this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Create an upload buffer
- * @param {number} bufferSize number of keypoints
- * @returns {Float32Array}
- */
- static _createUploadBuffer(bufferSize) {
- const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
- utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
- return new Float32Array(internalBuffer);
- }
-
- /**
- * Fill upload buffer with keypoint data
- * @param {Float32Array} buffer
- * @param {SpeedyKeypoint[]} keypoints
- * @param {number} start index, inclusive
- * @param {number} end index, exclusive
- * @returns {Float32Array} buffer
- */
- static _fillUploadBuffer(buffer, keypoints, start, end) {
- const n = end - start;
- for (let i = 0; i < n; i++) {
- const keypoint = keypoints[start + i];
- const hasPos = keypoint.position !== undefined;
- const j = i * 4;
-
- // Format data as follows:
- // vec4(xpos, ypos, lod, score)
- buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
- buffer[j + 1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
- buffer[j + 2] = +keypoint.lod || 0;
- buffer[j + 3] = +keypoint.score || 0;
- }
-
- // done!
- return buffer;
- }
- }
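- 
- /*
- // sanity check of the UBO sizing above (illustrative only): each keypoint is
- // uploaded as one vec4(x, y, lod, score), i.e. SIZEOF_VEC4 = 16 bytes, so a
- // full buffer of BUFFER_SIZE = 1024 keypoints takes exactly 16384 bytes --
- // the minimum UBO size guaranteed by GL ES 3. Larger uploads simply run
- // multiple passes of the shader.
- const bytesPerPass = SIZEOF_VEC4 * BUFFER_SIZE; // 16384 === UBO_MAX_BYTES
- const passesFor3000Keypoints = Math.ceil(3000 / BUFFER_SIZE); // 3
- */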
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint-descriptor.js
- * Keypoint descriptor
- */
-
- /**
- * Represents a keypoint descriptor
- */
- class SpeedyKeypointDescriptor {
- /**
- * Constructor
- * @param {Uint8Array} data descriptor bytes
- */
- constructor(data) {
- this._data = data;
- return Object.freeze(this);
- }
-
- /**
- * Descriptor data
- * @returns {Uint8Array}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The size of the descriptor, in bytes
- * @returns {number}
- */
- get size() {
- return this._data.byteLength;
- }
-
- /**
- * A string representation of the keypoint descriptor
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets keypoints out of the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** smallest power of 2 that is >= x */
- const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
-
- /** empty array of bytes */
- const ZERO_BYTES = new Uint8Array([]);
-
- /**
- * Gets keypoints out of the pipeline
- * @template {SpeedyKeypoint} T
- * @abstract
- */
- class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount]
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
- */
- constructor(name = 'keypoints', texCount = 0, portBuilders = []) {
- super(name, texCount + 2, portBuilders);
-
- /** @type {Array<T|null>} keypoints (output) */
- this._keypoints = [];
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader();
-
- /** @type {number} page flipping index */
- this._page = 0;
-
- /** @type {boolean} accelerate GPU-CPU transfers */
- this._turbo = false;
-
- /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
- this._includeDiscarded = false;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @returns {boolean}
- */
- get turbo() {
- return this._turbo;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @param {boolean} value
- */
- set turbo(value) {
- this._turbo = Boolean(value);
- }
-
- /**
- * Should discarded keypoints be exported as null or dropped altogether?
- * @returns {boolean}
- */
- get includeDiscarded() {
- return this._includeDiscarded;
- }
-
- /**
- * Should discarded keypoints be exported as null or dropped altogether?
- * @param {boolean} value
- */
- set includeDiscarded(value) {
- this._includeDiscarded = Boolean(value);
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<Array<T|null>>}
- */
- export() {
- return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Download and decode keypoints from the GPU
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} encodedKeypoints
- * @param {number} descriptorSize
- * @param {number} extraSize
- * @param {number} encoderLength
- * @returns {SpeedyPromise<void>}
- */
- _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength) {
- const useBufferedDownloads = this._turbo;
-
- /*
- I have found experimentally that, in Firefox, readPixelsAsync()
- performs MUCH better if the width of the target texture is a power
- of two. I have no idea why this is the case, nor if it's related to
- some interaction with the GL drivers, somehow. This seems to make no
- difference on Chrome, however. In any case, let's convert the input
- texture to POT.
- */
- const encoderWidth = sink_nextPot(encoderLength);
- //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
- const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
- //const encoderWidth=encoderLength,encoderHeight=encoderLength;
-
- // copy the set of keypoints to an internal texture
- const copiedTexture = this._tex[this._tex.length - 1 - this._page];
- gpu.programs.utils.copyKeypoints.outputs(encoderWidth, encoderHeight, copiedTexture)(encodedKeypoints);
-
- // flip page
- this._page = 1 - this._page;
-
- // download the internal texture
- return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
- // decode the keypoints and store them in this._keypoints
- this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
- });
- }
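- 
- /*
- // worked example of the POT conversion above (illustrative only):
- // if encoderLength = 90, the copied texture becomes 128 x 64, which holds
- // at least 90 * 90 = 8100 pixels while keeping a power-of-two width.
- const encoderWidth = sink_nextPot(90); // 128
- const encoderHeight = Math.ceil(90 * 90 / encoderWidth); // ceil(63.28) = 64
- */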
-
- /**
- * Decode a sequence of keypoints, given a flattened image of encoded pixels
- * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderWidth
- * @param {number} encoderHeight
- * @returns {Array<T|null>} keypoints
- */
- _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight) {
- const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
- const m = globals.LOG2_PYRAMID_MAX_SCALE,
- h = globals.PYRAMID_MAX_LEVELS;
- const piOver255 = Math.PI / 255.0;
- const keypoints = /** @type {Array<T|null>} */[];
- const includeDiscarded = this._includeDiscarded;
- let descriptorBytes = ZERO_BYTES,
- extraBytes = ZERO_BYTES;
- let x, y, z, w, lod, rotation, score;
- let keypoint;
-
- // validate
- if (descriptorSize % 4 != 0 || extraSize % 4 != 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
-
- // how many bytes should we read?
- const e2 = encoderWidth * encoderHeight * 4;
- const size = pixels.byteLength;
- if (size != e2) utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
-
- // copy the data (we use shared buffers when receiving pixels[])
- if (descriptorSize + extraSize > 0) pixels = new Uint8Array(pixels);
-
- // for each encoded keypoint
- for (let i = 0; i < size; i += bytesPerKeypoint) {
- // extract encoded header
- x = pixels[i + 1] << 8 | pixels[i];
- y = pixels[i + 3] << 8 | pixels[i + 2];
- z = pixels[i + 5] << 8 | pixels[i + 4];
- w = pixels[i + 7] << 8 | pixels[i + 6];
-
- // the keypoint is "null": we have reached the end of the list
- if (x == 0xFFFF && y == 0xFFFF) break;
-
- // the header is zero: discard the keypoint
- if (x + y + z + w == 0) {
- if (includeDiscarded) keypoints.push(null);
- continue;
- }
-
- // extract extra & descriptor bytes
- if (extraSize > 0) {
- extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
- if (extraBytes.byteLength < extraSize) {
- utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
- continue; // something is off here; discard
- }
- }
- if (descriptorSize > 0) {
- descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
- if (descriptorBytes.byteLength < descriptorSize) {
- utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
- continue; // something is off here; discard
- }
- }
-
- // decode position: convert from fixed-point
- x /= globals.FIX_RESOLUTION;
- y /= globals.FIX_RESOLUTION;
-
- // decode level-of-detail
- lod = pixels[i + 4] < 255 ? -m + (m + h) * pixels[i + 4] / 255.0 : 0.0;
-
- // decode orientation
- rotation = (2 * pixels[i + 5] - 255) * piOver255;
-
- // decode score
- score = utils/* Utils */.A.decodeFloat16(w);
-
- // create keypoint
- keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
-
- // register keypoint
- keypoints.push(keypoint);
- }
-
- // done!
- return keypoints;
- }
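- 
- /*
- // byte layout of one encoded keypoint, as decoded above (for reference):
- // bytes 0-1: x position, 16-bit little-endian fixed point (divide by globals.FIX_RESOLUTION)
- // bytes 2-3: y position, same encoding
- // byte 4: level-of-detail (the special value 255 decodes to lod = 0)
- // byte 5: orientation, mapped linearly to [-PI, PI]
- // bytes 6-7: score, half-float (Utils.decodeFloat16)
- // followed by extraSize extra bytes and descriptorSize descriptor bytes.
- // A header with x == y == 0xFFFF ends the list; an all-zero header marks a
- // discarded keypoint.
- */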
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {T}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Allocate extra space
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} output output texture
- * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
- * @param {number} inputDescriptorSize in bytes, must be non-negative
- * @param {number} inputExtraSize must be 0
- * @param {number} outputDescriptorSize must be inputDescriptorSize
- * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
- * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
- */
- _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize) {
- utils/* Utils */.A.assert(inputExtraSize === 0);
- utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
- const inputEncoderLength = inputEncodedKeypoints.width;
- const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
- const outputEncoderCapacity = inputEncoderCapacity;
- const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
- return gpu.programs.keypoints.allocateExtra.outputs(outputEncoderLength, outputEncoderLength, output)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
- }
- }
-
- /**
- * Gets standard keypoints out of the pipeline
- * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
- */
- class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // create keypoint
- return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
- }
- }
-
- /**
- * Gets tracked keypoints out of the pipeline
- * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
- */
- class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const {
- vectors
- } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input('flow').read();
-
- // allocate extra space
- const newDescriptorSize = descriptorSize;
- const newExtraSize = 4; // 1 pixel per flow vector per keypoint
- const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
-
- // attach flow vectors
- const newEncoderLength = encodedKeypointsWithExtraSpace.width;
- const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
-
- // done!
- return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyTrackedKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
- const extraSize = extraBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // read flow vector
- const fx = utils/* Utils */.A.decodeFloat16(extraBytes[1] << 8 | extraBytes[0]);
- const fy = utils/* Utils */.A.decodeFloat16(extraBytes[3] << 8 | extraBytes[2]);
- const flow = new SpeedyVector2(fx, fy);
-
- // create keypoint
- return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
- }
- }
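- 
- /*
- // illustrative sketch: the 4 extra bytes attached to each tracked keypoint
- // hold two little-endian half-floats (flow.x, flow.y), which is why _run()
- // above allocates newExtraSize = 4.
- // given extraBytes = [b0, b1, b2, b3]:
- // flow.x = Utils.decodeFloat16(b1 << 8 | b0)
- // flow.y = Utils.decodeFloat16(b3 << 8 | b2)
- */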
-
- /**
- * Gets matched keypoints out of the pipeline
- * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
- */
- class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const {
- encodedMatches,
- matchesPerKeypoint
- } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */this.input('matches').read();
-
- // allocate space for the matches
- const newDescriptorSize = descriptorSize;
- const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
- const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
-
- // transfer matches to a new texture
- const newEncoderLength = encodedKeypointsWithExtraSpace.width;
- const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
-
- // done!
- return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyMatchedKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
- const extraSize = extraBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // decode matches
- const matchesPerKeypoint = extraSize / 4;
- const matches = /** @type {SpeedyKeypointMatch[]} */new Array(matchesPerKeypoint);
- for (let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
- const base = matchIndex * 4;
- const u32 = extraBytes[base] | extraBytes[base + 1] << 8 | extraBytes[base + 2] << 16 | extraBytes[base + 3] << 24;
- const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
- matches[matchIndex] = match;
- }
-
- // done!
- return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
- }
- }
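- 
- /*
- // illustrative sketch: each match is packed into one 32-bit word; the low
- // globals.MATCH_INDEX_BITS bits hold the index of the matched keypoint and
- // the remaining high bits hold the second argument of SpeedyKeypointMatch
- // (the matching distance). The exact bit split is defined in globals.
- // const index = u32 & globals.MATCH_INDEX_MASK;
- // const distance = u32 >>> globals.MATCH_INDEX_BITS;
- */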
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * clipper.js
- * Keypoint clipper
- */
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const LOG2_STRIDE = 5;
- const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
-
- /**
- * Keypoint clipper: filters the best keypoints from a stream
- */
- class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} the maximum number of keypoints in the output */
- this._size = MAX_SIZE;
- }
-
- /**
- * The maximum number of keypoints in the output
- * @returns {number}
- */
- get size() {
- return this._size;
- }
-
- /**
- * The maximum number of keypoints in the output
- * @param {number} size
- */
- set size(size) {
- this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const keypoints = gpu.programs.keypoints;
- const clipValue = this._size;
- const tex = this._tex;
- const outputTexture = this._tex[3];
-
- // find the capacity of the keypoint stream
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0); // legacy: minimum power of 2 such that pot >= capacity
-
- // find the dimensions of the sorting shaders
- const stride = 1 << LOG2_STRIDE; // must be a power of 2
- //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
- const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
- const numberOfPixels = stride * height;
-
- // find the dimensions of the output texture
- const newCapacity = Math.min(capacity, clipValue);
- const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
-
- // generate permutation of keypoints
- keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
- let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort permutation
- const numPasses = Math.ceil(Math.log2(numberOfPixels));
- keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
- for (let i = 1; i <= numPasses; i++) {
- const blockSize = 1 << i; // 2, 4, 8...
- const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
- permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
- }
-
- // apply permutation
- keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
- keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
-
- /*
- // debug (read the contents of the permutation)
- const pixels = permutation.inspect(gpu), debug = [];
- for(let i = 0; i < pixels.length; i += 4) {
- let id = pixels[i] | (pixels[i+1] << 8);
- let score = pixels[i+2] / 255.0;
- let valid = pixels[i+3] / 255.0;
- debug.push([ id, valid, score, ].join(', '));
- }
- console.log(debug);
- */
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
- }
- }
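- 
- /*
- // worked example of the sorting dimensions above (illustrative only):
- // for a stream with capacity = 500 keypoints, stride = 1 << LOG2_STRIDE = 32,
- // height = ceil(500 / 32) = 16 and numberOfPixels = 512, so the merge loop
- // runs ceil(log2(512)) = 9 passes with block sizes 2, 4, ..., 512.
- const exampleStride = 1 << LOG2_STRIDE; // 32
- const exampleHeight = Math.ceil(500 / exampleStride); // 16
- const examplePasses = Math.ceil(Math.log2(exampleStride * exampleHeight)); // 9
- */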
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * border-clipper.js
- * Keypoint Border Clipper
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * The Border Clipper removes all keypoints within a border of the edges of an image
- */
- class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedySize} image size, in pixels */
- this._imageSize = new SpeedySize(0, 0);
-
- /** @type {SpeedyVector2} border size, in pixels */
- this._borderSize = new SpeedyVector2(0, 0);
- }
-
- /**
- * Image size, in pixels
- * @returns {SpeedySize}
- */
- get imageSize() {
- return this._imageSize;
- }
-
- /**
- * Image size, in pixels
- * @param {SpeedySize} imageSize
- */
- set imageSize(imageSize) {
- this._imageSize = imageSize;
- }
-
- /**
- * Border size, in pixels
- * @returns {SpeedyVector2}
- */
- get borderSize() {
- return this._borderSize;
- }
-
- /**
- * Border size, in pixels
- * @param {SpeedyVector2} borderSize
- */
- set borderSize(borderSize) {
- this._borderSize = borderSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const keypoints = gpu.programs.keypoints;
- const imageSize = this._imageSize;
- const borderSize = this._borderSize;
- const imageWidth = imageSize.width,
- imageHeight = imageSize.height;
- const borderLeft = borderSize.x,
- borderRight = borderSize.x;
- const borderTop = borderSize.y,
- borderBottom = borderSize.y;
- const tex = this._tex;
-
- // validate
- if (imageWidth == 0 || imageHeight == 0) throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
-
- // find the capacity of the keypoint stream
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
-
- // prepare programs
- keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
- keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
- keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
-
- // clip keypoints
- let clippedKeypoints = keypoints.clipBorder(imageWidth, imageHeight, borderTop, borderRight, borderBottom, borderLeft, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort keypoints
- let sortedKeypoints = keypoints.mixKeypointsInit(clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
- clippedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength);
-
- /*
- // debug: view keypoints
- keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
- */
-
- // done!
- this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
- }
- }
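- 
- /*
- // illustrative sketch (clipper stands for an instance of this node; the
- // numbers are arbitrary): imageSize must be set before the node runs,
- // otherwise _run() throws an IllegalOperationError.
- clipper.imageSize = new SpeedySize(640, 480);
- clipper.borderSize = new SpeedyVector2(16, 16); // drop keypoints within 16px of any edge
- */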
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * buffer.js
- * Keypoint Buffer
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint Buffer: a node with memory.
- * At time t, it outputs the keypoints received at time t-1
- */
- class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} current page: 0 or 1 */
- this._pageIndex = 0;
-
- /** @type {boolean} first run? */
- this._initialized = false;
-
- /** @type {number} previous descriptor size, in bytes */
- this._previousDescriptorSize = 0;
-
- /** @type {number} previous extra size, in bytes */
- this._previousExtraSize = 0;
-
- /** @type {number} previous encoder length */
- this._previousEncoderLength = 0;
-
- /** @type {boolean} frozen buffer? */
- this._frozen = false;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @returns {boolean}
- */
- get frozen() {
- return this._frozen;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @param {boolean} value
- */
- set frozen(value) {
- this._frozen = Boolean(value);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const previousDescriptorSize = this._previousDescriptorSize;
- const previousExtraSize = this._previousExtraSize;
- const previousEncoderLength = this._previousEncoderLength;
- const page = this._tex;
- const previousInputTexture = page[1 - this._pageIndex];
- const outputTexture = page[this._pageIndex];
-
- // bufferize
- if (!this._frozen || !this._initialized) {
- // store input
- this._previousDescriptorSize = descriptorSize;
- this._previousExtraSize = extraSize;
- this._previousEncoderLength = encoderLength;
- previousInputTexture.resize(encoderLength, encoderLength);
- encodedKeypoints.copyTo(previousInputTexture);
-
- // page flipping
- this._pageIndex = 1 - this._pageIndex;
- }
-
- // first run?
- if (!this._initialized) {
- this._initialized = true;
- this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
- return;
- }
-
- // done!
- this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
- }
- }
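- 
- /*
- // illustrative timeline: the buffer delays its input by one frame. If frames
- // A, B, C, ... arrive at t = 0, 1, 2, ..., the node outputs A at t = 0 (the
- // first run passes the input through), A at t = 1, B at t = 2, and so on.
- // Setting frozen = true keeps re-emitting the stored frame and discards new
- // input until the buffer is unfrozen.
- */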
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * mixer.js
- * Keypoint Mixer
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint Mixer: merges two sets of keypoints
- */
- class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints), InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in0').read();
- const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in1').read();
- const descriptorSize = kps0.descriptorSize;
- const extraSize = kps0.extraSize;
- const keypoints = gpu.programs.keypoints;
- const tex = this._tex;
-
- // ensure that the format of kps0 equals the format of kps1
- if (!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize)) throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
-
- // find the capacity of kps0 + kps1
- const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
- const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
- const capacity = cap0 + cap1;
-
- // find the dimensions of the output texture
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
-
- // prepare programs
- keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
- keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
- keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
-
- // mix keypoints
- let mixedKeypoints = keypoints.mixKeypointsPreInit(kps0.encodedKeypoints, kps1.encodedKeypoints, kps0.encoderLength, kps1.encoderLength, cap0, cap1, descriptorSize, extraSize, encoderLength);
- let sortedKeypoints = keypoints.mixKeypointsInit(mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
- mixedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength);
-
- /*
- // debug: view keypoints
- keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
- */
-
- this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shuffler.js
- * Keypoint Shuffler
- */
-
-
-
-
-
-
-
-
-
- /**
- * The Keypoint Shuffler shuffles a list of keypoints
- */
- class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} maximum number of keypoints */
- this._maxKeypoints = Number.NaN;
- }
-
- /**
- * Maximum number of keypoints (optional)
- * @returns {number}
- */
- get maxKeypoints() {
- return this._maxKeypoints;
- }
-
- /**
- * Maximum number of keypoints (optional)
- * @param {number} value
- */
- set maxKeypoints(value) {
- this._maxKeypoints = !Number.isNaN(value) ? Math.max(0, value | 0) : Number.NaN;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- let {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const maxKeypoints = this._maxKeypoints;
-
- // shuffle the keypoints (including nulls)
- const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
- const permutationLength = Math.min(permutationMaxLength, capacity);
- const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
- encodedKeypoints = gpu.programs.keypoints.shuffle.setUBO('Permutation', permutation).outputs(encoderLength, encoderLength, this._tex[0])(encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort the keypoints
- gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
- gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
- gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
- let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
- encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // clip the output?
- if (!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
- const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
- encodedKeypoints = gpu.programs.keypoints.clip.outputs(newEncoderLength, newEncoderLength, this._tex[5])(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
- encoderLength = newEncoderLength;
- }
-
- // done!
- this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
- * @param {number} n positive integer
- * @param {number} [bufsize] size of the output array
- * @returns {Int32Array} permutation
- */
- _generatePermutation(n, bufsize = n) {
- const array = new Int32Array(bufsize);
- const p = array.subarray(0, n).fill(-1);
- const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
- for (let i = 0, j = 0; i < n; i++) {
- if (p[i] < 0) {
- do {
- p[i] = q[j++];
- } while (p[i] < i);
- p[p[i]] = i;
- }
- }
- return array; // padded with zeros
- }
- }
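- 
- /*
- // illustrative example: _generatePermutation(4) may return [2, 3, 0, 1]
- // (padded with zeros up to bufsize) -- an involution, since p[p[x]] == x for
- // every x; fixed points (p[x] == x) are also possible.
- const p = [2, 3, 0, 1];
- const isInvolution = p.every((v, x) => p[v] === x); // true
- */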
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * multiplexer.js
- * Keypoint multiplexer
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {string[]} the names of the input ports indexed by their number */
- const multiplexer_INPUT_PORT = ['in0', 'in1'];
-
- /**
- * Keypoint multiplexer
- */
- class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [...multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints)), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} which port should be linked to the output? */
- this._port = 0;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @returns {number}
- */
- get port() {
- return this._port;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @param {number} port
- */
- set port(port) {
- if (port < 0 || port >= multiplexer_INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
- this._port = port | 0;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
- this.output().write(message);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transformer.js
- * Apply a transformation matrix to a set of keypoints
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Apply a transformation matrix to a set of keypoints
- */
- class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyMatrix} transformation matrix */
- this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
- }
-
- /**
- * Transformation matrix
- * @returns {SpeedyMatrix}
- */
- get transform() {
- return this._transform;
- }
-
- /**
- * Transformation matrix. Must be 3x3
- * @param {SpeedyMatrix} transform
- */
- set transform(transform) {
- if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
- this._transform = transform;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const outputTexture = this._tex[0];
- const homography = this._transform.read();
-
- // apply homography
- gpu.programs.keypoints.applyHomography.outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
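-
- // Usage note (added, not in the original source): the transform defaults to the 3x3 identity,
- // so keypoint positions pass through unchanged. A minimal sketch of assigning a uniform 2x
- // scale, assuming a node instance named xf obtained elsewhere; a diagonal matrix is used here
- // so the example does not depend on row- vs column-major entry ordering:
- //
- //   xf.transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [2, 0, 0, 0, 2, 0, 0, 0, 1]);
- //
- //   // a matrix that is not 3x3 would be rejected by the setter above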
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * subpixel.js
- * Subpixel refinement of keypoint location
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
-
- /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
- const METHOD2PROGRAM = Object.freeze({
- 'quadratic1d': 'subpixelQuadratic1d',
- 'taylor2d': 'subpixelTaylor2d',
- 'bicubic-upsample': 'subpixelBicubic',
- 'bilinear-upsample': 'subpixelBilinear'
- });
-
- /**
- * Subpixel refinement of keypoint location
- */
- class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SubpixelRefinementMethod} subpixel refinement method */
- this._method = 'quadratic1d';
-
- /** @type {number} max iterations for the upsampling methods */
- this._maxIterations = 6;
-
- /** @type {number} convergence threshold for the upsampling methods */
- this._epsilon = 0.1;
- }
-
- /**
- * Subpixel refinement method
- * @returns {SubpixelRefinementMethod}
- */
- get method() {
- return this._method;
- }
-
- /**
- * Subpixel refinement method
- * @param {SubpixelRefinementMethod} name
- */
- set method(name) {
- if (!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
- this._method = name;
- }
-
- /**
- * Max. iterations for the upsampling methods
- * @returns {number}
- */
- get maxIterations() {
- return this._maxIterations;
- }
-
- /**
- * Max. iterations for the upsampling methods
- * @param {number} value
- */
- set maxIterations(value) {
- this._maxIterations = Math.max(0, +value);
- }
-
- /**
- * Convergence threshold for the upsampling methods
- * @returns {number}
- */
- get epsilon() {
- return this._epsilon;
- }
-
- /**
- * Convergence threshold for the upsampling methods
- * @param {number} value
- */
- set epsilon(value) {
- this._epsilon = Math.max(0, +value);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read();
- const tex = this._tex;
- const program = METHOD2PROGRAM[this._method];
- const maxIterations = this._maxIterations;
- const epsilon = this._epsilon;
-
- // note: if you detected the keypoints using a pyramid,
- // you need to pass that pyramid as input!
-
- // we'll compute the offsets for each keypoint
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
- const offsets = gpu.programs.keypoints[program].outputs(offsetEncoderLength, offsetEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
-
- // apply the offsets to the keypoints
- const refinedKeypoints = gpu.programs.keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[1])(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
- this.output('displacements').swrite(offsets);
- }
- }
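-
- // Note (added, not in the original source): `maxIterations` and `epsilon` only affect the
- // iterative '*-upsample' methods; 'quadratic1d' and 'taylor2d' appear to be single-shot fits.
- // A minimal configuration sketch, assuming a node instance named refiner obtained elsewhere:
- //
- //   refiner.method = 'bicubic-upsample';
- //   refiner.maxIterations = 10;   // clamped to >= 0 by the setter
- //   refiner.epsilon = 0.05;       // presumably stops early once the refinement step is small enough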
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * fast.js
- * FAST corner detector
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_THRESHOLD = 20;
-
- /**
- * FAST corner detector
- */
- class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} FAST threshold in [0,255] */
- this._threshold = DEFAULT_THRESHOLD;
- }
-
- /**
- * FAST threshold in [0,255]
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * FAST threshold in [0,255]
- * @param {number} threshold
- */
- set threshold(threshold) {
- this._threshold = Math.max(0, Math.min(threshold | 0, 255));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const tex = this._tex;
- const capacity = this._capacity;
- const threshold = this._threshold;
- const lodStep = Math.log2(this.scaleFactor);
- const levels = this.levels;
-
- // validate pyramid
- if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
-
- // skip if the capacity is zero
- if (capacity == 0) {
- const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
- const encoderLength = encodedKeypoints.width;
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- return;
- }
-
- // FAST
- gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
- gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
- let corners = tex[1].clear();
- let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
- for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
- corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
- //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
- }
-
- // Same-scale non-maximum suppression
- // *nicer results inside the loop; faster outside
- // Hard to notice a difference when using FAST
- corners = gpu.programs.keypoints.nonmaxSpace(corners);
-
- // Multi-scale non-maximum suppression
- // (doesn't seem to remove many keypoints)
- if (levels > 1) {
- corners = gpu.programs.keypoints.nonmaxScaleSimple.outputs(width, height, tex[1])(corners, image, lodStep);
- }
-
- // encode keypoints
- let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
- const encoderLength = encodedKeypoints.width;
-
- // scale refinement
- if (levels > 1) {
- encodedKeypoints = gpu.programs.keypoints.refineScaleFAST916.outputs(encoderLength, encoderLength, tex[4])(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
- }
-
- // done!
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- }
- }
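-
- // Worked note (added, not in the original source): with a scaleFactor of 2 the lodStep is
- // log2(2) = 1, so, assuming globals.PYRAMID_MAX_LEVELS >= 3, levels = 3 gives numPasses = 3 and
- // the loop above runs FAST at lod 2, 1 and 0 (coarse to fine). The threshold setter clamps to an
- // integer in [0, 255]; assuming a detector instance named fast obtained elsewhere:
- //
- //   fast.threshold = 300;   // stored as 255
- //   fast.threshold = -5;    // stored as 0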
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * harris.js
- * Harris corner detector
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} program names indexed by window size */
- const HARRIS = Object.freeze({
- 1: 'harris1',
- 3: 'harris3',
- 5: 'harris5',
- 7: 'harris7'
- });
-
- /**
- * Harris corner detector
- */
- class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedySize} neighborhood size */
- this._windowSize = new SpeedySize(3, 3);
-
- /** @type {number} min corner quality in [0,1] */
- this._quality = 0.1;
- }
-
- /**
- * Minimum corner quality in [0,1] - this is a fraction of
- * the largest min. eigenvalue of the autocorrelation matrix
- * over the entire image
- * @returns {number}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Minimum corner quality in [0,1]
- * @param {number} quality
- */
- set quality(quality) {
- this._quality = Math.max(0.0, Math.min(+quality, 1.0));
- }
-
- /**
- * Neighborhood size
- * @returns {SpeedySize}
- */
- get windowSize() {
- return this._windowSize;
- }
-
- /**
- * Neighborhood size
- * @param {SpeedySize} windowSize
- */
- set windowSize(windowSize) {
- const d = windowSize.width;
- if (!(d == windowSize.height && (d == 1 || d == 3 || d == 5 || d == 7))) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
- this._windowSize = windowSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const capacity = this._capacity;
- const quality = this._quality;
- const windowSize = this._windowSize.width;
- const levels = this.levels;
- const lodStep = Math.log2(this.scaleFactor);
- const intFactor = levels > 1 ? this.scaleFactor : 1;
- const harris = gpu.programs.keypoints[HARRIS[windowSize]];
- const tex = this._tex;
-
- // validate pyramid
- if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
-
- // skip if the capacity is zero
- if (capacity == 0) {
- const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
- const encoderLength = encodedKeypoints.width;
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- return;
- }
-
- // compute corner response map
- harris.outputs(width, height, tex[0], tex[1]);
- gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
- gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
- let corners = tex[1].clear();
- let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
- for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
- const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
- const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
- corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
- corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
- }
-
- // Same-scale non-maximum suppression
- // *performs better inside the loop
- //corners = gpu.programs.keypoints.nonmaxSpace(corners);
-
- // Multi-scale non-maximum suppression
- // (doesn't seem to remove many keypoints)
- if (levels > 1) {
- const laplacian = gpu.programs.keypoints.laplacian.outputs(width, height, tex[0])(corners, image, lodStep, 0);
- corners = gpu.programs.keypoints.nonmaxScale.outputs(width, height, tex[2])(corners, image, laplacian, lodStep);
- }
-
- // find the maximum corner response over the entire image
- gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
- numPasses = Math.ceil(Math.log2(Math.max(width, height)));
- let maxScore = corners;
- for (let j = 0; j < numPasses; j++) maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
-
- // discard corners below a quality level
- corners = gpu.programs.keypoints.harrisScoreCutoff.outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])(corners, maxScore, quality);
-
- // encode keypoints
- let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
- const encoderLength = encodedKeypoints.width;
-
- // scale refinement
- if (levels > 1) {
- encodedKeypoints = gpu.programs.keypoints.refineScaleLoG.outputs(encoderLength, encoderLength, tex[5])(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
- }
-
- // done!
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- }
- }
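-
- // Note (added, not in the original source): the maximum corner response is found with a
- // logarithmic reduction: ceil(log2(max(width, height))) passes of harrisScoreFindMax, e.g.
- // ceil(log2(640)) = 10 passes for a 640x480 image. harrisScoreCutoff then discards responses
- // below the `quality` fraction of that maximum, as described in the quality getter above.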
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * descriptor.js
- * Abstract keypoint descriptor
- */
-
-
-
-
-
-
-
-
- /**
- * Abstract keypoint descriptor
- * @abstract
- */
- class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = 0, portBuilders = undefined) {
- super(name, texCount + 1, portBuilders);
- }
-
- /**
- * Allocate space for keypoint descriptors
- * @param {SpeedyGPU} gpu
- * @param {number} inputDescriptorSize should be 0
- * @param {number} inputExtraSize must be non-negative
- * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
- * @param {number} outputExtraSize must be inputExtraSize
- * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints) {
- utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
- utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
- const inputEncoderLength = inputEncodedKeypoints.width;
- const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
- const outputEncoderCapacity = inputEncoderCapacity;
- const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
- const tex = this._tex[this._tex.length - 1];
- return gpu.programs.keypoints.allocateDescriptors.outputs(outputEncoderLength, outputEncoderLength, tex)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
- }
- }
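-
- // Note (added, not in the original source): _allocateDescriptors keeps the input capacity
- // (number of keypoints) unchanged and recomputes the encoder length for the larger per-keypoint
- // record once descriptor bytes are added; the assertions above require outputDescriptorSize to
- // be a non-negative multiple of 4 and outputExtraSize to equal inputExtraSize.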
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * orb.js
- * ORB descriptors
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DESCRIPTOR_SIZE = 32; // 256 bits
-
- /**
- * ORB descriptors
- */
- class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const image = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read()).image;
- const tex = this._tex;
- const outputTexture = this._tex[2];
-
- // compute orientation
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
- const encodedOrientations = gpu.programs.keypoints.orbOrientation.outputs(orientationEncoderLength, orientationEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- const orientedKeypoints = gpu.programs.keypoints.transferOrientation.outputs(encoderLength, encoderLength, tex[1])(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // allocate space
- const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
- const newEncoderLength = encodedKps.width;
-
- // compute descriptors (it's a good idea to blur the image)
- const describedKeypoints = gpu.programs.keypoints.orbDescriptor.outputs(newEncoderLength, newEncoderLength, outputTexture)(image, encodedKps, extraSize, newEncoderLength);
-
- // done!
- this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
- }
- }
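-
- // Note (added, not in the original source): the ORB pass first computes one orientation per
- // keypoint into a small texture (1 pixel per keypoint), transfers it back onto the encoded
- // keypoints, allocates room for the 32-byte (256-bit) descriptors and only then fills them with
- // orbDescriptor. As the inline comment above suggests, feeding a blurred copy of the image tends
- // to make the binary tests more stable; that is a recommendation, not something enforced here.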
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lk.js
- * LK optical-flow
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
- const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
- const DEFAULT_NUMBER_OF_ITERATIONS = 30;
- const DEFAULT_DISCARD_THRESHOLD = 0.0001;
- const DEFAULT_EPSILON = 0.01;
- const LK_PROGRAM = {
- 3: 'lk3',
- 5: 'lk5',
- 7: 'lk7',
- 9: 'lk9',
- 11: 'lk11',
- 13: 'lk13',
- 15: 'lk15',
- 17: 'lk17',
- 19: 'lk19',
- 21: 'lk21'
- };
-
- /**
- * LK optical-flow
- */
- class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SpeedySize} window size */
- this._windowSize = DEFAULT_WINDOW_SIZE;
-
- /** @type {number} number of pyramid levels to use */
- this._levels = DEFAULT_DEPTH;
-
- /** @type {number} minimum acceptable corner response */
- this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
-
- /** @type {number} number of iterations per pyramid level (termination criteria) */
- this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
-
- /** @type {number} minimum increment per iteration (termination criteria) */
- this._epsilon = DEFAULT_EPSILON;
- }
-
- /**
- * Window size (use odd numbers)
- * @returns {SpeedySize}
- */
- get windowSize() {
- return this._windowSize;
- }
-
- /**
- * Window size (use odd numbers)
- * @param {SpeedySize} windowSize must be a square window
- */
- set windowSize(windowSize) {
- if (windowSize.width != windowSize.height) {
- throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
- } else if (!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
- const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a, b) => a - b).map(k => k + 'x' + k).join(', ');
- throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
- }
- this._windowSize = windowSize;
- }
-
- /**
- * Number of pyramid levels to use
- * @returns {number}
- */
- get levels() {
- return this._levels;
- }
-
- /**
- * Number of pyramid levels to use
- * @param {number} levels
- */
- set levels(levels) {
- utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
- this._levels = levels | 0;
- }
-
- /**
- * Get the discard threshold, used to discard "bad" keypoints
- * @returns {number}
- */
- get discardThreshold() {
- return this._discardThreshold;
- }
-
- /**
- * Set the discard threshold, used to discard "bad" keypoints
- * @param {number} value typically 10^(-4) - increase to discard more
- */
- set discardThreshold(value) {
- utils/* Utils */.A.assert(value >= 0);
- this._discardThreshold = +value;
- }
-
- /**
- * Get the maximum number of iterations of the pyramidal LK algorithm
- * @returns {number}
- */
- get numberOfIterations() {
- return this._numberOfIterations;
- }
-
- /**
- * Set the maximum number of iterations of the pyramidal LK algorithm
- * @param {number} value
- */
- set numberOfIterations(value) {
- utils/* Utils */.A.assert(value >= 1);
- this._numberOfIterations = value | 0;
- }
-
- /**
- * Get the accuracy threshold, used to stop LK iterations
- * @returns {number}
- */
- get epsilon() {
- return this._epsilon;
- }
-
- /**
- * Set the accuracy threshold, used to stop LK iterations
- * @param {number} value typically 0.01
- */
- set epsilon(value) {
- utils/* Utils */.A.assert(value >= 0);
- this._epsilon = +value;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('previousKeypoints').read();
- const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('previousImage').read()).image;
- const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('nextImage').read()).image;
- const previousKeypoints = encodedKeypoints;
- const levels = this._levels;
- const windowSize = this._windowSize;
- const wsize = windowSize.width; // square window
- const numberOfIterations = this._numberOfIterations;
- const discardThreshold = this._discardThreshold;
- const epsilon = this._epsilon;
- const keypoints = gpu.programs.keypoints;
- const tex = this._tex;
-
- // do we need a pyramid?
- if (!(levels == 1 || previousImage.hasMipmaps() && nextImage.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);else if (previousImage.width !== nextImage.width || previousImage.height !== nextImage.height) throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
-
- // select the appropriate program
- const lk = keypoints[LK_PROGRAM[wsize]];
-
- // find the dimensions of the flow texture (1 pixel per flow vector)
- const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
- lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
-
- // compute optical-flow
- let flow = lk.clear();
- for (let lod = levels - 1; lod >= 0; lod--) flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
-
- // transfer optical-flow to nextKeypoints
- keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
- const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
- this.output('flow').swrite(flow);
- }
- }
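-
- // Sizing note (added, not in the original source): the flow texture holds one pixel per
- // keypoint, so for a capacity of, say, 200 keypoints lkEncoderLength = ceil(sqrt(200)) = 15.
- // The loop above runs coarse to fine, from lod = levels - 1 down to 0, and each level iterates
- // internally up to numberOfIterations times or until the update falls below epsilon (the
- // termination criteria documented in the fields above).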
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lsh-static-tables.js
- * Static LSH tables
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Static LSH tables
- */
- class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.LSHTables)]);
-
- /** @type {SpeedyKeypoint[]} "training" keypoints */
- this._keypoints = [];
-
- /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
- this._keypointsCopy = [];
-
- /** @type {number} number of tables in the LSH data structure */
- this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
-
- /** @type {number} number of bits of a hash */
- this._hashSize = LSH_DEFAULT_HASH_SIZE;
-
- /** @type {SpeedyLSH|null} LSH data structure */
- this._lsh = null;
- }
-
- /**
- * "Training" keypoints
- * @returns {SpeedyKeypoint[]}
- */
- get keypoints() {
- return this._keypoints;
- }
-
- /**
- * "Training" keypoints
- * @param {SpeedyKeypoint[]} keypoints
- */
- set keypoints(keypoints) {
- if (!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint))) throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
- if (this._keypoints !== keypoints) {
- this._keypoints = keypoints; // update internal pointer
- this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
- this._lsh = null; // (re)train the model
- }
- }
-
- /**
- * Number of tables in the LSH data structure
- * @returns {number}
- */
- get numberOfTables() {
- return this._numberOfTables;
- }
-
- /**
- * Number of tables in the LSH data structure
- * @param {number} n
- */
- set numberOfTables(n) {
- if (!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
- if (n !== this._numberOfTables) {
- this._numberOfTables = n | 0;
- this._lsh = null; // need to retrain the model
- }
- }
-
- /**
- * Number of bits of a hash
- * @returns {number}
- */
- get hashSize() {
- return this._hashSize;
- }
-
- /**
- * Number of bits of a hash
- * @param {number} h
- */
- set hashSize(h) {
- if (!LSH_ACCEPTABLE_HASH_SIZES.includes(h)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
- if (h !== this._hashSize) {
- this._hashSize = h | 0;
- this._lsh = null; // need to retrain the model
- }
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- // Need to train the model?
- if (this._lsh == null) {
- // internal work textures are only available after initialization,
- // i.e., after calling this._init()
- this._lsh = this._train();
- }
-
- // Pass it forward
- this.output().swrite(this._lsh);
- }
-
- /**
- * Train the model
- * @returns {SpeedyLSH}
- */
- _train() {
- const keypoints = this._keypointsCopy;
- const numberOfTables = this._numberOfTables;
- const hashSize = this._hashSize;
- if (keypoints.find(keypoint => keypoint.descriptor == null)) throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
- const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
- const lshTables = this._tex[0];
- const descriptorDB = this._tex[1];
- return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
- }
- }
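-
- // Note (added, not in the original source): training is lazy. Assigning new keypoints, a new
- // numberOfTables or a new hashSize nulls the internal model, which is then rebuilt on the next
- // run of the pipeline (inside _run, after the work textures exist). The keypoint array is copied
- // on assignment, so mutating the original array afterwards has no effect until the `keypoints`
- // property is assigned again; every keypoint must carry a descriptor or _train throws.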
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lsh-knn.js
- * K approximate nearest neighbors matcher
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
-
- /** @type {number} how many neighbors to search for, by default */
- const DEFAULT_K = 1;
-
- /** @type {LSHKNNQualityLevel} default quality level */
- const DEFAULT_QUALITY = 'default';
-
- /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
- const NUMBER_OF_BIT_SWAPS = {
- 'fastest': 0,
- 'default': 1,
- 'demanding': 2
- };
-
- /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
- const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o, d) => (o[d] = fd(d), o), {}))(d => (fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o, h) => (o[h] = fh(h), o), {}))(h => (fl => [0, 1, 2].reduce((o, l) => (o[l] = fl(l), o), {}))(l => `lshKnn${d}h${h}lv${l}`)));
-
- /**
- * K approximate nearest neighbors matcher
- */
- class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
-
- /** @type {number} how many neighbors do you want? */
- this._k = DEFAULT_K;
-
- /** @type {LSHKNNQualityLevel} quality of the matching */
- this._quality = DEFAULT_QUALITY;
- }
-
- /**
- * How many neighbors do you want?
- * @returns {number}
- */
- get k() {
- return this._k;
- }
-
- /**
- * How many neighbors do you want?
- * @param {number} k number of neighbors
- */
- set k(k) {
- this._k = Math.max(1, k | 0);
- }
-
- /**
- * Quality of the matching
- * @returns {LSHKNNQualityLevel}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Quality of the matching
- * @param {LSHKNNQualityLevel} quality
- */
- set quality(quality) {
- if (!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
- this._quality = quality;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- /** @type {SpeedyLSH} */
- const lsh = this.input('lsh').read().lsh;
- const keypoints = gpu.programs.keypoints;
- const tables = lsh.tables;
- const descriptorDB = lsh.descriptorDB;
- const tablesStride = tables.width;
- const descriptorDBStride = descriptorDB.width;
- const tableCount = lsh.tableCount;
- const hashSize = lsh.hashSize;
- const bucketCapacity = lsh.bucketCapacity;
- const bucketsPerTable = lsh.bucketsPerTable;
- const sequences = lsh.sequences;
- const candidatesA = this._tex[0];
- const candidatesB = this._tex[1];
- const candidatesC = this._tex[2];
- const filters = this._tex[3];
- const transferA = this._tex[4];
- const transferB = this._tex[5];
- const level = NUMBER_OF_BIT_SWAPS[this._quality];
- const matchesPerKeypoint = this._k;
-
- // validate parameters
- if (descriptorSize !== lsh.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
-
- // configure the output texture
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
- let encodedMatches = transferB;
- keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
-
- // prepare the LSH matching
- const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
- keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
- keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
- const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
- lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
- lshKnn.setUBO('LSHSequences', sequences);
-
- // match keypoints
- encodedMatches.clear();
- keypoints.lshKnnInitFilters();
- for (let i = 0; i < matchesPerKeypoint; i++) {
- // find the (i+1)-th best match
- let candidates = keypoints.lshKnnInitCandidates();
- for (let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
- candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- gpu.gl.flush();
- }
- candidates.copyTo(filters);
-
- // transfer matches to an encoded matches texture
- encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
- }
-
- // done
- this.output().swrite(encodedMatches, matchesPerKeypoint);
-
- /*
- // debug
- let data = filters.inspect32(gpu), debug = [];
- for(let i = 0; i < data.length; i++) {
- const bits = MATCH_INDEX_BITS;
- const mask = (1 << bits) - 1;
- const u32 = data[i];
- const index = u32 & mask, distance = u32 >>> bits;
- //debug.push('|'+[ u32 ].toString());
- debug.push('|'+[ index, distance ].toString());
- }
- console.log(debug.join(','));
- */
- }
- }
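-
- // Note (added, not in the original source): the k-NN search runs k rounds (k = this._k). Each
- // round scans every LSH table to find the next-best candidate for each keypoint, then copies the
- // round's result into the `filters` texture so those matches are excluded from the following
- // round; lshKnnTransfer accumulates the per-round results into the output matches.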
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * bf-knn.js
- * Brute Force KNN Keypoint Matcher
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} program name indexed by descriptor size */
- const PROGRAM_NAME = {
- 32: 'bfMatcher32',
- 64: 'bfMatcher64'
- };
-
- /**
- * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
- * invoking this (use a database of 50 keypoints or so - your mileage may vary)
- */
- class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
-
- /** @type {number} number of matches per keypoint (the "k" of knn) */
- this._matchesPerKeypoint = 1;
- }
-
- /**
- * Number of matches per keypoint
- * @returns {number}
- */
- get k() {
- return this._matchesPerKeypoint;
- }
-
- /**
- * Number of matches per keypoint
- * @param {number} value
- */
- set k(value) {
- this._matchesPerKeypoint = Math.max(1, value | 0);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const database = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('database').read();
- const candidatesA = this._tex[0];
- const candidatesB = this._tex[1];
- const candidatesC = this._tex[2];
- const encodedFiltersA = this._tex[3];
- const encodedMatchesA = this._tex[4];
- const encodedMatchesB = this._tex[5];
- const matchesPerKeypoint = this._matchesPerKeypoint;
- const keypoints = gpu.programs.keypoints;
-
- // validate parameters
- if (descriptorSize !== database.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);else if (!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
-
- // prepare the brute force matching
- const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
- const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
- const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
- const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
- const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
- keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
- keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
- keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
- bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
-
- // match keypoints
- let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
- let encodedFilters = keypoints.bfMatcherInitFilters();
- for (let k = 0; k < matchesPerKeypoint; k++) {
- let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
-
- // find the (k+1)-th best match
- for (let passId = 0; passId < numberOfPasses; passId++) {
- encodedPartialMatches = bfMatcher(encodedPartialMatches, encodedFilters, partialMatcherLength, database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength, encodedKeypoints, descriptorSize, extraSize, encoderLength, passId);
- gpu.gl.flush();
- }
- //gpu.gl.flush();
-
- // copy the (k+1)-th best match to the filter
- if (matchesPerKeypoint > 1) encodedPartialMatches.copyTo(encodedFilters);
-
- // aggregate matches
- encodedMatches = keypoints.bfMatcherTransfer(encodedMatches, encodedPartialMatches, matchesPerKeypoint, k);
- }
-
- // done!
- this.output().swrite(encodedMatches, matchesPerKeypoint);
- }
- }
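-
- // Note (added, not in the original source): the database is scanned in chunks of
- // NUMBER_OF_KEYPOINTS_PER_PASS keypoints per pass (a constant defined in the matcher program),
- // hence numberOfPasses = ceil(dbCapacity / numberOfKeypointsPerPass). As in the LSH matcher,
- // k rounds find successive best matches, and each round's result is copied into the filter
- // texture so it is skipped in the next round (only needed when k > 1, as guarded above).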
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * distance-filter.js
- * Given a set of pairs of keypoints, discard all pairs whose distance is
- * above a user-defined threshold. Useful for bidirectional optical-flow.
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Given a set of pairs of keypoints, discard all pairs whose distance is
- * above a user-defined threshold. Useful for bidirectional optical-flow.
- *
- * The pairs of keypoints are provided as two separate sets, "in" and
- * "reference". Keypoints that are kept will have their data extracted
- * from the "in" set.
- */
- class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} maximum accepted distance */
- this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
- }
-
- /**
- * Maximum accepted distance
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * Maximum accepted distance
- * @param {number} value
- */
- set threshold(value) {
- this._threshold = Math.max(0, +value);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
- const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
- const threshold = this._threshold;
-
- // validate shapes
- if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
-
- // calculate the shape of the output
- const outputTexture = this._tex[0];
- const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
- const descriptorSize = set0.descriptorSize;
- const extraSize = set0.extraSize;
-
- // apply the distance filter
- gpu.programs.keypoints.distanceFilter.outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
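-
- // Note (added, not in the original source): the default threshold is MAX_TEXTURE_LENGTH + 1,
- // i.e., larger than any displacement that fits in a texture, so every pair is kept until a
- // tighter value is set. For bidirectional optical-flow one would typically track forward, track
- // back again, feed the original keypoints as "reference" and keep only those whose round-trip
- // position stays within the threshold.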
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * hamming-distance-filter.js
- * Given a set of pairs of keypoints, discard all pairs whose hamming
- * distance (of descriptor) is above a user-defined threshold
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} program names indexed by descriptor size */
- const hamming_distance_filter_PROGRAM_NAME = {
- 32: 'hammingDistanceFilter32',
- 64: 'hammingDistanceFilter64'
- };
-
- /**
- * Given a set of pairs of keypoints, discard all pairs whose hamming
- * distance (of descriptor) is above a user-defined threshold
- *
- * The pairs of keypoints are provided as two separate sets, "in" and
- * "reference". Keypoints that are kept will have their data extracted
- * from the "in" set.
- */
- class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} distance threshold, an integer */
- this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
- }
-
- /**
- * Distance threshold, an integer
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * Distance threshold, an integer
- * @param {number} value
- */
- set threshold(value) {
- this._threshold = Math.max(0, value | 0);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
- const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
- const threshold = this._threshold;
-
- // validate shapes
- if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
-
- // validate descriptor size
- if (!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
-
- // calculate the shape of the output
- const outputTexture = this._tex[0];
- const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
- const descriptorSize = set0.descriptorSize;
- const extraSize = set0.extraSize;
-
- // apply the distance filter
- const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
- gpu.programs.keypoints[program].outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
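-
- // Note (added, not in the original source): only 32- and 64-byte descriptors are supported (see
- // hamming_distance_filter_PROGRAM_NAME), and the default threshold of MAX_DESCRIPTOR_SIZE * 8
- // bits accepts every pair, so the filter keeps everything until a smaller bit distance is set.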
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * portal.js
- * Keypoint Portals
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A sink of a Keypoint Portal
- * This is not a pipeline sink - it doesn't export any data!
- */
- class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} descriptor size, in bytes */
- this._descriptorSize = 0;
-
- /** @type {number} extra size, in bytes */
- this._extraSize = 0;
-
- /** @type {number} encoder length */
- this._encoderLength = 0;
-
- /** @type {boolean} is this node initialized? */
- this._initialized = false;
- }
-
- /**
- * Encoded keypoints
- * @returns {SpeedyTexture}
- */
- get encodedKeypoints() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._tex[0];
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._descriptorSize;
- }
-
- /**
- * Extra size, in bytes
- * @returns {number}
- */
- get extraSize() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._extraSize;
- }
-
- /**
- * Encoder length
- * @returns {number}
- */
- get encoderLength() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._encoderLength;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
- this._tex[0].resize(encoderLength, encoderLength).clearToColor(1, 1, 1, 1); // initial texture
- this._descriptorSize = this._extraSize = 0;
- this._encoderLength = encoderLength;
- this._initialized = true;
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const tex = this._tex[0];
-
- // copy input
- tex.resize(encodedKeypoints.width, encodedKeypoints.height);
- encodedKeypoints.copyTo(tex);
- this._descriptorSize = descriptorSize;
- this._extraSize = extraSize;
- this._encoderLength = encoderLength;
- }
- }
-
- /**
- * A source of a Keypoint Portal
- */
- class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
- this._source = null;
- }
-
- /**
- * Data source
- * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
- */
- get source() {
- return this._source;
- }
-
- /**
- * Data source
- * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
- */
- set source(node) {
- if (node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
- this._source = node;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
- this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
- }
- }
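- /**
- * Usage sketch: a Keypoint Portal hands keypoints from one pipeline to
- * another without a GPU-CPU transfer. The sink keeps a copy of the encoded
- * keypoints texture; a source in a second pipeline re-emits that data once
- * its source property points to the sink. This is a minimal sketch that
- * assumes the usual speedy-vision port wiring (output().connectTo(input()));
- * the detector and tracker nodes are illustrative.
- *
- * @example
- * // pipeline A: ... -> keypoint detector -> portal sink
- * const portalSink = new SpeedyPipelineNodeKeypointPortalSink('portal');
- * detector.output().connectTo(portalSink.input());
- *
- * // pipeline B: portal source -> keypoint tracker -> ...
- * const portalSource = new SpeedyPipelineNodeKeypointPortalSource();
- * portalSource.source = portalSink; // link the two endpoints
- * portalSource.output().connectTo(tracker.input());
- */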
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * keypoint-factory.js
- * Keypoint-related nodes
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint detectors
- */
- class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * FAST corner detector
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeFASTKeypointDetector}
- */
- static FAST(name = undefined) {
- return new SpeedyPipelineNodeFASTKeypointDetector(name);
- }
-
- /**
- * Harris corner detector
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
- */
- static Harris(name = undefined) {
- return new SpeedyPipelineNodeHarrisKeypointDetector(name);
- }
- }
-
- /**
- * Keypoint descriptors
- */
- class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * ORB descriptors
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
- */
- static ORB(name = undefined) {
- return new SpeedyPipelineNodeORBKeypointDescriptor(name);
- }
- }
-
- /**
- * Keypoint trackers
- */
- class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * LK optical-flow
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeLKKeypointTracker}
- */
- static LK(name = undefined) {
- return new SpeedyPipelineNodeLKKeypointTracker(name);
- }
- }
-
- /**
- * Keypoint matchers
- */
- class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Static LSH tables
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeStaticLSHTables}
- */
- static StaticLSHTables(name = undefined) {
- return new SpeedyPipelineNodeStaticLSHTables(name);
- }
-
- /**
- * LSH-based K-approximate nearest neighbors
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
- */
- static LSHKNN(name = undefined) {
- return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
- }
-
- /**
- * Brute-force K-nearest neighbors keypoint matcher
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
- */
- static BFKNN(name = undefined) {
- return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
- }
- }
-
- /**
- * Portal nodes
- */
- class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create a keypoint portal source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeKeypointPortalSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeKeypointPortalSource(name);
- }
-
- /**
- * Create a keypoint portal sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeKeypointPortalSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeKeypointPortalSink(name);
- }
- }
-
- /**
- * Keypoint-related nodes
- */
- class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Keypoint detectors
- * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
- */
- static get Detector() {
- return SpeedyPipelineKeypointDetectorFactory;
- }
-
- /**
- * Keypoint descriptors
- * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
- */
- static get Descriptor() {
- return SpeedyPipelineKeypointDescriptorFactory;
- }
-
- /**
- * Keypoint trackers
- * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
- */
- static get Tracker() {
- return SpeedyPipelineKeypointTrackerFactory;
- }
-
- /**
- * Keypoint matchers
- * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
- */
- static get Matcher() {
- return SpeedyPipelineKeypointMatcherFactory;
- }
-
- /**
- * Keypoint Portals
- * @returns {typeof SpeedyPipelineKeypointPortalFactory}
- */
- static get Portal() {
- return SpeedyPipelineKeypointPortalFactory;
- }
-
- /**
- * Create a keypoint source
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeKeypointSource(name);
- }
-
- /**
- * Create a keypoint sink
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeKeypointSink(name);
- }
-
- /**
- * Create a sink of tracked keypoints
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeTrackedKeypointSink}
- */
- static SinkOfTrackedKeypoints(name = undefined) {
- return new SpeedyPipelineNodeTrackedKeypointSink(name);
- }
-
- /**
- * Create a sink of matched keypoints
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeMatchedKeypointSink}
- */
- static SinkOfMatchedKeypoints(name = undefined) {
- return new SpeedyPipelineNodeMatchedKeypointSink(name);
- }
-
- /**
- * Keypoint clipper
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointClipper}
- */
- static Clipper(name = undefined) {
- return new SpeedyPipelineNodeKeypointClipper(name);
- }
-
- /**
- * Border Clipper
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointBorderClipper}
- */
- static BorderClipper(name = undefined) {
- return new SpeedyPipelineNodeKeypointBorderClipper(name);
- }
-
- /**
- * Create a keypoint buffer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointBuffer}
- */
- static Buffer(name = undefined) {
- return new SpeedyPipelineNodeKeypointBuffer(name);
- }
-
- /**
- * Create a keypoint mixer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointMixer}
- */
- static Mixer(name = undefined) {
- return new SpeedyPipelineNodeKeypointMixer(name);
- }
-
- /**
- * Create a keypoint shuffler
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointShuffler}
- */
- static Shuffler(name = undefined) {
- return new SpeedyPipelineNodeKeypointShuffler(name);
- }
-
- /**
- * Create a keypoint multiplexer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointMultiplexer}
- */
- static Multiplexer(name = undefined) {
- return new SpeedyPipelineNodeKeypointMultiplexer(name);
- }
-
- /**
- * Create a keypoint transformer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointTransformer}
- */
- static Transformer(name = undefined) {
- return new SpeedyPipelineNodeKeypointTransformer(name);
- }
-
- /**
- * Create a subpixel refiner of keypoint locations
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
- */
- static SubpixelRefiner(name = undefined) {
- return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
- }
-
- /**
- * Distance filter
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeDistanceFilter}
- */
- static DistanceFilter(name = undefined) {
- return new SpeedyPipelineNodeKeypointDistanceFilter(name);
- }
-
- /**
- * Hamming distance filter
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeHammingDistanceFilter}
- */
- static HammingDistanceFilter(name = undefined) {
- return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets 2D vectors out of the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
- // next power of 2
- const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
-
- /**
- * Gets 2D vectors out of the pipeline
- */
- class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'vec2') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SpeedyVector2[]} 2D vectors (output) */
- this._vectors = [];
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader();
-
- /** @type {number} page flipping index */
- this._page = 0;
-
- /** @type {boolean} accelerate GPU-CPU transfers */
- this._turbo = false;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @returns {boolean}
- */
- get turbo() {
- return this._turbo;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @param {boolean} value
- */
- set turbo(value) {
- this._turbo = Boolean(value);
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<SpeedyVector2[]>}
- */
- export() {
- return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- vectors
- } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input().read();
- const useBufferedDownloads = this._turbo;
- const encoderLength = vectors.width;
-
- /*
- I have found experimentally that, in Firefox, readPixelsAsync()
- performs MUCH better if the width of the target texture is a power
- of two. I have no idea why this is the case, nor if it's related to
- some interaction with the GL drivers, somehow. This seems to make no
- difference on Chrome, however. In any case, let's convert the input
- texture to POT.
- */
- const encoderWidth = vector2_sink_nextPot(encoderLength);
- const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
- //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
-
- // copy the set of vectors to an internal texture
- const copiedTexture = this._tex[this._page];
- gpu.programs.utils.copy2DVectors.outputs(encoderWidth, encoderHeight, copiedTexture)(vectors);
-
- // flip page
- this._page = 1 - this._page;
-
- // download the internal texture
- return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
- this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
- });
- }
-
- /**
- * Decode a sequence of vectors, given a flattened image of encoded pixels
- * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
- * @param {number} encoderWidth
- * @param {number} encoderHeight
- * @returns {SpeedyVector2[]} vectors
- */
- static _decode(pixels, encoderWidth, encoderHeight) {
- const bytesPerVector = 4; // 1 pixel per vector
- const vectors = [];
- let hi = 0,
- lo = 0;
- let x = 0,
- y = 0;
-
- // how many bytes should we read?
- const e2 = encoderWidth * encoderHeight * bytesPerVector;
- const size = Math.min(pixels.length, e2);
-
- // for each encoded vector
- for (let i = 0; i < size; i += bytesPerVector) {
- // extract 16-bit words
- lo = pixels[i + 1] << 8 | pixels[i];
- hi = pixels[i + 3] << 8 | pixels[i + 2];
-
- // the vector is "null": we have reached the end of the list
- if (lo == 0xFFFF && hi == 0xFFFF) break;
-
- // the vector must be discarded
- if (lo == 0xFF00 && hi == 0xFF00) continue;
-
- // decode floats
- x = utils/* Utils */.A.decodeFloat16(lo);
- y = utils/* Utils */.A.decodeFloat16(hi);
-
- // register vector
- vectors.push(new SpeedyVector2(x, y));
- }
-
- // done!
- return vectors;
- }
- }
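- /**
- * Worked example of the layout consumed by _decode(): each vector is one
- * RGBA pixel holding two little-endian 16-bit words (x in RG, y in BA),
- * assuming Utils.decodeFloat16 decodes IEEE 754 half-precision values.
- *
- * @example
- * // pixel (0x00, 0x3C, 0x00, 0xC0): lo = 0x3C00 -> x = 1, hi = 0xC000 -> y = -2
- * // pixel (0x00, 0xFF, 0x00, 0xFF): lo = hi = 0xFF00 -> vector is discarded
- * // pixel (0xFF, 0xFF, 0xFF, 0xFF): lo = hi = 0xFFFF -> end of the list
- * SpeedyPipelineNodeVector2Sink._decode(
- *     new Uint8Array([0x00, 0x3C, 0x00, 0xC0, 0xFF, 0xFF, 0xFF, 0xFF]), 2, 1
- * ); // ~> [ SpeedyVector2(1, -2) ]
- */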
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * vector2-factory.js
- * 2D vectors
- */
-
-
-
-
- /**
- * 2D vectors
- */
- class SpeedyPipelineVector2Factory extends Function {
- /**
- * Constructor
- */
- constructor() {
- // This factory can be invoked as a function
- super('...args', 'return this._create(...args)');
- return this.bind(this);
- }
-
- /**
- * @private
- *
- * Create a 2D vector
- * @param {number} x x-coordinate
- * @param {number} y y-coordinate
- * @returns {SpeedyVector2}
- */
- _create(x, y) {
- return new SpeedyVector2(x, y);
- }
-
- /**
- * Create a Vector2 sink
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeVector2Sink}
- */
- Sink(name = undefined) {
- return new SpeedyPipelineNodeVector2Sink(name);
- }
- }
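- /**
- * Because SpeedyPipelineVector2Factory extends Function and binds itself in
- * its constructor, the object exposed as Speedy.Vector2 can be invoked
- * directly and also used as a namespace. A minimal sketch (variable names
- * are illustrative):
- *
- * @example
- * const v = Speedy.Vector2(3, 4);            // calls _create(3, 4) -> SpeedyVector2
- * const sink = Speedy.Vector2.Sink('flow');  // a SpeedyPipelineNodeVector2Sink
- */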
- ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * fps-counter.js
- * An FPS counter
- */
-
-
-
- /** @const {number} update interval in milliseconds */
- const UPDATE_INTERVAL = 500;
-
- /** @type {FPSCounter|null} Singleton */
- let instance = null;
-
- /**
- * FPS counter
- */
- class FPSCounter {
- /**
- * Creates a new FPSCounter
- * @private
- */
- constructor() {
- /** @type {number} current FPS rate */
- this._fps = 60;
-
- /** @type {number} frame counter */
- this._frames = 0;
-
- /** @type {number} update interval in milliseconds */
- this._updateInterval = UPDATE_INTERVAL;
-
- /** @type {number} time of the last update */
- this._lastUpdate = performance.now();
-
- /** @type {function(): void} bound update function */
- this._boundUpdate = this._update.bind(this);
-
- // this should never happen...
- if (instance !== null) throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
-
- // start FPS counter
- this._boundUpdate();
- }
-
- /**
- * Gets an instance of the FPS counter.
- * We use lazy loading, i.e., we will not
- * create an FPS counter unless we need to!
- * @returns {FPSCounter}
- */
- static get instance() {
- if (instance === null) instance = new FPSCounter();
- return instance;
- }
-
- /**
- * Get the FPS rate
- * @returns {number} frames per second
- */
- get fps() {
- return this._fps;
- }
-
- /**
- * Updates the FPS counter
- */
- _update() {
- const now = performance.now();
- const deltaTime = now - this._lastUpdate;
- if (deltaTime >= this._updateInterval) {
- this._fps = Math.round(this._frames / (deltaTime * 0.001));
- this._frames = 0;
- this._lastUpdate = now;
- }
- this._frames++;
- requestAnimationFrame(this._boundUpdate);
- }
- }
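- /**
- * Example of the update arithmetic above: 30 frames counted over a 500 ms
- * interval yield fps = round(30 / 0.5) = 60. The value is read through the
- * lazy singleton, typically via the public getter:
- *
- * @example
- * console.log(Speedy.fps); // equivalent to FPSCounter.instance.fps
- */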
- ;// CONCATENATED MODULE: ./src/main.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * main.js
- * The entry point of the library
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /* eslint-disable no-undef */
- /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
- /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
- /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
- /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
-
- // Constants
-
- /** @type {SpeedyMatrixFactory} */
- const matrixFactory = new SpeedyMatrixFactory();
-
- /** @type {SpeedyPipelineVector2Factory} */
- const vector2Factory = new SpeedyPipelineVector2Factory();
-
- /**
- * GPU-accelerated Computer Vision for JavaScript
- */
- class Speedy {
- /**
- * The version of the library
- * @returns {string}
- */
- static get version() {
- if (false) {}else return "0.9.1";
- }
-
- /**
- * Checks if Speedy can be executed in this machine & browser
- * @returns {boolean}
- */
- static isSupported() {
- return typeof WebAssembly !== 'undefined' && typeof WebGL2RenderingContext !== 'undefined' && speedy_gl/* SpeedyGL */.c.instance.gl != null;
- }
-
- /**
- * Global settings
- * @returns {typeof Settings}
- */
- static get Settings() {
- return settings/* Settings */.w;
- }
-
- /**
- * Create a 2D vector
- * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
- */
- static get Vector2() {
- return vector2Factory;
- }
-
- /**
- * Create a 2D point
- * @param {number} x
- * @param {number} y
- * @returns {SpeedyPoint2}
- */
- static Point2(x, y) {
- return new SpeedyPoint2(x, y);
- }
-
- /**
- * Create a new size object
- * @param {number} width
- * @param {number} height
- * @returns {SpeedySize}
- */
- static Size(width, height) {
- return new SpeedySize(width, height);
- }
-
- /**
- * Create a Matrix (entries are given in column-major format)
- * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
- */
- static get Matrix() {
- return matrixFactory;
- }
-
- /**
- * Speedy Promises
- * @returns {typeof SpeedyPromise}
- */
- static get Promise() {
- return speedy_promise/* SpeedyPromise */.i;
- }
-
- /**
- * Create a new Pipeline
- * @returns {SpeedyPipeline}
- */
- static Pipeline() {
- return new SpeedyPipeline();
- }
-
- /**
- * Image-related nodes
- * @returns {typeof SpeedyPipelineImageFactory}
- */
- static get Image() {
- return SpeedyPipelineImageFactory;
- }
-
- /**
- * Image filters
- * @returns {typeof SpeedyPipelineFilterFactory}
- */
- static get Filter() {
- return SpeedyPipelineFilterFactory;
- }
-
- /**
- * Image transforms
- * @returns {typeof SpeedyPipelineTransformFactory}
- */
- static get Transform() {
- return SpeedyPipelineTransformFactory;
- }
-
- /**
- * Keypoint-related nodes
- * @returns {typeof SpeedyPipelineKeypointFactory}
- */
- static get Keypoint() {
- return SpeedyPipelineKeypointFactory;
- }
-
- /**
- * Loads a SpeedyMedia object based on the provided source element
- * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
- * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static load(sourceElement, options = {}) {
- return SpeedyMedia.load(sourceElement, options);
- }
-
- /**
- * Loads a camera stream
- * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
- * @param {number} [height] height of the stream
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static camera(widthOrConstraints = 640, height = 360) {
- const constraints = typeof widthOrConstraints === 'object' ? widthOrConstraints : {
- audio: false,
- video: {
- width: widthOrConstraints | 0,
- height: height | 0
- }
- };
- return utils/* Utils */.A.requestCameraStream(constraints).then(video => SpeedyMedia.load(video));
- }
-
- /**
- * Utilities to query information about the graphics driver
- * @returns {typeof SpeedyPlatform}
- */
- static get Platform() {
- return SpeedyPlatform;
- }
-
- /**
- * The FPS rate
- * @returns {number} Frames per second (FPS)
- */
- static get fps() {
- return FPSCounter.instance.fps;
- }
- }
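- /**
- * A minimal end-to-end sketch of the API defined above, assuming the
- * standard speedy-vision pipeline conventions: an Image.Source node with a
- * media property, a Filter.Greyscale() node, port wiring via connectTo(),
- * and a keypoint sink whose default output key is 'keypoints'.
- *
- * @example
- * const media = await Speedy.load(document.querySelector('video'));
- * const pipeline = Speedy.Pipeline();
- * const source = Speedy.Image.Source();
- * const greyscale = Speedy.Filter.Greyscale();
- * const fast = Speedy.Keypoint.Detector.FAST();
- * const sink = Speedy.Keypoint.Sink();
- * source.media = media;
- * source.output().connectTo(greyscale.input());
- * greyscale.output().connectTo(fast.input());
- * fast.output().connectTo(sink.input());
- * pipeline.init(source, greyscale, fast, sink);
- * const { keypoints } = await pipeline.run();
- */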
-
- // Freeze the namespace
- Object.freeze(Speedy);
-
- // Display a notice
- utils/* Utils */.A.log(`Speedy Vision version ${Speedy.version}. ` + `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` + "https://github.com/alemart/speedy-vision");
-
- // Big-endian machine? Currently untested.
- if (!globals.LITTLE_ENDIAN) utils/* Utils */.A.warning('Running on a big-endian machine');
- })();
-
- __nested_webpack_exports__ = __nested_webpack_exports__["default"];
- /******/ return __nested_webpack_exports__;
- /******/ })()
- ;
- });
-
- /***/ })
-
- /******/ });
- /************************************************************************/
- /******/ // The module cache
- /******/ var __webpack_module_cache__ = {};
- /******/
- /******/ // The require function
- /******/ function __webpack_require__(moduleId) {
- /******/ // Check if module is in cache
- /******/ var cachedModule = __webpack_module_cache__[moduleId];
- /******/ if (cachedModule !== undefined) {
- /******/ return cachedModule.exports;
- /******/ }
- /******/ // Create a new module (and put it into the cache)
- /******/ var module = __webpack_module_cache__[moduleId] = {
- /******/ // no module.id needed
- /******/ // no module.loaded needed
- /******/ exports: {}
- /******/ };
- /******/
- /******/ // Execute the module function
- /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
- /******/
- /******/ // Return the exports of the module
- /******/ return module.exports;
- /******/ }
- /******/
- /************************************************************************/
- /******/ /* webpack/runtime/compat get default export */
- /******/ (() => {
- /******/ // getDefaultExport function for compatibility with non-harmony modules
- /******/ __webpack_require__.n = (module) => {
- /******/ var getter = module && module.__esModule ?
- /******/ () => (module['default']) :
- /******/ () => (module);
- /******/ __webpack_require__.d(getter, { a: getter });
- /******/ return getter;
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/define property getters */
- /******/ (() => {
- /******/ // define getter functions for harmony exports
- /******/ __webpack_require__.d = (exports, definition) => {
- /******/ for(var key in definition) {
- /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
- /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
- /******/ }
- /******/ }
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/hasOwnProperty shorthand */
- /******/ (() => {
- /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
- /******/ })();
- /******/
- /************************************************************************/
- var __webpack_exports__ = {};
- // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
- (() => {
- "use strict";
-
- // EXPORTS
- __webpack_require__.d(__webpack_exports__, {
- "default": () => (/* binding */ Martins)
- });
-
- // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
- var speedy_vision = __webpack_require__(774);
- var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
- ;// CONCATENATED MODULE: ./src/utils/errors.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * errors.ts
- * Error classes
- */
- /**
- * Generic error class
- */
- class MartinsError extends Error {
- /**
- * Constructor
- * @param message error message
- * @param cause cause of the error
- */
- constructor(message = '', cause = null) {
- super(message);
- this.cause = cause;
- }
- /*{
- // incorrect when minified
- //return this.constructor.name;
- }*/
- /**
- * Convert to string
- */
- toString() {
- const extendedMessage = this.cause ? '\n-> ' + this.cause.toString() : '';
- if (this.message != '')
- return this.name + ': ' + this.message + extendedMessage;
- else
- return this.name + extendedMessage;
- }
- }
- /**
- * A method has received one or more illegal arguments
- */
- class IllegalArgumentError extends MartinsError {
- get name() {
- return 'IllegalArgumentError';
- }
- }
- /**
- * The method arguments are valid, but the method can't be called due to the
- * current state of the object
- */
- class IllegalOperationError extends MartinsError {
- get name() {
- return 'IllegalOperationError';
- }
- }
- /**
- * The requested operation is not supported
- */
- class NotSupportedError extends MartinsError {
- get name() {
- return 'NotSupportedError';
- }
- }
- /**
- * Access denied
- */
- class AccessDeniedError extends MartinsError {
- get name() {
- return 'AccessDeniedError';
- }
- }
- /**
- * Timeout
- */
- class TimeoutError extends MartinsError {
- get name() {
- return 'TimeoutError';
- }
- }
- /**
- * Assertion error
- */
- class AssertionError extends MartinsError {
- get name() {
- return 'AssertionError';
- }
- }
- /**
- * Tracking error
- */
- class TrackingError extends MartinsError {
- get name() {
- return 'TrackingError';
- }
- }
- /**
- * Detection error
- */
- class DetectionError extends MartinsError {
- get name() {
- return 'DetectionError';
- }
- }
- /**
- * Training error
- */
- class TrainingError extends MartinsError {
- get name() {
- return 'TrainingError';
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/resolution.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * resolution.ts
- * Resolution utilities
- */
-
- /** Reference heights when in landscape mode, measured in pixels */
- const REFERENCE_HEIGHT = {
- 'xs': 120,
- 'xs+': 160,
- 'sm': 200,
- 'sm+': 240,
- 'md': 320,
- 'md+': 360,
- 'lg': 480,
- 'lg+': 600,
- };
- /**
- * Convert a resolution type to a (width, height) pair
- * @param resolution resolution type
- * @param aspectRatio desired width / height ratio
- * @returns size in pixels
- */
- function computeResolution(resolution, aspectRatio) {
- const referenceHeight = REFERENCE_HEIGHT[resolution];
- let width = 0, height = 0;
- if (aspectRatio >= 1) {
- // landscape
- height = referenceHeight;
- width = Math.round(height * aspectRatio);
- width -= width % 2;
- }
- else {
- // portrait
- width = referenceHeight;
- height = Math.round(width / aspectRatio);
- height -= height % 2;
- }
- return speedy_vision_default().Size(width, height);
- }
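- /**
- * Worked example: computeResolution('md', 16/9) takes the landscape branch,
- * so height = 320 and width = round(320 * 16/9) = 569, which is then rounded
- * down to the nearest even number. In portrait mode the roles are swapped.
- *
- * @example
- * computeResolution('md', 16/9); // Speedy.Size(568, 320)
- * computeResolution('md', 9/16); // Speedy.Size(320, 568)
- */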
-
- ;// CONCATENATED MODULE: ./src/utils/utils.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * utils.ts
- * Generic utilities
- */
-
-
-
- /**
- * Generic utilities
- */
- class Utils {
- /**
- * Log a message
- * @param message
- * @param args optional additional messages
- */
- static log(message, ...args) {
- console.log('[martins-js]', message, ...args);
- }
- /**
- * Display a warning
- * @param message
- * @param args optional additional messages
- */
- static warning(message, ...args) {
- console.warn('[martins-js]', message, ...args);
- }
- /**
- * Display an error message
- * @param message
- * @param args optional additional messages
- */
- static error(message, ...args) {
- console.error('[martins-js]', message, ...args);
- }
- /**
- * Assertion
- * @param expr expression
- * @param errorMessage optional error message
- * @throws {AssertionError}
- */
- static assert(expr, errorMessage = '') {
- if (!expr)
- throw new AssertionError(errorMessage);
- }
- /**
- * Returns a range [0, 1, ..., n-1]
- * @param n non-negative integer
- * @returns range from 0 to n-1, inclusive
- */
- static range(n) {
- if ((n |= 0) < 0)
- throw new IllegalArgumentError();
- return Array.from({ length: n }, (_, i) => i);
- }
- /**
- * Convert a resolution type to a resolution measured in pixels
- * @param resolution resolution type
- * @param aspectRatio width / height ratio
- * @returns resolution measured in pixels
- */
- static resolution(resolution, aspectRatio) {
- return computeResolution(resolution, aspectRatio);
- }
- /**
- * Returns a string containing platform brand information
- * @returns platform brand information
- */
- static platformString() {
- return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
- navigator.userAgentData.platform : // use only low entropy data
- navigator.platform // navigator.platform is deprecated
- )(navigator);
- }
- /**
- * Checks if we're on iOS
- * @returns true if we're on iOS
- */
- static isIOS() {
- // at the time of this writing, navigator.userAgentData is not yet
- // compatible with Safari. navigator.platform is deprecated, but
- // predictable.
- if (/(iOS|iPhone|iPad|iPod)/i.test(navigator.platform))
- return true;
- if (/Mac/i.test(navigator.platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
- return navigator.maxTouchPoints > 2;
- return false;
- }
- /**
- * Checks if we're on a WebKit-based browser
- * @returns true if we're on a WebKit-based browser
- */
- static isWebKit() {
- // note: navigator.vendor is deprecated
- if (/Apple/.test(navigator.vendor))
- return true;
- // Can a non WebKit-based browser pass this test?
- // Test masked GL_RENDERER == "Apple GPU" (valid since Feb 2020)
- // https://bugs.webkit.org/show_bug.cgi?id=207608
- /*
- if(Speedy.Platform.renderer == 'Apple GPU' && Speedy.Platform.vendor == 'Apple Inc.')
- return true;
- */
- // Desktop and Mobile Safari, Epiphany on Linux
- if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
- return true;
- // Chrome, Firefox, Edge on iOS
- if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
- return true;
- // not WebKit
- return false;
- }
- /**
- * Device-specific information for debugging purposes
- */
- static deviceInfo() {
- return 'Device info: ' + JSON.stringify({
- isIOS: Utils.isIOS(),
- isWebKit: Utils.isWebKit(),
- renderer: (speedy_vision_default()).Platform.renderer,
- vendor: (speedy_vision_default()).Platform.vendor,
- screen: [screen.width, screen.height].join('x'),
- platform: [navigator.platform, navigator.vendor].join('; '),
- userAgent: navigator.userAgent,
- userAgentData: navigator.userAgentData || null,
- }, null, 2);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * ar-events.ts
- * AR-related Events
- */
- /**
- * AR Event
- */
- class AREvent extends Event {
- /**
- * Constructor
- * @param type event type
- */
- constructor(type) {
- super(type);
- }
- /**
- * Event type
- */
- get type() {
- return super.type;
- }
- }
- /**
- * AR Event Target
- */
- class AREventTarget {
- /**
- * Constructor
- */
- constructor() {
- this._delegate = new EventTarget();
- }
- /**
- * Add event listener
- * @param type event type
- * @param callback
- */
- addEventListener(type, callback) {
- this._delegate.addEventListener(type, callback);
- }
- /**
- * Remove event listener
- * @param type event type
- * @param callback
- */
- removeEventListener(type, callback) {
- this._delegate.removeEventListener(type, callback);
- }
- /**
- * Synchronously trigger an event
- * @param event
- * @returns same value as a standard event target
- * @internal
- */
- dispatchEvent(event) {
- return this._delegate.dispatchEvent(event);
- }
- }
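- /**
- * A minimal sketch of how AREvent and AREventTarget are combined (it mirrors
- * the ViewportEvent / ViewportEventTarget pair defined further below; the
- * 'resize' event type is illustrative):
- *
- * @example
- * class MyTarget extends AREventTarget { }
- * const target = new MyTarget();
- * target.addEventListener('resize', event => console.log(event.type));
- * target.dispatchEvent(new AREvent('resize')); // logs 'resize'
- */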
-
- ;// CONCATENATED MODULE: ./src/core/hud.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * hud.ts
- * Heads Up Display
- */
-
-
- /**
- * Heads Up Display: an overlay displayed in front of the augmented scene
- */
- class HUD {
- /**
- * Constructor
- * @param parent parent of the hud container
- * @param hudContainer an existing hud container (optional)
- */
- constructor(parent, hudContainer) {
- this._container = hudContainer || this._createContainer(parent);
- this._ownContainer = (hudContainer == null);
- // validate
- if (this._container.parentElement !== parent)
- throw new IllegalArgumentError('The container of the HUD must be a direct child of the container of the viewport');
- // the HUD should be hidden initially
- if (!this._container.hidden)
- Utils.warning(`The container of the HUD should have the hidden attribute`);
- }
- /**
- * The container of the HUD
- */
- get container() {
- return this._container;
- }
- /**
- * Whether or not the HUD is visible
- */
- get visible() {
- return !this._container.hidden;
- }
- /**
- * Whether or not the HUD is visible
- */
- set visible(visible) {
- this._container.hidden = !visible;
- }
- /**
- * Initialize the HUD
- * @param zIndex the z-index of the container
- * @internal
- */
- _init(zIndex) {
- const container = this._container;
- container.style.position = 'absolute';
- container.style.left = container.style.top = '0px';
- container.style.right = container.style.bottom = '0px';
- container.style.padding = container.style.margin = '0px';
- container.style.zIndex = String(zIndex);
- container.style.userSelect = 'none';
- }
- /**
- * Release the HUD
- * @internal
- */
- _release() {
- if (this._ownContainer) {
- this._ownContainer = false;
- this._container.remove();
- }
- }
- /**
- * Create a HUD container as an immediate child of the input node
- * @param parent parent container
- * @returns HUD container
- */
- _createContainer(parent) {
- const node = document.createElement('div');
- node.hidden = true;
- parent.appendChild(node);
- return node;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/viewport.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewport.ts
- * Viewport
- */
-
-
-
-
-
- /** An event emitted by a Viewport */
- class ViewportEvent extends AREvent {
- }
- /** Viewport event target */
- class ViewportEventTarget extends AREventTarget {
- }
- /** Default viewport constructor settings */
- const DEFAULT_VIEWPORT_SETTINGS = {
- container: null,
- hudContainer: null,
- resolution: 'lg',
- style: 'best-fit',
- canvas: null,
- };
- /** Z-index of the viewport container */
- const CONTAINER_ZINDEX = 1000000000;
- /** Base z-index of the children of the viewport container */
- const BASE_ZINDEX = 0;
- /** Z-index of the background canvas */
- const BACKGROUND_ZINDEX = BASE_ZINDEX + 0;
- /** Z-index of the foreground canvas */
- const FOREGROUND_ZINDEX = BASE_ZINDEX + 1;
- /** Z-index of the HUD */
- const HUD_ZINDEX = BASE_ZINDEX + 2;
- /** Default viewport width, in pixels */
- const DEFAULT_VIEWPORT_WIDTH = 300;
- /** Default viewport height, in pixels */
- const DEFAULT_VIEWPORT_HEIGHT = 150;
- /**
- * Viewport
- */
- class BaseViewport extends ViewportEventTarget {
- /**
- * Constructor
- * @param viewportSettings
- */
- constructor(viewportSettings) {
- super();
- const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
- const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
- // validate settings
- if (settings.container == null)
- throw new IllegalArgumentError('Unspecified viewport container');
- else if (!(settings.container instanceof HTMLElement))
- throw new IllegalArgumentError('Invalid viewport container');
- // initialize attributes
- this._resolution = settings.resolution;
- this._container = settings.container;
- this._hud = new HUD(settings.container, settings.hudContainer);
- // make this more elegant?
- // need to initialize this._style and validate settings.style
- this._style = DEFAULT_VIEWPORT_SETTINGS.style;
- this.style = settings.style;
- // create the background canvas
- this.__backgroundCanvas = this._createBackgroundCanvas(this._container, size);
- // create the foreground canvas
- if (settings.canvas == null) {
- this._foregroundCanvas = this._createForegroundCanvas(this._container, size);
- this._parentOfImportedForegroundCanvas = null;
- }
- else {
- this._foregroundCanvas = settings.canvas;
- this._parentOfImportedForegroundCanvas = settings.canvas.parentNode;
- }
- }
- /**
- * Make a request to the user agent so that the viewport container is
- * displayed in fullscreen mode. The container must be a compatible element[1]
- * and the user must interact with the page in order to comply with browser
- * policies[2]. In case of error, the returned promise is rejected.
- * [1] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#compatible_elements
- * [2] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#security
- */
- requestFullscreen() {
- const container = this._container;
- // fallback for older WebKit versions
- if (container.requestFullscreen === undefined) {
- if (container.webkitRequestFullscreen === undefined)
- return speedy_vision_default().Promise.reject(new NotSupportedError());
- else if (!document.webkitFullscreenEnabled)
- return speedy_vision_default().Promise.reject(new AccessDeniedError());
- // webkitRequestFullscreen() does not return a value
- container.webkitRequestFullscreen();
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- setTimeout(() => {
- if (container === document.webkitFullscreenElement) {
- Utils.log('Entering fullscreen mode...');
- resolve();
- }
- else
- reject(new TypeError());
- }, 100);
- });
- }
- // check if the fullscreen mode is available
- if (!document.fullscreenEnabled)
- return speedy_vision_default().Promise.reject(new AccessDeniedError());
- // request fullscreen
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- container.requestFullscreen({
- navigationUI: 'hide'
- }).then(() => {
- Utils.log('Entering fullscreen mode...');
- resolve();
- }, reject);
- });
- }
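- /**
- * Since browsers only grant fullscreen in response to a user gesture, the
- * expected call site is an input event handler. A sketch (the button element
- * and the viewport variable are illustrative):
- *
- * @example
- * button.addEventListener('click', () => {
- *     viewport.requestFullscreen().catch(err =>
- *         Utils.error('Failed to enter fullscreen mode', err)
- *     );
- * });
- */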
- /**
- * Exit fullscreen mode
- */
- exitFullscreen() {
- // fallback for older WebKit versions
- if (document.exitFullscreen === undefined) {
- const doc = document;
- if (doc.webkitExitFullscreen === undefined)
- return speedy_vision_default().Promise.reject(new NotSupportedError());
- else if (doc.webkitFullscreenElement === null)
- return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
- // webkitExitFullscreen() does not return a value
- doc.webkitExitFullscreen();
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- setTimeout(() => {
- if (doc.webkitFullscreenElement === null) {
- Utils.log('Exiting fullscreen mode...');
- resolve();
- }
- else
- reject(new TypeError());
- }, 100);
- });
- }
- // error if not in fullscreen mode
- if (document.fullscreenElement === null)
- return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
- // exit fullscreen
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- document.exitFullscreen().then(() => {
- Utils.log('Exiting fullscreen mode...');
- resolve();
- }, reject);
- });
- }
- /** Is the fullscreen mode available? */
- isFullscreenAvailable() {
- return document.fullscreenEnabled ||
- !!(document.webkitFullscreenEnabled);
- }
- /**
- * True if the viewport is being displayed in fullscreen mode
- */
- get fullscreen() {
- if (document.fullscreenElement !== undefined)
- return document.fullscreenElement === this._container;
- else if (document.webkitFullscreenElement !== undefined)
- return document.webkitFullscreenElement === this._container;
- else
- return false;
- }
- /**
- * Viewport container
- */
- get container() {
- return this._container;
- }
- /**
- * Viewport style
- */
- get style() {
- return this._style;
- }
- /**
- * Set viewport style
- */
- set style(value) {
- if (value != 'best-fit' && value != 'stretch' && value != 'inline')
- throw new IllegalArgumentError('Invalid viewport style: ' + value);
- const changed = (value != this._style);
- this._style = value;
- if (changed) {
- const event = new ViewportEvent('resize');
- this.dispatchEvent(event);
- }
- }
- /**
- * HUD
- */
- get hud() {
- return this._hud;
- }
- /**
- * Resolution of the virtual scene
- */
- get resolution() {
- return this._resolution;
- }
- /**
- * Size in pixels of the drawing buffer of the canvas
- * on which the virtual scene will be drawn
- */
- get virtualSize() {
- const aspectRatio = this._backgroundCanvas.width / this._backgroundCanvas.height;
- return Utils.resolution(this._resolution, aspectRatio);
- }
- /**
- * The canvas on which the virtual scene will be drawn
- */
- get canvas() {
- return this._foregroundCanvas;
- }
- /**
- * The canvas on which the physical scene will be drawn
- * @internal
- */
- get _backgroundCanvas() {
- return this.__backgroundCanvas;
- }
- /**
- * Size of the drawing buffer of the background canvas, in pixels
- * @internal
- */
- get _realSize() {
- throw new IllegalOperationError();
- }
- /**
- * Initialize the viewport (when the session starts)
- * @internal
- */
- _init() {
- // import foreground canvas
- if (this._parentOfImportedForegroundCanvas != null) {
- const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
- this._importForegroundCanvas(this._foregroundCanvas, this._container, size);
- }
- // setup CSS
- this._container.style.touchAction = 'none';
- this._container.style.backgroundColor = 'black';
- this._container.style.zIndex = String(CONTAINER_ZINDEX);
- // initialize the HUD
- this._hud._init(HUD_ZINDEX);
- this._hud.visible = true;
- }
- /**
- * Release the viewport (when the session ends)
- * @internal
- */
- _release() {
- // release the HUD
- this._hud._release();
- // reset the CSS
- this._container.style.touchAction = 'auto';
- // restore imported canvas
- if (this._parentOfImportedForegroundCanvas != null)
- this._restoreImportedForegroundCanvas();
- }
- /**
- * Create a canvas and attach it to another HTML element
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createCanvas(parent, size) {
- const canvas = document.createElement('canvas');
- canvas.width = size.width;
- canvas.height = size.height;
- parent.appendChild(canvas);
- return canvas;
- }
- /**
- * Create the background canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createBackgroundCanvas(parent, size) {
- const canvas = this._createCanvas(parent, size);
- return this._styleCanvas(canvas, BACKGROUND_ZINDEX);
- }
- /**
- * Create the foreground canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createForegroundCanvas(parent, size) {
- const canvas = this._createCanvas(parent, size);
- return this._styleCanvas(canvas, FOREGROUND_ZINDEX);
- }
- /**
- * Import an existing foreground canvas to the viewport
- * @param canvas existing canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns the input canvas
- */
- _importForegroundCanvas(canvas, parent, size) {
- if (!(canvas instanceof HTMLCanvasElement))
- throw new IllegalArgumentError('Not a canvas: ' + canvas);
- // borrow the canvas; add it as a child of the viewport container
- canvas.remove();
- parent.appendChild(canvas);
- canvas.width = size.width;
- canvas.height = size.height;
- canvas.dataset.cssText = canvas.style.cssText; // save CSS
- canvas.style.cssText = ''; // clear CSS
- this._styleCanvas(canvas, FOREGROUND_ZINDEX);
- return canvas;
- }
- /**
- * Restore a previously imported foreground canvas to its original parent
- */
- _restoreImportedForegroundCanvas() {
- // not an imported canvas; nothing to do
- if (this._parentOfImportedForegroundCanvas == null)
- throw new IllegalOperationError();
- const canvas = this._foregroundCanvas;
- canvas.style.cssText = canvas.dataset.cssText || ''; // restore CSS
- canvas.remove();
- this._parentOfImportedForegroundCanvas.appendChild(canvas);
- }
- /**
- * Add suitable CSS rules to a canvas
- * @param canvas
- * @param zIndex z-index to be applied to the canvas
- * @returns canvas
- */
- _styleCanvas(canvas, zIndex) {
- canvas.style.position = 'absolute';
- canvas.style.left = '0px';
- canvas.style.top = '0px';
- canvas.style.width = '100%';
- canvas.style.height = '100%';
- canvas.style.zIndex = String(zIndex);
- return canvas;
- }
- }
- /**
- * Viewport decorator
- */
- class ViewportDecorator extends ViewportEventTarget {
- /**
- * Constructor
- * @param base to be decorated
- * @param getSize size getter
- */
- constructor(base, getSize) {
- super();
- this._base = base;
- this._getSize = getSize;
- }
- /**
- * Viewport container
- */
- get container() {
- return this._base.container;
- }
- /**
- * Viewport style
- */
- get style() {
- return this._base.style;
- }
- /**
- * Set viewport style
- */
- set style(value) {
- this._base.style = value;
- }
- /**
- * HUD
- */
- get hud() {
- return this._base.hud;
- }
- /**
- * Fullscreen mode
- */
- get fullscreen() {
- return this._base.fullscreen;
- }
- /**
- * Resolution of the virtual scene
- */
- get resolution() {
- return this._base.resolution;
- }
- /**
- * Size in pixels of the drawing buffer of the canvas
- * on which the virtual scene will be drawn
- */
- get virtualSize() {
- return this._base.virtualSize;
- }
- /**
- * The canvas on which the virtual scene will be drawn
- */
- get canvas() {
- return this._base.canvas;
- }
- /**
- * Request fullscreen mode
- */
- requestFullscreen() {
- return this._base.requestFullscreen();
- }
- /**
- * Exit fullscreen mode
- */
- exitFullscreen() {
- return this._base.exitFullscreen();
- }
- /**
- * Is the fullscreen mode available?
- */
- isFullscreenAvailable() {
- return this._base.isFullscreenAvailable();
- }
- /**
- * Background canvas
- * @internal
- */
- get _backgroundCanvas() {
- return this._base._backgroundCanvas;
- }
- /**
- * Size of the drawing buffer of the background canvas, in pixels
- * @internal
- */
- get _realSize() {
- return this._getSize();
- }
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- this._base._init();
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this._base._release();
- }
- /**
- * Add event listener
- * @param type event type
- * @param callback
- */
- addEventListener(type, callback) {
- this._base.addEventListener(type, callback);
- }
- /**
- * Remove event listener
- * @param type event type
- * @param callback
- */
- removeEventListener(type, callback) {
- this._base.removeEventListener(type, callback);
- }
- /**
- * Synchronously trigger an event
- * @param event
- * @returns same value as a standard event target
- * @internal
- */
- dispatchEvent(event) {
- return this._base.dispatchEvent(event);
- }
- }
- /**
- * A viewport that watches for page resizes
- */
- class ResizableViewport extends ViewportDecorator {
- /**
- * Constructor
- * @param base to be decorated
- * @param getSize size getter
- */
- constructor(base, getSize) {
- super(base, getSize);
- this._active = false;
- // keep a stable reference to the bound handler, so that the same
- // function can be removed in _release()
- this._boundResize = this._resize.bind(this);
- this.addEventListener('resize', this._onResize.bind(this));
- }
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- super._init();
- this._active = true;
- // Configure the resize listener. We want the viewport
- // to adjust itself if the phone/screen is resized or
- // changes orientation
- let timeout = null;
- const onWindowResize = () => {
- if (!this._active) {
- window.removeEventListener('resize', onWindowResize);
- return;
- }
- if (timeout !== null)
- clearTimeout(timeout);
- timeout = setTimeout(() => {
- timeout = null;
- this._resize();
- }, 50);
- };
- window.addEventListener('resize', onWindowResize);
- // handle changes of orientation
- // (is this needed? we already listen to resize events)
- if (screen.orientation !== undefined)
- screen.orientation.addEventListener('change', this._boundResize);
- else
- window.addEventListener('orientationchange', this._boundResize); // deprecated
- // trigger a resize to setup the sizes / the CSS
- this._resize();
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- if (screen.orientation !== undefined)
- screen.orientation.removeEventListener('change', this._boundResize);
- else
- window.removeEventListener('orientationchange', this._boundResize); // deprecated
- this._active = false;
- super._release();
- }
- /**
- * Trigger a resize event
- */
- _resize() {
- const event = new ViewportEvent('resize');
- this.dispatchEvent(event);
- }
- /**
- * Function to be called when the viewport is resized
- */
- _onResize() {
- // Resize the drawing buffer of the foreground canvas, so that it
- // matches the desired resolution, as well as the aspect ratio of the
- // background canvas
- const foregroundCanvas = this.canvas;
- const virtualSize = this.virtualSize;
- foregroundCanvas.width = virtualSize.width;
- foregroundCanvas.height = virtualSize.height;
- // Resize the drawing buffer of the background canvas
- const backgroundCanvas = this._backgroundCanvas;
- const realSize = this._realSize;
- backgroundCanvas.width = realSize.width;
- backgroundCanvas.height = realSize.height;
- }
- }
- /**
- * Immersive viewport: it occupies the entire page
- */
- class ImmersiveViewport extends ResizableViewport {
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this.canvas.remove();
- this._backgroundCanvas.remove();
- this.hud.visible = false;
- this.container.style.cssText = ''; // reset CSS
- super._release();
- }
- /**
- * Resize the immersive viewport, so that it occupies the entire page.
- * We respect the aspect ratio of the source media
- */
- _onResize() {
- super._onResize();
- const container = this.container;
- container.style.position = 'fixed';
- if (this.style == 'best-fit') {
- // cover the page while maintaining the aspect ratio
- let viewportWidth = 0, viewportHeight = 0;
- const windowAspectRatio = window.innerWidth / window.innerHeight;
- const viewportAspectRatio = this._realSize.width / this._realSize.height;
- if (viewportAspectRatio <= windowAspectRatio) {
- viewportHeight = window.innerHeight;
- viewportWidth = (viewportHeight * viewportAspectRatio) | 0;
- }
- else {
- viewportWidth = window.innerWidth;
- viewportHeight = (viewportWidth / viewportAspectRatio) | 0;
- }
- container.style.left = `calc(50% - ${(viewportWidth + 1) >>> 1}px)`;
- container.style.top = `calc(50% - ${(viewportHeight + 1) >>> 1}px)`;
- container.style.width = viewportWidth + 'px';
- container.style.height = viewportHeight + 'px';
- }
- else if (this.style == 'stretch') {
- // stretch to cover the entire page
- container.style.left = '0px';
- container.style.top = '0px';
- container.style.width = window.innerWidth + 'px';
- container.style.height = window.innerHeight + 'px';
- }
- else
- throw new IllegalOperationError('Invalid immersive viewport style: ' + this.style);
- }
- }
- /**
- * Inline viewport: it follows the typical flow of a web page
- */
- class InlineViewport extends ResizableViewport {
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- super._init();
- this.style = 'inline';
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this.container.style.cssText = ''; // reset CSS
- super._release();
- }
- /**
- * Resize the inline viewport
- * (we still take orientation changes into account)
- */
- _onResize() {
- super._onResize();
- const container = this.container;
- container.style.position = 'relative';
- if (this.style == 'inline') {
- container.style.left = '0px';
- container.style.top = '0px';
- container.style.width = this.virtualSize.width + 'px';
- container.style.height = this.virtualSize.height + 'px';
- }
- else
- throw new IllegalOperationError('Invalid inline viewport style: ' + this.style);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/stats.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * stats.ts
- * Stats for performance measurements
- */
- /** update interval, given in seconds */
- const UPDATE_INTERVAL = 0.5;
- /**
- * Stats for performance measurements
- */
- class Stats {
- /**
- * Constructor
- */
- constructor() {
- this._timeOfLastUpdate = this._now();
- this._partialCycleCount = 0;
- this._cyclesPerSecond = 0;
- }
- /**
- * Update stats - call every frame
- */
- update() {
- const now = this._now();
- ++this._partialCycleCount;
- if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
- this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
- this._partialCycleCount = 0;
- this._timeOfLastUpdate = now;
- }
- }
- /**
- * Reset stats
- */
- reset() {
- this._timeOfLastUpdate = this._now();
- this._partialCycleCount = 0;
- this._cyclesPerSecond = 0;
- }
- /**
- * Number of cycles per second
- */
- get cyclesPerSecond() {
- return this._cyclesPerSecond;
- }
- /**
- * A measurement of time, in milliseconds
- * @returns time in ms
- */
- _now() {
- return performance.now();
- }
- }
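- /*
- * Usage sketch (illustrative, not part of the bundle): Stats simply counts
- * update() calls and, every UPDATE_INTERVAL (0.5 s), publishes
- * count / UPDATE_INTERVAL as cyclesPerSecond. Driving it from
- * requestAnimationFrame on a ~60 Hz display yields roughly 60 cycles/s:
- *
- * const fps = new Stats();
- * (function loop() {
- * fps.update(); // call once per frame
- * console.log(fps.cyclesPerSecond); // ~60 on a 60 Hz display
- * requestAnimationFrame(loop);
- * })();
- */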
-
- ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * stats-panel.ts
- * Stats panel used for development purposes
- */
-
-
-
- /** Update interval, in ms */
- const stats_panel_UPDATE_INTERVAL = 500;
- /** Icons for different power profiles */
- const POWER_ICON = Object.freeze({
- 'default': '',
- 'low-power': '🔋',
- 'high-performance': '⚡'
- });
- /**
- * Stats panel used for development purposes
- */
- class StatsPanel {
- /**
- * Constructor
- * @param viewport viewport whose HUD will host the panel
- */
- constructor(viewport) {
- this._viewport = viewport;
- this._lastUpdate = 0;
- this._container = this._createContainer();
- viewport.hud.container.appendChild(this._container);
- }
- /**
- * Release the panel
- */
- release() {
- this._container.remove();
- }
- /**
- * A method to be called in the update loop
- * @param time current time in ms
- * @param trackers the trackers attached to the session
- * @param sources the sources of media linked to the session
- * @param gpu GPU cycles per second
- * @param fps frames per second
- */
- update(time, trackers, sources, gpu, fps) {
- if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
- this._lastUpdate = time;
- this._update(trackers, sources, fps, gpu);
- }
- }
- /**
- * Visibility of the panel
- */
- get visible() {
- return !this._container.hidden;
- }
- /**
- * Visibility of the panel
- */
- set visible(visible) {
- this._container.hidden = !visible;
- }
- /**
- * Update the contents of the panel
- * @param trackers the trackers attached to the session
- * @param sources the sources of media linked to the session
- * @param fps frames per second
- * @param gpu GPU cycles per second
- */
- _update(trackers, sources, fps, gpu) {
- // all sanitized
- const lfps = this._label('_ar_fps');
- if (lfps !== null) {
- lfps.style.color = this._color(fps);
- lfps.innerText = String(fps);
- }
- const lgpu = this._label('_ar_gpu');
- if (lgpu !== null) {
- lgpu.style.color = this._color(gpu);
- lgpu.innerText = String(gpu);
- }
- const lpower = this._label('_ar_power');
- if (lpower !== null)
- lpower.innerHTML = POWER_ICON[Settings.powerPreference];
- const lin = this._label('_ar_in');
- if (lin !== null) {
- const sourceStats = sources.map(source => source._stats).join(', ');
- lin.innerText = sourceStats;
- }
- const lout = this._label('_ar_out');
- if (lout !== null) {
- const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
- lout.innerText = trackerStats;
- }
- }
- /**
- * Get a label of the panel
- * @param className
- * @returns the HTML element, or null if it doesn't exist
- */
- _label(className) {
- return this._container.getElementsByClassName(className).item(0);
- }
- /**
- * Associate a color to a frequency number
- * @param f frequency given in cycles per second
- * @returns colorized number (HTML)
- */
- _color(f) {
- const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
- const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
- const color2 = f >= 30 ? GREEN : RED;
- const color = Settings.powerPreference != 'low-power' ? color3 : color2;
- return color;
- }
- /**
- * Create the container for the panel
- * @returns a container
- */
- _createContainer() {
- const container = document.createElement('div');
- const print = (html) => container.insertAdjacentHTML('beforeend', html);
- container.style.position = 'absolute';
- container.style.left = container.style.top = '0px';
- container.style.zIndex = '1000000';
- container.style.padding = '4px';
- container.style.whiteSpace = 'pre-line';
- container.style.backgroundColor = 'rgba(0,0,0,0.5)';
- container.style.color = 'white';
- container.style.fontFamily = 'monospace';
- container.style.fontSize = '14px';
- // all sanitized
- container.innerText = 'MARTINS.js ' + Martins.version;
- print('<br>');
- print('FPS: <span class="_ar_fps"></span> | ');
- print('GPU: <span class="_ar_gpu"></span> ');
- print('<span class="_ar_power"></span>');
- print('<br>');
- print('IN: <span class="_ar_in"></span>');
- print('<br>');
- print('OUT: <span class="_ar_out"></span>');
- if (this._viewport.isFullscreenAvailable()) {
- print('<br>');
- container.appendChild(this._createFullscreenToggle());
- }
- return container;
- }
- /**
- * Create a fullscreen toggle
- * @returns a fullscreen toggle
- */
- _createFullscreenToggle() {
- const toggle = document.createElement('a');
- Utils.assert(this._viewport != null);
- toggle.href = 'javascript:void(0)';
- toggle.innerText = 'Toggle fullscreen';
- toggle.style.color = 'white';
- toggle.setAttribute('role', 'button');
- toggle.addEventListener('click', () => {
- if (!this._viewport.fullscreen) {
- this._viewport.requestFullscreen().catch(err => {
- alert(`Can't enable fullscreen mode. ` + err.toString());
- });
- }
- else {
- this._viewport.exitFullscreen();
- }
- });
- return toggle;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/frame.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * frame.ts
- * A Frame holds information used to render a single animation frame of a Session
- */
- /**
- * Iterable frame results (helper class)
- */
- class IterableTrackerResults {
- constructor(_results) {
- this._results = _results;
- this._index = 0;
- }
- next() {
- const i = this._index++;
- return i < this._results.length ?
- { done: false, value: this._results[i] } :
- { done: true, value: undefined };
- }
- [Symbol.iterator]() {
- return this;
- }
- }
- /**
- * A Frame holds information used to render a single animation frame of a Session
- */
- class Frame {
- /**
- * Constructor
- * @param session
- * @param results
- */
- constructor(session, results) {
- this._session = session;
- this._results = new IterableTrackerResults(results);
- }
- /**
- * The session of which this frame holds data
- */
- get session() {
- return this._session;
- }
- /**
- * The results of all trackers in this frame
- */
- get results() {
- return this._results;
- }
- }
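- /*
- * Usage sketch (illustrative only): since IterableTrackerResults implements the
- * iterator protocol and [Symbol.iterator]() returns the object itself, the
- * results of a Frame can be consumed with a plain for...of loop inside a
- * session animation frame callback:
- *
- * session.requestAnimationFrame((time, frame) => {
- * for (const result of frame.results) {
- * // result.tracker, result.trackables, ... (shape depends on the tracker)
- * }
- * });
- *
- * Note that the iterator is single-pass: _index is never reset, so iterating
- * frame.results a second time in the same frame yields nothing.
- */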
-
- ;// CONCATENATED MODULE: ./src/core/time.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * time.ts
- * Time utilities
- */
- /**
- * Time Manager
- */
- class Time {
- constructor() {
- /** time scale */
- this._scale = 1;
- /** time since the start of the session, in milliseconds */
- this._time = 0;
- /** unscaled time since the start of the session, in milliseconds */
- this._unscaledTime = 0;
- /** elapsed time between the current and the previous frame, in milliseconds */
- this._delta = 0;
- /** time of the first update call, in milliseconds */
- this._firstUpdate = 0;
- /** time of the last update call, in milliseconds */
- this._lastUpdate = Number.POSITIVE_INFINITY;
- }
- /**
- * Update the Time Manager
- * @param timestamp in milliseconds
- * @internal
- */
- _update(timestamp) {
- if (timestamp < this._lastUpdate) {
- this._firstUpdate = this._lastUpdate = timestamp;
- return;
- }
- this._delta = (timestamp - this._lastUpdate) * this._scale;
- this._time += this._delta;
- this._unscaledTime = timestamp - this._firstUpdate;
- this._lastUpdate = timestamp;
- }
- /**
- * Elapsed time since the start of the session, measured at the
- * beginning of the current animation frame and given in seconds
- */
- get elapsed() {
- return this._time * 0.001;
- }
- /**
- * Elapsed time between the current and the previous animation
- * frame, given in seconds
- */
- get delta() {
- return this._delta * 0.001;
- }
- /**
- * Time scale (defaults to 1)
- */
- get scale() {
- return this._scale;
- }
- /**
- * Time scale (defaults to 1)
- */
- set scale(scale) {
- this._scale = Math.max(0, +scale);
- }
- /**
- * Time scale independent elapsed time since the start of the session,
- * measured at the beginning of the current animation frame and given
- * in seconds
- */
- get unscaled() {
- return this._unscaledTime * 0.001;
- }
- }
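- /*
- * Usage sketch (illustrative only): the time scale multiplies the per-frame
- * delta, so it can slow down or pause time-based AR content without affecting
- * the unscaled clock:
- *
- * session.time.scale = 0.5; // slow motion: elapsed advances at half speed
- * session.time.scale = 0; // freeze: delta becomes 0 and elapsed stops
- * console.log(session.time.delta); // scaled seconds since the previous frame
- * console.log(session.time.unscaled); // wall-clock seconds since the session started
- */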
-
- ;// CONCATENATED MODULE: ./src/core/gizmos.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * gizmos.ts
- * Visual cues for testing & debugging
- */
-
- /** The maximum match distance ratio we'll consider to be "good" */
- const GOOD_MATCH_THRESHOLD = 0.7;
- /**
- * Visual cues for testing & debugging
- */
- class Gizmos {
- /**
- * Constructor
- */
- constructor() {
- this._visible = false;
- }
- /**
- * Whether or not the gizmos will be rendered
- */
- get visible() {
- return this._visible;
- }
- /**
- * Whether or not the gizmos will be rendered
- */
- set visible(visible) {
- this._visible = visible;
- }
- /**
- * Render gizmos
- * @param viewport
- * @param trackers
- * @internal
- */
- _render(viewport, trackers) {
- // no need to render?
- if (!this._visible)
- return;
- // viewport
- const viewportSize = viewport._realSize;
- const canvas = viewport._backgroundCanvas;
- const ctx = canvas.getContext('2d', { alpha: false });
- if (!ctx)
- throw new IllegalOperationError();
- // debug
- //ctx.fillStyle = '#000';
- //ctx.fillRect(0, 0, canvas.width, canvas.height);
- //ctx.clearRect(0, 0, canvas.width, canvas.height);
- // render keypoints
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const keypoints = output.keypoints;
- const screenSize = output.screenSize;
- if (keypoints !== undefined && screenSize !== undefined)
- this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
- }
- // render polylines
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const polyline = output.polyline;
- const screenSize = output.screenSize;
- if (polyline !== undefined && screenSize !== undefined)
- this._renderPolyline(ctx, polyline, screenSize, viewportSize);
- }
- // render the axes of the 3D coordinate system
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const cameraMatrix = output.cameraMatrix;
- const screenSize = output.screenSize;
- if (cameraMatrix !== undefined && screenSize !== undefined)
- this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
- }
- }
- /**
- * Split keypoints in matched/unmatched categories and
- * render them for testing & development purposes
- * @param ctx canvas 2D context
- * @param keypoints keypoints to render
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param size base keypoint rendering size
- */
- _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
- if (keypoints.length == 0)
- return;
- if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
- this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
- return;
- }
- const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
- (keypoint.matches.length > 1 &&
- keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
- keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
- const matchedKeypoints = keypoints;
- const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
- const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
- this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
- this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
- }
- /**
- * Render keypoints for testing & development purposes
- * @param ctx canvas 2D context
- * @param keypoints keypoints to render
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param color color of the rendered keypoints
- * @param size base keypoint rendering size
- */
- _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- ctx.beginPath();
- for (let i = keypoints.length - 1; i >= 0; i--) {
- const keypoint = keypoints[i];
- const x = (keypoint.x * sx + 0.5) | 0;
- const y = (keypoint.y * sy + 0.5) | 0;
- const r = (size * keypoint.scale + 0.5) | 0;
- ctx.rect(x - r, y - r, 2 * r, 2 * r);
- }
- ctx.strokeStyle = color;
- ctx.lineWidth = 1;
- ctx.stroke();
- }
- /**
- * Render polyline for testing & development purposes
- * @param ctx canvas 2D context
- * @param polyline vertices
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param color color of the rendered polyline
- * @param lineWidth
- */
- _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
- if (polyline.length == 0)
- return;
- const n = polyline.length;
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- // render polyline
- ctx.beginPath();
- ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
- for (let j = 0; j < n; j++)
- ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
- ctx.strokeStyle = color;
- ctx.lineWidth = lineWidth;
- ctx.stroke();
- }
- /**
- * Render the axes of a 3D coordinate system
- * @param ctx canvas 2D context
- * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param lineWidth
- */
- _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
- const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
- const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
- const length = 1; // length of each axis-corresponding line, given in normalized space units
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- /*
-
- Multiply the 3x4 camera matrix P by:
-
- [ 0 L 0 0 ]
- [ 0 0 L 0 ] , where L = length in normalized space of the lines
- [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
- [ 1 1 1 1 ]
-
- Each column of the resulting matrix will give us the pixel coordinates
- we're looking for.
-
- Note: we're working with homogeneous coordinates
-
- */
- const p = cameraMatrix.read();
- const l = length;
- const o = [p[9], p[10], p[11]]; // origin of the coordinate system
- const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
- const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
- const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
- const axis = [x, y, z];
- // draw each axis
- const ox = o[0] / o[2], oy = o[1] / o[2];
- for (let i = 0; i < 3; i++) {
- const q = axis[i];
- const x = q[0] / q[2], y = q[1] / q[2];
- ctx.beginPath();
- ctx.moveTo(ox * sx, oy * sy);
- ctx.lineTo(x * sx, y * sy);
- ctx.strokeStyle = color[i];
- ctx.lineWidth = lineWidth;
- ctx.stroke();
- }
- //console.log("Origin",ox,oy);
- }
- }
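- /*
- * Worked example (made-up numbers): _renderAxes() indexes the 3x4 camera matrix
- * in column-major order, so p[0..2], p[3..5], p[6..8] are the X, Y, Z direction
- * columns and p[9..11] is the translation column. With
- *
- * p = [ 100,0,0, 0,100,0, 0,0,50, 320,240,1 ] // columns of P
- *
- * the origin projects to (320/1, 240/1) = (320, 240) and the tip of the X axis
- * (length 1) to ((100+320)/1, (0+240)/1) = (420, 240) in AR screen space, after
- * the homogeneous division by the third coordinate. These points are then
- * scaled by sx and sy to map the AR screen onto the viewport before drawing.
- */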
-
- ;// CONCATENATED MODULE: ./src/utils/asap.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * asap.ts
- * Schedule a function to run "as soon as possible"
- */
- /** callbacks */
- const callbacks = [];
- /** arguments to be passed to the callbacks */
- const args = [];
- /** asap key */
- const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
- // Register an event listener
- window.addEventListener('message', event => {
- if (event.source !== window || event.data !== ASAP_KEY)
- return;
- event.stopPropagation();
- if (callbacks.length == 0)
- return;
- const fn = callbacks.pop();
- const argArray = args.pop();
- fn.apply(undefined, argArray);
- }, true);
- /**
- * Schedule a function to run "as soon as possible"
- * @param fn callback
- * @param params optional parameters
- */
- function asap(fn, ...params) {
- callbacks.unshift(fn);
- args.unshift(params);
- window.postMessage(ASAP_KEY, '*');
- }
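- /*
- * Usage sketch (illustrative only): asap() posts a message to the window and
- * runs the callback when that message is received, which typically happens
- * before a setTimeout(fn, 0) would fire and without the display throttling of
- * requestAnimationFrame. The update loop relies on it when the power
- * preference is 'high-performance'.
- *
- * asap((a, b) => console.log(a + b), 2, 3); // logs 5 "as soon as possible"
- */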
-
- ;// CONCATENATED MODULE: ./src/core/session.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * session.ts
- * WebAR Session
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** An event emitted by a Session */
- class SessionEvent extends AREvent {
- }
- /** Default options when starting a session */
- const DEFAULT_OPTIONS = {
- mode: 'immersive',
- trackers: [],
- sources: [],
- viewport: null,
- stats: false,
- gizmos: false,
- };
- /**
- * A Session represents an intent to display AR content
- * and encapsulates the main loop (update-render cycle)
- */
- class Session extends AREventTarget {
- /**
- * Constructor
- * @param sources previously initialized sources of data
- * @param mode session mode
- * @param viewport viewport
- * @param stats render stats panel?
- * @param gizmos render gizmos?
- */
- constructor(sources, mode, viewport, stats, gizmos) {
- super();
- this._mode = mode;
- this._trackers = [];
- this._sources = sources;
- this._updateStats = new Stats();
- this._renderStats = new Stats();
- this._active = true;
- this._frameReady = true; // no trackers at the moment
- this._rafQueue = [];
- this._time = new Time();
- this._gizmos = new Gizmos();
- this._gizmos.visible = gizmos;
- // get media
- const media = this.media;
- // setup the viewport
- if (mode == 'immersive')
- this._viewport = new ImmersiveViewport(viewport, () => media.size);
- else if (mode == 'inline')
- this._viewport = new InlineViewport(viewport, () => media.size);
- else
- throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
- this._viewport._init();
- // setup the main loop
- this._setupUpdateLoop();
- this._setupRenderLoop();
- // setup the stats panel
- this._statsPanel = new StatsPanel(this._viewport);
- this._statsPanel.visible = stats;
- // done!
- Session._count++;
- Utils.log(`The ${mode} session is now active!`);
- }
- /**
- * Checks if the engine can be run in the browser the client is using
- * @returns true if the engine is compatible with the browser
- */
- static isSupported() {
- //alert(Utils.deviceInfo()); // debug
- // If Safari / iOS, require version 15.2 or later
- if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
- /*
-
- iOS compatibility
- -----------------
-
- The engine is known to work on iPhone 8 or later, with iOS 15.2 or
- later. Tested on many devices, including iPads, on the cloud.
-
- The engine crashes on an iPhone 13 Pro Max with iOS 15.1 and on an
- iPhone 12 Pro with iOS 15.0.2. A (valid) shader from speedy-vision
- version 0.9.1 (bf-knn) fails to compile: "WebGL error. Program has
- not been successfully linked".
-
- The engine freezes on an older iPhone 6S (2015) with iOS 15.8.2.
- The exact cause is unknown, but it happens when training an image
- tracker, at ImageTrackerTrainingState._gpuUpdate() (a WebGL error?
- a hardware limitation?)
-
- Successfully tested down to iPhone 8 so far.
- Successfully tested down to iOS 15.2.
-
- >> WebGL2 support was introduced in Safari 15 <<
-
- Note: the webp image format used in the demos is supported on
- Safari for iOS 14+. Desktop Safari 14-15.6 supports webp, but
- requires macOS 11 Big Sur or later. https://caniuse.com/webp
-
- */
- const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
- const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
- const matches = safari || ios; // match safari first (min version)
- if (matches !== null) {
- const version = matches[3] || '0.0';
- const [x, y] = version.split(/[\._]/).map(v => parseInt(v) | 0);
- if ((x < 15) || (x == 15 && y < 2)) {
- Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
- return false;
- }
- // XXX reject older iPhone models? Which ones?
- /*if(navigator.userAgent.includes('iPhone')) {
- // detect screen size?
- }*/
- }
- else
- Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
- }
- // Android: reject very old / weak devices?
- // XXX establish criteria?
- /*if(Utils.isAndroid()) {
- }*/
- // Check if WebGL2 and WebAssembly are supported
- return speedy_vision_default().isSupported();
- }
- /**
- * Instantiate a session
- * @param options options
- * @returns a promise that resolves to a new session
- */
- static instantiate(options = DEFAULT_OPTIONS) {
- const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
- Utils.log(`Starting a new ${mode} session...`);
- return speedy_vision_default().Promise.resolve().then(() => {
- // is the engine supported?
- if (!Session.isSupported())
- throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with the MARTINS.js engine');
- // block multiple immersive sessions
- if (mode !== 'inline' && Session.count > 0)
- throw new IllegalOperationError(`Can't start more than one immersive session`);
- // initialize matrix routines
- return speedy_vision_default().Matrix.ready();
- }).then(() => {
- // validate sources of data
- const videoSources = sources.filter(source => source._type == 'video');
- if (videoSources.length != 1)
- throw new IllegalArgumentError(`One video source of data must be provided`);
- for (let i = sources.length - 1; i >= 0; i--) {
- if (sources.indexOf(sources[i]) < i)
- throw new IllegalArgumentError(`Found repeated sources of data`);
- }
- // initialize sources of data
- return speedy_vision_default().Promise.all(sources.map(source => source._init()));
- }).then(() => {
- // get the viewport
- if (!viewport)
- throw new IllegalArgumentError(`Can't create a session without a viewport`);
- // instantiate session
- return new Session(sources, mode, viewport, stats, gizmos);
- }).then(session => {
- // validate trackers
- if (trackers.length == 0)
- Utils.warning(`No trackers have been attached to the session!`);
- for (let i = trackers.length - 1; i >= 0; i--) {
- if (trackers.indexOf(trackers[i]) < i)
- throw new IllegalArgumentError(`Found repeated trackers`);
- }
- // attach trackers and return the session
- return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
- }).catch(err => {
- // log errors, if any
- Utils.error(`Can't start session: ${err.message}`);
- throw err;
- });
- }
- /**
- * Number of active sessions
- */
- static get count() {
- return this._count;
- }
- /**
- * End the session
- * @returns promise that resolves after the session is shut down
- */
- end() {
- // is the session inactive?
- if (!this._active)
- return speedy_vision_default().Promise.resolve();
- // deactivate the session
- Utils.log('Shutting down the session...');
- this._active = false; // set before wait()
- // wait a few ms, so that the GPU is no longer sending any data
- const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
- setTimeout(resolve, ms);
- });
- // release resources
- return wait(100).then(() => speedy_vision_default().Promise.all(
- // release trackers
- this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
- // release input sources
- this._sources.map(source => source._release()))).then(() => {
- this._sources.length = 0;
- this._trackers.length = 0;
- // release internal components
- this._updateStats.reset();
- this._renderStats.reset();
- this._statsPanel.release();
- this._viewport._release();
- // end the session
- Session._count--;
- // dispatch event
- const event = new SessionEvent('end');
- this.dispatchEvent(event);
- // done!
- Utils.log('Session ended.');
- });
- }
- /**
- * Analogous to window.requestAnimationFrame()
- * @param callback
- * @returns a handle
- */
- requestAnimationFrame(callback) {
- const handle = Symbol('raf-handle');
- if (this._active)
- this._rafQueue.push([handle, callback]);
- else
- throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
- return handle;
- }
- /**
- * Analogous to window.cancelAnimationFrame()
- * @param handle a handle returned by this.requestAnimationFrame()
- */
- cancelAnimationFrame(handle) {
- for (let i = this._rafQueue.length - 1; i >= 0; i--) {
- if (this._rafQueue[i][0] === handle) {
- this._rafQueue.splice(i, 1);
- break;
- }
- }
- }
- /**
- * The underlying media (generally a camera stream)
- * @internal
- */
- get media() {
- for (let i = this._sources.length - 1; i >= 0; i--) {
- if (this._sources[i]._type == 'video')
- return this._sources[i]._data;
- }
- // this shouldn't happen
- throw new IllegalOperationError(`Invalid input source`);
- }
- /**
- * Session mode
- */
- get mode() {
- return this._mode;
- }
- /**
- * Rendering viewport
- */
- get viewport() {
- return this._viewport;
- }
- /**
- * Time utilities
- */
- get time() {
- return this._time;
- }
- /**
- * Visual cues for testing & debugging
- */
- get gizmos() {
- return this._gizmos;
- }
- /**
- * Attach a tracker to the session
- * @param tracker
- */
- _attachTracker(tracker) {
- if (this._trackers.indexOf(tracker) >= 0)
- throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
- else if (!this._active)
- throw new IllegalOperationError(`Inactive session`);
- this._trackers.push(tracker);
- return tracker._init(this);
- }
- /**
- * Render the user media to the background canvas
- */
- _renderUserMedia() {
- const canvas = this._viewport._backgroundCanvas;
- const ctx = canvas.getContext('2d', { alpha: false });
- if (ctx && this.media.type != 'data') {
- ctx.imageSmoothingEnabled = false;
- // draw user media
- const image = this.media.source;
- ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
- // render output image(s)
- for (let i = 0; i < this._trackers.length; i++) {
- const media = this._trackers[i]._output.image;
- if (media !== undefined) {
- const image = media.source;
- ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
- //ctx.drawImage(image, canvas.width - media.width, canvas.height - media.height, media.width, media.height);
- }
- }
- // render gizmos
- this._gizmos._render(this._viewport, this._trackers);
- }
- }
- /**
- * Setup the update loop
- */
- _setupUpdateLoop() {
- const scheduleNextFrame = () => {
- if (this._active) {
- if (Settings.powerPreference == 'high-performance')
- asap(repeat);
- else
- window.requestAnimationFrame(repeat);
- }
- };
- const update = () => {
- this._update().then(scheduleNextFrame).turbocharge();
- };
- function repeat() {
- if (Settings.powerPreference == 'low-power') // 30 fps
- window.requestAnimationFrame(update);
- else
- update();
- }
- window.requestAnimationFrame(update);
- }
- /**
- * The core of the update loop
- */
- _update() {
- // active session?
- if (this._active) {
- return speedy_vision_default().Promise.all(
- // update trackers
- this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
- // update internals
- this._updateStats.update();
- this._frameReady = true;
- }).catch(err => {
- // log error
- Utils.error('Tracking error: ' + err.toString(), err);
- // handle WebGL errors
- const cause = err.cause;
- if (err.name == 'GLError') {
- alert(err.message); // fatal error?
- alert(Utils.deviceInfo()); // display useful info
- throw err;
- }
- else if (typeof cause == 'object' && cause.name == 'GLError') {
- alert(err.message);
- alert(cause.message);
- alert(Utils.deviceInfo());
- throw err;
- }
- });
- }
- else {
- // inactive session
- this._updateStats.reset();
- return speedy_vision_default().Promise.resolve();
- }
- }
- /**
- * Setup the render loop
- */
- _setupRenderLoop() {
- let skip = false, toggle = false;
- const render = (timestamp) => {
- const enableFrameSkipping = (Settings.powerPreference == 'low-power');
- const highPerformance = (Settings.powerPreference == 'high-performance');
- // advance time
- this._time._update(timestamp);
- // skip frames
- if (!enableFrameSkipping || !(skip = !skip))
- this._render(timestamp, false);
- //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
- // repeat
- if (this._active)
- window.requestAnimationFrame(render);
- };
- window.requestAnimationFrame(render);
- }
- /**
- * Render a frame (RAF callback)
- * @param time current time, in ms
- * @param skipUserMedia skip copying the pixels of the user media to the background canvas in order to reduce the processing load (video stream is probably at 30fps?)
- */
- _render(time, skipUserMedia) {
- // is the session active?
- if (this._active) {
- // are we ready to render a frame?
- if (this._frameReady) {
- // create a frame
- const results = this._trackers.map(tracker => tracker._output.exports || ({
- tracker: tracker,
- trackables: [],
- }));
- const frame = new Frame(this, results);
- // clone & clear the RAF queue
- const rafQueue = this._rafQueue.slice(0);
- this._rafQueue.length = 0;
- // render user media
- if (!skipUserMedia)
- this._renderUserMedia();
- // render frame
- for (let i = 0; i < rafQueue.length; i++)
- rafQueue[i][1].call(undefined, time, frame);
- // update internals
- this._renderStats.update();
- this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
- this._frameReady = false;
- }
- else {
- // skip frame
- // we'll update the renderStats even if we skip the frame,
- // otherwise this becomes updateStats! (approximately)
- // This is a window.requestAnimationFrame() call, so the
- // browser is rendering content even if we're not.
- this._renderStats.update();
- }
- }
- else {
- // inactive session
- this._renderStats.reset();
- }
- }
- }
- /** Number of active sessions */
- Session._count = 0;
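- /*
- * Usage sketch (illustrative only; in the public API a session is normally
- * created through the library's entry point rather than by calling this class
- * directly, and cameraSource, imageTracker and viewport are placeholders for
- * objects created elsewhere). The options mirror DEFAULT_OPTIONS:
- *
- * Session.instantiate({
- * mode: 'immersive', // or 'inline'
- * sources: [ cameraSource ], // exactly one video source is required
- * trackers: [ imageTracker ],
- * viewport: viewport, // required
- * stats: true, // show the stats panel
- * gizmos: true // draw debugging gizmos
- * }).then(session => {
- * session.requestAnimationFrame(function loop(time, frame) {
- * // render AR content for this frame...
- * session.requestAnimationFrame(loop);
- * });
- * });
- */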
-
- ;// CONCATENATED MODULE: ./src/core/settings.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * settings.ts
- * Global Settings
- */
-
-
-
-
- /**
- * Global Settings
- */
- class Settings {
- /**
- * Power preference (may impact performance x battery life)
- */
- static get powerPreference() {
- return this._powerPreference;
- }
- /**
- * Power preference (may impact performance x battery life)
- * Note: this setting should be the very first thing you set
- * (before the WebGL context is created by Speedy)
- */
- static set powerPreference(value) {
- // validate
- if (Session.count > 0)
- throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
- else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
- throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
- /*
- // we won't use 'high-performance' for Speedy's GPU computations
- // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
- // also, it seems like low-power mode may break WebGL2 in some drivers?!
-
- if(value == 'high-performance')
- Speedy.Settings.powerPreference = 'default';
- else
- Speedy.Settings.powerPreference = value;
- */
- // change the GPU polling mode
- if (value == 'high-performance')
- (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
- else
- (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
- // update the power preference
- this._powerPreference = value;
- // log
- Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
- }
- }
- Settings._powerPreference = 'default';
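- /*
- * Usage sketch (illustrative only): the power preference must be set before any
- * session is started, since the setter throws while Session.count > 0 and the
- * value changes how the GPU is polled and how frames are scheduled:
- *
- * Settings.powerPreference = 'low-power'; // ~30 fps, frame skipping
- * // Settings.powerPreference = 'high-performance'; // GPU polling mode 'asap'
- * // ...only then create sources, trackers and the session
- */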
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * reference-image-database.ts
- * A collection of Reference Images
- */
-
-
- /** Default capacity of a Reference Image Database */
- const DEFAULT_CAPACITY = 100; // this number should exceed normal usage
- // XXX this number may be changed (is 100 too conservative?)
- // further testing is needed to verify the appropriateness of this number;
- // it depends on the images, on the keypoint descriptors, and even on the target devices
- /** Generate a unique name for a reference image */
- const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
- /**
- * A collection of Reference Images
- */
- class ReferenceImageDatabase {
- /**
- * Constructor
- */
- constructor() {
- this._capacity = DEFAULT_CAPACITY;
- this._database = [];
- this._locked = false;
- }
- /**
- * The number of reference images stored in this database
- */
- get count() {
- return this._database.length;
- }
- /**
- * Maximum number of elements
- */
- get capacity() {
- return this._capacity;
- }
- /**
- * Maximum number of elements
- * Increasing the capacity is considered experimental
- */
- set capacity(value) {
- const capacity = Math.max(0, value | 0);
- if (this.count > capacity)
- throw new IllegalArgumentError(`Can't set the capacity of the database to ${capacity}: it currently stores ${this.count} entries`);
- this._capacity = capacity;
- }
- /**
- * Iterates over the collection
- */
- *[Symbol.iterator]() {
- const ref = this._database.map(entry => entry.referenceImage);
- yield* ref;
- }
- /**
- * Add reference images to this database
- * Add only the images you actually need to track!
- * (each image takes up storage space)
- * @param referenceImages one or more reference images with unique names (a unique name will
- * be generated automatically if you don't specify one)
- * @returns a promise that resolves as soon as the images are loaded and added to this database
- */
- add(referenceImages) {
- // handle no input
- if (referenceImages.length == 0)
- return speedy_vision_default().Promise.resolve();
- // handle multiple images as input
- if (referenceImages.length > 1) {
- const promises = referenceImages.map(image => this.add([image]));
- return speedy_vision_default().Promise.all(promises).then(() => void (0));
- }
- // handle a single image as input
- const referenceImage = referenceImages[0];
- // locked database?
- if (this._locked)
- throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
- // reached full capacity?
- if (this.count >= this.capacity)
- throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images has been exceeded.`);
- // check for duplicate names
- if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
- throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
- // load the media and add the reference image to the database
- return speedy_vision_default().load(referenceImage.image).then(media => {
- this._database.push({
- referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
- media: media
- });
- });
- }
- /**
- * Lock the database, so that new reference images can no longer be added to it
- * @internal
- */
- _lock() {
- this._locked = true;
- }
- /**
- * Get the media object associated to a reference image
- * @param name reference image name
- * @returns media
- * @internal
- */
- _findMedia(name) {
- for (let i = 0; i < this._database.length; i++) {
- if (this._database[i].referenceImage.name === name)
- return this._database[i].media;
- }
- throw new IllegalArgumentError(`Can't find reference image "${name}"`);
- }
- }
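- /*
- * Usage sketch (illustrative only; it assumes the image tracker exposes its
- * ReferenceImageDatabase as a 'database' property, which is not shown in this
- * excerpt, and the image elements are placeholders). Reference images can be
- * added in bulk before the tracker locks the database; names must be unique
- * and are auto-generated when omitted:
- *
- * const database = imageTracker.database;
- * database.add([
- * { name: 'poster', image: document.getElementById('poster') },
- * { image: anotherImageElement } // gets a generated name like 'target-...'
- * ]).then(() => {
- * console.log(database.count); // 2
- * });
- */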
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * settings.ts
- * Settings of the Image Tracker
- */
- /** Default tracking resolution */
- const DEFAULT_TRACKING_RESOLUTION = 'sm+';
- /** Maximum number of keypoints to be stored for each reference image when in the training state */
- const TRAIN_MAX_KEYPOINTS = 1024; //512;
- /** Percentage relative to the screen size adjusted to the aspect ratio of the reference image */
- const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
- /** Normalized width & height of an image target, in pixels */
- const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
- /** Used to identify the best matches */
- const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
- /** Maximum number of keypoints to be analyzed when in the scanning state */
- const SCAN_MAX_KEYPOINTS = 512;
- /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
- const SCAN_PYRAMID_LEVELS = 4; //7;
- /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
- const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
- /** Threshold of the FAST corner detector used in the scanning/training states */
- const SCAN_FAST_THRESHOLD = 60;
- /** Minimum number of accepted matches for us to move out from the scanning state */
- const SCAN_MIN_MATCHES = 20; //30;
- /** When in the scanning state, we require the image to be matched during a few consecutive frames before accepting it */
- const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
- /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
- const SCAN_RANSAC_REPROJECTIONERROR = 5;
- /** Number of tables used in the LSH-based keypoint matching */
- const SCAN_LSH_TABLES = 8; // up to 32
- /** Hash size, in bits, used in the LSH-based keypoint matching */
- const SCAN_LSH_HASHSIZE = 15; // up to 16
- /** Use the Nightvision filter when in the scanning/training state? */
- const SCAN_WITH_NIGHTVISION = true;
- /** Nightvision filter: gain */
- const NIGHTVISION_GAIN = 0.3; // 0.2;
- /** Nightvision filter: offset */
- const NIGHTVISION_OFFSET = 0.5;
- /** Nightvision filter: decay */
- const NIGHTVISION_DECAY = 0.0;
- /** Nightvision filter: quality level */
- const NIGHTVISION_QUALITY = 'low';
- /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
- const ORB_GAUSSIAN_KSIZE = 9;
- /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
- const ORB_GAUSSIAN_SIGMA = 2.0;
- /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
- const SUBPIXEL_GAUSSIAN_KSIZE = 5;
- /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
- const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
- /** Subpixel refinement method */
- const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
- /** Minimum acceptable number of matched keypoints when in the tracking state */
- const TRACK_MIN_MATCHES = 4; //10; //20;
- /** Maximum number of keypoints to be analyzed in the tracking state */
- const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
- /** Capacity of the keypoint detector used in the tracking state */
- const TRACK_DETECTOR_CAPACITY = 2048; //4096;
- /** Quality of the Harris/Shi-Tomasi corner detector */
- const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
- /** Use the Nightvision filter when in the tracking state? */
- const TRACK_WITH_NIGHTVISION = false; // produces shaking?
- /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
- const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
- /** Relative size (%) used to clip keypoints from the borders of the rectified image */
- const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
- /** Number of iterations used to refine the target image before tracking */
- const TRACK_REFINEMENT_ITERATIONS = 3;
- /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
- const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
- /** We use a N x N grid to spatially distribute the keypoints in order to compute a better homography */
- const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
- /** Used to identify the best matches */
- const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
- /** Number of consecutive frames in which we tolerate a "target lost" situation */
- const TRACK_LOST_TOLERANCE = 10;
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * state.ts
- * Abstract state of the Image Tracker
- */
-
-
-
- /**
- * Abstract state of the Image Tracker
- */
- class ImageTrackerState {
- /**
- * Constructor
- * @param name
- * @param imageTracker
- */
- constructor(name, imageTracker) {
- this._name = name;
- this._imageTracker = imageTracker;
- this._pipeline = this._createPipeline();
- }
- /**
- * State name
- */
- get name() {
- return this._name;
- }
- /**
- * AR screen size
- */
- get screenSize() {
- const screen = this._pipeline.node('screen');
- if (!screen)
- throw new IllegalOperationError();
- // this is available once this state has run at least once
- return screen.size;
- }
- /**
- * Initialize the state
- */
- init() {
- }
- /**
- * Release resources
- */
- release() {
- return this._pipeline.release();
- }
- /**
- * Update the state
- * @param media user media
- * @param screenSize AR screen size for image processing
- * @returns promise
- */
- update(media, screenSize) {
- const source = this._pipeline.node('source');
- const screen = this._pipeline.node('screen');
- // validate the pipeline
- if (!source || !screen)
- throw new IllegalOperationError();
- // prepare the pipeline
- source.media = media;
- screen.size = screenSize;
- // run the pipeline
- return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- }
- /**
- * Called when leaving the state, after update()
- */
- onLeaveState() {
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * GPU processing
- * @returns promise with the pipeline results
- */
- _gpuUpdate() {
- return this._pipeline.run();
- }
- //
- // Some utility methods common to various states
- //
- /**
- * Find the coordinates of a polyline surrounding the target image
- * @param homography maps the target image to the AR screen
- * @param targetSize size of the target space
- * @returns promise that resolves to 4 points in AR screen space
- */
- _findPolylineCoordinates(homography, targetSize) {
- const w = targetSize.width, h = targetSize.height;
- const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
- 0, 0,
- w, 0,
- w, h,
- 0, h,
- ]);
- const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
- return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
- }
- /**
- * Find a polyline surrounding the target image
- * @param homography maps the target image to the AR screen
- * @param targetSize size of the target space
- * @returns promise that resolves to 4 points in AR screen space
- */
- _findPolyline(homography, targetSize) {
- return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
- const polydata = polylineCoordinates.read();
- const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
- return polyline;
- });
- }
- /**
- * Whether or not to rotate the warped image in order to best fit the AR screen
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns boolean
- */
- _mustRotateWarpedImage(media, screenSize) {
- const screenAspectRatio = screenSize.width / screenSize.height;
- const mediaAspectRatio = media.width / media.height;
- const eps = 0.1;
- return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
- }
- /**
- * Find a rectification matrix to be applied to an image fitting the entire AR screen
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findRectificationMatrixOfFullscreenImage(media, screenSize) {
- const b = TRACK_RECTIFIED_BORDER;
- const sw = screenSize.width, sh = screenSize.height;
- const mediaAspectRatio = media.width / media.height;
- const mustRotate = this._mustRotateWarpedImage(media, screenSize);
- // compute the vertices of the target in screen space
- // we suppose portrait or landscape mode for both screen & media
- const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
- const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
- const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
- const right = sw - left;
- const bottom = sh - top;
- const targetVertices = speedy_vision_default().Matrix(2, 4, [
- left, top,
- right, top,
- right, bottom,
- left, bottom,
- ]);
- const screenVertices = speedy_vision_default().Matrix(2, 4, [
- 0, 0,
- sw, 0,
- sw, sh,
- 0, sh
- ]);
- const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
- const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
- const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
- return (mustRotate ? speedy_vision_default().Matrix.perspective(
- // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
- preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
- // alignment: align the target to the center of the screen
- speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
- // pre-rectify and then align
- rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
- }
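- /*
- * Worked example (illustrative only; TRACK_RECTIFIED_BORDER is assumed to be
- * 0.15 just for the arithmetic): with a 1280x720 screen and a 600x800 media
- * (aspect ratio 0.75), mustRotate is true and c = 1/0.75 ~ 1.333. Then
- * top = 0.15 * 720 = 108 and left = (1280 - 720 * 0.7 * 1.333) / 2 = 304,
- * so the rectified target occupies a centered 672x504 region (aspect ratio c)
- * surrounded by the configured border.
- */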
- /**
- * Find a rectification matrix to be applied to the target image
- * @param homography maps a reference image to the AR screen
- * @param targetSize size of the target space
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
- const sw = screenSize.width, sh = screenSize.height;
- const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
- const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
- return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
- // from target space to (full)screen
- speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
- // from (full)screen to rectified coordinates
- this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
- // function composition
- rectificationMatrix.setTo(mat.times(rectificationMatrix)));
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * initial.ts
- * Initial state of the Image Tracker
- */
-
-
-
-
- /**
- * The purpose of the initial state of the Image Tracker
- * is to initialize the training state using the state machine
- */
- class ImageTrackerInitialState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('initial', imageTracker);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const source = this._pipeline.node('source');
- const media = source.media;
- const mediaSize = media.size;
- if (mediaSize.area() < this.screenSize.area())
- Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- return speedy_vision_default().Promise.resolve({
- nextState: 'training',
- trackerOutput: {},
- });
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- // this pipeline does nothing useful,
- // but it does preload some shaders...
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
- const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
- const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
- const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
- const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- nightvision.quality = NIGHTVISION_QUALITY;
- subpixel.method = SUBPIXEL_METHOD;
- //borderClipper.imageSize = screen.size;
- borderClipper.imageSize = speedy_vision_default().Size(100, 100);
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- matcher.k = 1; //2;
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- keypointPortalSource.source = keypointPortalSink;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(blur.input());
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
- muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
- muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // store reference keypoints
- keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
- bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
- keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
- // portals
- descriptor.output().connectTo(keypointPortalSink.input());
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- // done!
- pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
- /*
- const run = pipeline.run.bind(pipeline);
- pipeline.run = function() {
- console.time("TIME");
- return run().then(x => {
- console.timeEnd("TIME");
- return x;
- });
- };
- */
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * training.ts
- * Training state of the Image Tracker
- */
-
-
-
-
-
- /**
- * Training state of the Image Tracker
- */
- class ImageTrackerTrainingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('training', imageTracker);
- this._currentImageIndex = 0;
- this._image = [];
- // initialize the training map
- this._trainingMap = {
- referenceImageIndex: [],
- referenceImage: [],
- keypoints: []
- };
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const database = this._imageTracker.database;
- // validate
- if (database.count == 0)
- throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
- // prepare to train...
- this._currentImageIndex = 0;
- this._image.length = 0;
- this._trainingMap.referenceImageIndex.length = 0;
- this._trainingMap.referenceImage.length = 0;
- this._trainingMap.keypoints.length = 0;
- // lock the database
- Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
- database._lock();
- // collect all images
- for (const referenceImage of database)
- this._image.push(referenceImage);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const arScreenSize = this.screenSize;
- const source = this._pipeline.node('source');
- const screen = this._pipeline.node('screen');
- const keypointScaler = this._pipeline.node('keypointScaler');
- // this shouldn't happen
- if (this._currentImageIndex >= this._image.length)
- return speedy_vision_default().Promise.reject(new IllegalOperationError());
- // set the appropriate training media
- const database = this._imageTracker.database;
- const referenceImage = this._image[this._currentImageIndex];
- const media = database._findMedia(referenceImage.name);
- source.media = media;
- // compute the appropriate size of the training image space
- const resolution = this._imageTracker.resolution;
- const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
- const aspectRatioOfTrainingImage = media.width / media.height;
- /*
- let sin = 0, cos = 1;
-
- if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
- // training image and source video: both in landscape mode or both in portrait mode
- screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- }
- else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
- // training image: portrait mode; source video: landscape mode
- screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- sin = 1; cos = 0; // rotate 90deg
- }
- else {
- // training image: landscape mode; source video: portrait mode
- }
- */
- screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- // convert keypoints from the training image space to AR screen space
- // let's pretend that trained keypoints belong to the AR screen space,
- // regardless of the size of the target image. This will make things
- // easier when computing the homography.
- /*
- const sw = arScreenSize.width / screen.size.width;
- const sh = arScreenSize.height / screen.size.height;
- */
- const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
- const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
- keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
- sw, 0, 0,
- 0, sh, 0,
- 0, 0, 1,
- ]);
- // log
- Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
- // done!
- return speedy_vision_default().Promise.resolve();
- }
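- /*
- * Illustrative example of the normalization above (the value of
- * TRAIN_TARGET_NORMALIZED_SIZE is hypothetical here): if the normalized size
- * were 1000 and the training image space were 800x600, then sw = 1.25 and
- * sh ~ 1.667, so a keypoint detected at (400, 300), the center of the training
- * image, is stored at (500, 500), the center of the normalized target space.
- * Note that the target is stretched to a square normalized space.
- */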
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const referenceImage = this._image[this._currentImageIndex];
- const keypoints = result.keypoints;
- const image = result.image;
- // log
- Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
- // update the training map, so that each keypoint of the current image can be mapped back to its reference image
- this._trainingMap.referenceImage.push(referenceImage);
- for (let i = 0; i < keypoints.length; i++) {
- this._trainingMap.keypoints.push(keypoints[i]);
- this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
- }
- // the current image has been processed!
- ++this._currentImageIndex;
- // set output
- if (this._currentImageIndex >= this._image.length) {
- // finished training!
- return speedy_vision_default().Promise.resolve({
- //nextState: 'training',
- nextState: 'scanning',
- nextStateSettings: {
- keypoints: this._trainingMap.keypoints,
- },
- trackerOutput: {},
- //trackerOutput: { image, keypoints, screenSize: this.screenSize },
- });
- }
- else {
- // we're not done yet
- return speedy_vision_default().Promise.resolve({
- nextState: 'training',
- trackerOutput: {},
- //trackerOutput: { image, keypoints, screenSize: this.screenSize },
- });
- }
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
- const pyramid = speedy_vision_default().Image.Pyramid();
- const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const blurredPyramid = speedy_vision_default().Image.Pyramid();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
- const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- detector.levels = SCAN_PYRAMID_LEVELS;
- detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
- detector.threshold = SCAN_FAST_THRESHOLD;
- detector.capacity = 8192;
- subpixel.method = SUBPIXEL_METHOD;
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- clipper.size = TRAIN_MAX_KEYPOINTS;
- keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess image
- greyscale.output().connectTo(nightvisionMux.input('in0'));
- greyscale.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(pyramid.input());
- // keypoint detection
- pyramid.output().connectTo(detector.input());
- detector.output().connectTo(clipper.input());
- // keypoint refinement
- greyscale.output().connectTo(denoiser.input()); // reduce noise
- denoiser.output().connectTo(blurredPyramid.input());
- clipper.output().connectTo(subpixel.input('keypoints'));
- blurredPyramid.output().connectTo(subpixel.input('image'));
- // keypoint description
- greyscale.output().connectTo(blur.input());
- blur.output().connectTo(descriptor.input('image'));
- clipper.output().connectTo(descriptor.input('keypoints'));
- // prepare output
- descriptor.output().connectTo(keypointScaler.input());
- keypointScaler.output().connectTo(keypointSink.input());
- nightvisionMux.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
- return pipeline;
- }
- /**
- * Get the reference image associated with a keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns reference image, or null if not found
- */
- referenceImageOfKeypoint(keypointIndex) {
- const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
- if (imageIndex < 0)
- return null;
- return this._trainingMap.referenceImage[imageIndex];
- }
- /**
- * Get reference image index
- * @param keypointIndex index of the keypoint
- * @returns reference image index, or -1 if not found
- */
- referenceImageIndexOfKeypoint(keypointIndex) {
- const n = this._trainingMap.referenceImageIndex.length;
- if (keypointIndex < 0 || keypointIndex >= n)
- return -1;
- const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
- if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
- return -1;
- return imageIndex;
- }
- /**
- * Get keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns a keypoint of the trained set, or null if not found
- */
- referenceKeypoint(keypointIndex) {
- if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
- return null;
- return this._trainingMap.keypoints[keypointIndex];
- }
- }
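- /*
- * Note on the training map (illustrative sketch, not part of the original code):
- * keypoints[i] is the i-th trained keypoint and referenceImageIndex[i] records
- * which reference image it came from; the arrays are kept in parallel. This is
- * what lets a matched keypoint index be turned back into a reference image.
- * Assuming trainingState is an instance of ImageTrackerTrainingState:
- *
- * const image = trainingState.referenceImageOfKeypoint(matchIndex); // null if invalid
- * const keypoint = trainingState.referenceKeypoint(matchIndex); // the trained keypoint
- */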
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * scanning.ts
- * Scanning state of the Image Tracker
- */
-
-
-
-
- /** Default target space size (used when training) */
- const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
- /** Port of the portal multiplexer: get new data from the camera */
- const PORT_CAMERA = 0;
- /** Port of the portal multiplexer: get previously memorized data */
- const PORT_MEMORY = 1;
- /**
- * Scanning state of the Image Tracker
- */
- class ImageTrackerScanningState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('scanning', imageTracker);
- this._counter = 0;
- this._bestScore = 0;
- this._bestHomography = speedy_vision_default().Matrix.Eye(3);
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const imagePortalMux = this._pipeline.node('imagePortalMux');
- const lshTables = this._pipeline.node('lshTables');
- const keypoints = settings.keypoints;
- // set attributes
- this._counter = 0;
- this._bestScore = 0;
- // reset the image memorization circuit
- imagePortalMux.port = PORT_CAMERA;
- // prepare the keypoint matcher
- if (keypoints !== undefined)
- lshTables.keypoints = keypoints;
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const imagePortalMux = this._pipeline.node('imagePortalMux');
- const keypoints = result.keypoints;
- const matchedKeypoints = this._goodMatches(keypoints);
- // tracker output
- const trackerOutput = {
- keypoints: keypoints,
- screenSize: this.screenSize
- };
- // keep the last memorized image
- imagePortalMux.port = PORT_MEMORY;
- // have we found enough matches...?
- if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
- return this._findHomography(matchedKeypoints).then(([homography, score]) => {
- // have we found the best homography so far?
- if (score >= this._bestScore) {
- // store it only if we'll be running the pipeline again
- if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
- this._bestScore = score;
- this._bestHomography = homography;
- // memorize the last image, corresponding to the best homography(*)
- imagePortalMux.port = PORT_CAMERA;
- /*
-
- (*) technically speaking, this is not exactly the case. Since we're
- using turbo to download the keypoints, there's a slight difference
- between the data used to compute the homography and the last image.
- Still, assuming continuity of the video stream, this logic is
- good enough.
-
- */
- }
- }
- // find a polyline surrounding the target
- return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
- }).then(polyline => {
- // continue a little longer in the scanning state
- if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
- return {
- nextState: this.name,
- trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
- };
- }
- // this image should correspond to the best homography
- const snapshot = this._pipeline.node('imagePortalSink');
- // the reference image that we'll track
- const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
- // let's track the target!
- return {
- nextState: 'pre-tracking',
- nextStateSettings: {
- homography: this._bestHomography,
- snapshot: snapshot,
- referenceImage: referenceImage,
- },
- trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
- };
- }).catch(() => {
- // continue in the scanning state
- return {
- nextState: this.name,
- trackerOutput: trackerOutput,
- };
- });
- }
- else {
- // not enough matches...!
- this._counter = 0;
- this._bestScore = 0;
- }
- // we'll continue to scan the scene
- return speedy_vision_default().Promise.resolve({
- nextState: this.name,
- trackerOutput: trackerOutput,
- });
- }
- /**
- * Find "high quality" matches of a single reference image
- * @param keypoints
- * @returns high quality matches
- */
- _goodMatches(keypoints) {
- const matchedKeypointsPerImageIndex = Object.create(null);
- // filter "good matches"
- for (let j = keypoints.length - 1; j >= 0; j--) {
- const keypoint = keypoints[j];
- if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
- const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
- // the best match should be "much better" than the second best match,
- // which means that they are "distinct enough"
- if (d1 <= SCAN_MATCH_RATIO * d2) {
- const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
- //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
- //if(idx1 == idx2 && idx1 >= 0) {
- if (idx1 >= 0) {
- if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
- matchedKeypointsPerImageIndex[idx1] = [];
- matchedKeypointsPerImageIndex[idx1].push(keypoint);
- }
- }
- }
- }
- // find the image with the most matches
- let matchedKeypoints = [];
- for (const imageIndex in matchedKeypointsPerImageIndex) {
- if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
- matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
- }
- // done!
- return matchedKeypoints;
- }
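- /*
- * Illustrative example of the ratio test above (SCAN_MATCH_RATIO is assumed to
- * be 0.7 here): a keypoint whose two nearest matches have distances d1 = 32 and
- * d2 = 80 passes the test (32 <= 0.7 * 80 = 56) and is kept; one with d1 = 60
- * and d2 = 70 fails (60 > 49) and is discarded as ambiguous. Only the reference
- * image that accumulates the most surviving matches is returned.
- */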
- /**
- * Find a homography matrix using matched keypoints
- * @param matchedKeypoints "good" matches only
- * @returns homography from reference image space to AR screen space & homography "quality" score
- */
- _findHomography(matchedKeypoints) {
- const srcCoords = [];
- const dstCoords = [];
- // find matching coordinates of the keypoints
- for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
- const matchedKeypoint = matchedKeypoints[i];
- const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
- if (referenceKeypoint != null) {
- srcCoords.push(referenceKeypoint.x);
- srcCoords.push(referenceKeypoint.y);
- dstCoords.push(matchedKeypoint.x);
- dstCoords.push(matchedKeypoint.y);
- }
- else {
- // this shouldn't happen
- return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
- }
- }
- // too few points?
- const n = srcCoords.length / 2;
- if (n < 4) {
- return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
- }
- // compute a homography
- const src = speedy_vision_default().Matrix(2, n, srcCoords);
- const dst = speedy_vision_default().Matrix(2, n, dstCoords);
- const mask = speedy_vision_default().Matrix.Zeros(1, n);
- const homography = speedy_vision_default().Matrix.Zeros(3);
- return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
- method: 'pransac',
- reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512,
- bundleSize: 128,
- mask: mask,
- }).then(homography => {
- // check if this is a valid homography
- const a00 = homography.at(0, 0);
- if (Number.isNaN(a00))
- throw new DetectionError(`Can't compute homography`);
- // count the number of inliers
- const inliers = mask.read();
- let inlierCount = 0;
- for (let i = inliers.length - 1; i >= 0; i--)
- inlierCount += inliers[i];
- const score = inlierCount / inliers.length;
- // done!
- return [homography, score];
- });
- }
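- /*
- * Illustrative example of the score above: if 8 correspondences are fed to the
- * pransac solver and the inlier mask reads [1, 1, 0, 1, 1, 1, 0, 1], then
- * inlierCount = 6 and score = 6 / 8 = 0.75. Across SCAN_CONSECUTIVE_FRAMES,
- * the homography with the highest score is the one handed over to pre-tracking.
- */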
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
- const pyramid = speedy_vision_default().Image.Pyramid();
- const detector = speedy_vision_default().Keypoint.Detector.FAST();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
- const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
- const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
- const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
- const imagePortalBuffer = speedy_vision_default().Image.Buffer();
- const imagePortalCopy = speedy_vision_default().Transform.Resize();
- //const imageSink = Speedy.Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- detector.levels = SCAN_PYRAMID_LEVELS;
- detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
- detector.threshold = SCAN_FAST_THRESHOLD;
- detector.capacity = 2048;
- clipper.size = SCAN_MAX_KEYPOINTS;
- lshTables.keypoints = [];
- lshTables.numberOfTables = SCAN_LSH_TABLES;
- lshTables.hashSize = SCAN_LSH_HASHSIZE;
- knn.k = 2;
- knn.quality = 'default';
- //knn.quality = 'fastest';
- imagePortalSource.source = imagePortalSink;
- imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
- imagePortalCopy.size = speedy_vision_default().Size(0, 0);
- imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
- keypointSink.turbo = true;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess image
- greyscale.output().connectTo(blur.input());
- greyscale.output().connectTo(nightvisionMux.input('in0'));
- greyscale.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(pyramid.input());
- // keypoint detection
- pyramid.output().connectTo(detector.input());
- detector.output().connectTo(clipper.input());
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- clipper.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(knn.input('keypoints'));
- lshTables.output().connectTo(knn.input('lsh'));
- // prepare output
- clipper.output().connectTo(keypointSink.input());
- knn.output().connectTo(keypointSink.input('matches'));
- //pyramid.output().connectTo(imageSink.input());
- // memorize image
- source.output().connectTo(imagePortalBuffer.input());
- imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
- imagePortalSource.output().connectTo(imagePortalCopy.input());
- imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
- imagePortalMux.output().connectTo(imagePortalSink.input());
- // done!
- pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * pre-tracking.ts
- * Pre-tracking state of the Image Tracker
- */
-
-
-
-
-
- /** Default target space size (used when training) */
- const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
- /** Use the camera stream as the input of the pipeline */
- const PORT_CAMERA_IMAGE = 1;
- /** Use the reference image as the input of the pipeline */
- const PORT_REFERENCE_IMAGE = 0;
- /**
- * The pre-tracking state of the Image Tracker is an additional training
- * phase for the specific target we'll be tracking
- */
- class ImageTrackerPreTrackingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('pre-tracking', imageTracker);
- this._homography = speedy_vision_default().Matrix.Eye(3);
- this._referenceImage = null;
- this._step = 'read-reference-image';
- this._referenceKeypoints = [];
- this._iterations = 0;
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const imagePortalSource = this._pipeline.node('imagePortalSource');
- const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
- const homography = settings.homography;
- const referenceImage = settings.referenceImage;
- const snapshot = settings.snapshot;
- // this shouldn't happen
- if (!referenceImage)
- throw new TrackingError(`Can't track a null reference image`);
- // set attributes
- this._homography = homography;
- this._referenceImage = referenceImage;
- this._step = 'read-reference-image';
- this._referenceKeypoints = [];
- this._iterations = 0;
- // setup the pipeline
- imagePortalSource.source = snapshot;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const referenceImage = this._referenceImage;
- const source = this._pipeline.node('source');
- const sourceMux = this._pipeline.node('sourceMux');
- const imageRectifier = this._pipeline.node('imageRectifier');
- const keypointRectifier = this._pipeline.node('keypointRectifier');
- const borderClipper = this._pipeline.node('borderClipper');
- const screenSize = this.screenSize;
- // set the source media to the reference image we're going to track
- const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
- source.media = targetMedia;
- // setup the source multiplexer
- if (this._step == 'read-reference-image')
- sourceMux.port = PORT_REFERENCE_IMAGE;
- else
- sourceMux.port = PORT_CAMERA_IMAGE;
- // clip keypoints from the borders of the target image
- borderClipper.imageSize = screenSize;
- borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
- // rectify the image
- const rectify = (this._step == 'read-reference-image') ?
- this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
- this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
- return rectify.then(rectificationMatrix => {
- imageRectifier.transform = rectificationMatrix;
- });
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const referenceImage = this._referenceImage;
- const imagePortalSink = this._pipeline.node('imagePortal');
- const keypointPortalSink = this._pipeline.node('keypointPortalSink');
- const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
- const keypoints = result.keypoints;
- const image = result.image;
- // tracker output
- const trackerOutput = {
- keypoints: image !== undefined ? keypoints : undefined,
- image: image,
- screenSize: this.screenSize,
- };
- // decide what to do next
- switch (this._step) {
- case 'read-reference-image': {
- // enable matching
- muxOfReferenceKeypoints.port = 1;
- // store reference keypoints
- this._referenceKeypoints = keypoints;
- // next step
- this._step = 'warp-camera-image';
- return speedy_vision_default().Promise.resolve({
- nextState: 'pre-tracking',
- trackerOutput: trackerOutput,
- });
- }
- case 'warp-camera-image': {
- // freeze reference keypoints
- bufferOfReferenceKeypoints.frozen = true;
- muxOfBufferOfReferenceKeypoints.port = 1;
- // refine warp?
- if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
- this._step = 'warp-camera-image';
- else
- this._step = 'train-camera-image';
- // warp image & go to next step
- return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
- nextState: 'pre-tracking',
- trackerOutput: trackerOutput,
- })).catch(err => {
- Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
- return {
- nextState: 'scanning',
- trackerOutput: trackerOutput,
- };
- });
- }
- case 'train-camera-image': {
- // log
- Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
- // change the coordinates
- return this._changeSpace(this._homography, this.screenSize).then(homography => {
- // we're ready to track the target!
- return speedy_vision_default().Promise.resolve({
- //nextState: 'pre-tracking',
- nextState: 'tracking',
- trackerOutput: trackerOutput,
- nextStateSettings: {
- homography: homography,
- referenceImage: referenceImage,
- templateKeypoints: keypoints,
- keypointPortalSink: keypointPortalSink,
- imagePortalSink: imagePortalSink,
- screenSize: this.screenSize,
- },
- });
- });
- }
- }
- }
- /**
- * Find an adjustment warp between the camera image and the reference image
- * @param dstKeypoints destination
- * @param srcKeypoints source
- * @returns a promise that resolves to a 3x3 homography
- */
- _findWarp(dstKeypoints, srcKeypoints) {
- //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
- const srcCoords = [];
- const dstCoords = [];
- // find matching coordinates of the keypoints
- for (let i = 0; i < dstKeypoints.length; i++) {
- const dstKeypoint = dstKeypoints[i];
- if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
- const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
- // the best match should be "much better" than the second best match,
- // which means that they are "distinct enough"
- if (d1 <= TRACK_MATCH_RATIO * d2) {
- const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
- srcCoords.push(srcKeypoint.x);
- srcCoords.push(srcKeypoint.y);
- dstCoords.push(dstKeypoint.x);
- dstCoords.push(dstKeypoint.y);
- }
- }
- }
- // too few points?
- const n = srcCoords.length / 2;
- if (n < 4) {
- return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
- }
- // compute warp
- const model = speedy_vision_default().Matrix.Eye(3);
- return this._findKeypointWarp().then(transform =>
- // rectify keypoints
- speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
- // find warp
- speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512 * 4,
- bundleSize: 128,
- })).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute warp: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a warp to be applied to the keypoints
- * @returns promise that resolves to a 3x3 matrix encoding an affine transform
- */
- _findKeypointWarp() {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const screenSize = this.screenSize;
- // no rotation is needed
- if (!this._mustRotateWarpedImage(media, screenSize))
- return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
- // rotate by 90 degrees clockwise around the pivot
- const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
- return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
- 0, 1, 0,
- -1, 0, 0,
- py + px, py - px, 1,
- ]));
- }
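- /*
- * Sanity check of the rotation above (illustrative only): reading the
- * column-major entries as a matrix gives
- *
- *   [ 0  -1  px+py ]
- *   [ 1   0  py-px ]
- *   [ 0   0    1   ]
- *
- * which fixes the pivot, since (px, py) maps to (px+py-py, px+py-px) = (px, py),
- * and sends (px+1, py) to (px, py+1), i.e. one pixel below the pivot: a
- * 90-degree clockwise rotation when the y-axis grows downwards, as in image space.
- */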
- /**
- * Change the space of the homography in order to improve tracking quality
- * @param homography mapping coordinates from normalized target space to AR screen space
- * @param screenSize AR screen size
- * @returns homography mapping coordinates from AR screen space to AR screen space
- */
- _changeSpace(homography, screenSize) {
- const sw = screenSize.width, sh = screenSize.height;
- const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
- const mat = speedy_vision_default().Matrix.Zeros(3);
- return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
- const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
- const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
- const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imagePortalSource.source = null;
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- sourceMux.port = PORT_REFERENCE_IMAGE;
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- detector.quality = TRACK_HARRIS_QUALITY;
- detector.capacity = TRACK_DETECTOR_CAPACITY;
- subpixel.method = SUBPIXEL_METHOD;
- clipper.size = TRACK_MAX_KEYPOINTS;
- borderClipper.imageSize = screen.size;
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- matcher.k = 2;
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- keypointPortalSource.source = keypointPortalSink;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
- imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
- sourceMux.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(blur.input());
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
- muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
- muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // store reference keypoints
- keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
- bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
- keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
- // portals
- descriptor.output().connectTo(keypointPortalSink.input());
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- //imageRectifier.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * image-tracker-event.ts
- * Events emitted by an Image Tracker
- */
-
- /**
- * An event emitted by an Image Tracker
- */
- class ImageTrackerEvent extends AREvent {
- /**
- * Constructor
- * @param type event type
- * @param referenceImage optional reference image
- */
- constructor(type, referenceImage) {
- super(type);
- this._referenceImage = referenceImage;
- }
- /**
- * Reference image
- */
- get referenceImage() {
- return this._referenceImage;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * camera-model.ts
- * Camera model
- */
-
-
-
-
- /** A guess of the horizontal field-of-view of a typical camera, in degrees */
- const HFOV_GUESS = 60; // https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html
- /** Number of iterations used to refine the estimated pose */
- const POSE_ITERATIONS = 30;
- /** Number of samples used in the rotation filter */
- const ROTATION_FILTER_SAMPLES = 10;
- /** Number of samples used in the translation filter */
- const TRANSLATION_FILTER_SAMPLES = 5;
- /** Convert degrees to radians */
- const DEG2RAD = 0.017453292519943295; // pi / 180
- /** Convert radians to degrees */
- const RAD2DEG = 57.29577951308232; // 180 / pi
- /** Numerical tolerance */
- const EPSILON = 1e-6;
- /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
- const FX = 0;
- /** Index of the vertical focal length in the camera intrinsics matrix */
- const FY = 4;
- /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
- const U0 = 6;
- /** Index of the vertical position of the principal point in the camera intrinsics matrix */
- const V0 = 7;
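- /*
- * Layout of the 3x3 intrinsics matrix K in column-major format (the flat
- * this._intrinsics array), showing which entries the indices above select:
- *
- *       [ fx  0  u0 ]    flat array: [ fx, 0, 0,  0, fy, 0,  u0, v0, 1 ]
- *   K = [  0 fy  v0 ]    indices:      0   1  2   3   4  5    6   7  8
- *       [  0  0   1 ]
- */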
- /**
- * Camera model
- */
- class CameraModel {
- /**
- * Constructor
- */
- constructor() {
- this._screenSize = speedy_vision_default().Size(0, 0);
- this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
- this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // 3x3 identity matrix
- this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // 3x4 matrix [ R | t ] = [ I | 0 ] no rotation & no translation
- this._partialRotationBuffer = [];
- this._translationBuffer = [];
- }
- /**
- * Initialize the model
- * @param screenSize
- */
- init(screenSize) {
- // validate
- if (screenSize.area() == 0)
- throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
- // set the screen size
- this._screenSize.width = screenSize.width;
- this._screenSize.height = screenSize.height;
- // reset the model
- this.reset();
- // log
- Utils.log(`Initializing the camera model...`);
- }
- /**
- * Release the model
- */
- release() {
- this.reset();
- return null;
- }
- /**
- * Update the camera model
- * @param homography 3x3 perspective transform
- * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
- * @returns promise that resolves to a camera matrix
- */
- update(homography, screenSize) {
- // validate the shape of the homography
- if (homography.rows != 3 || homography.columns != 3)
- throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
- // validate screenSize
- if (screenSize.area() == 0)
- throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
- // changed screen size?
- if (!this._screenSize.equals(screenSize)) {
- Utils.log(`Camera model: detected a change in screen size...`);
- // update the screen size
- this._screenSize.width = screenSize.width;
- this._screenSize.height = screenSize.height;
- // reset camera
- this.reset();
- }
- // read the entries of the homography
- const h = homography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
- // validate the homography (homography matrices aren't singular)
- const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
- if (Math.abs(det) < EPSILON) {
- Utils.warning(`Can't update the camera model using an invalid homography matrix`);
- return speedy_vision_default().Promise.resolve(this._matrix);
- }
- // estimate the pose
- const pose = this._estimatePose(homography);
- this._extrinsics = pose.read();
- // compute the camera matrix
- const C = this.denormalizer();
- const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
- const E = pose; //Speedy.Matrix(3, 4, this._extrinsics);
- this._matrix.setToSync(K.times(E).times(C));
- //console.log("intrinsics -----------", K.toString());
- //console.log("matrix ----------------",this._matrix.toString());
- return speedy_vision_default().Promise.resolve(this._matrix);
- }
- /**
- * Reset camera model
- */
- reset() {
- this._resetIntrinsics();
- this._resetExtrinsics();
- }
- /**
- * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
- * 2D AR screen space (measured in pixels)
- * @returns 3x4 camera matrix
- */
- get matrix() {
- return this._matrix;
- }
- /**
- * Camera intrinsics matrix
- * @returns 3x3 intrinsics matrix in column-major format
- */
- get intrinsics() {
- return this._intrinsics;
- }
- /**
- * Camera extrinsics matrix
- * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
- */
- get extrinsics() {
- return this._extrinsics;
- }
- /**
- * Convert coordinates from normalized space [-1,1]^3 to a
- * "3D pixel space" based on the dimensions of the AR screen.
- *
- * We perform a 180-degrees rotation around the x-axis so that
- * it looks nicer (the y-axis grows downwards in image space).
- *
- * The final camera matrix is P = K * [ R | t ] * C, where
- * C is this conversion matrix. The intent behind this is to
- * make tracking independent of target and screen sizes.
- *
- * Reminder: we use a right-handed coordinate system in 3D!
- * In 2D image space the coordinate system is left-handed.
- *
- * @returns 4x4 conversion matrix C
- */
- denormalizer() {
- const w = this._screenSize.width / 2; // half width, in pixels
- const h = this._screenSize.height / 2; // half height, in pixels
- const d = Math.min(w, h); // virtual unit length, in pixels
- /*
- return Speedy.Matrix(4, 4, [
- 1, 0, 0, 0,
- 0,-1, 0, 0,
- 0, 0,-1, 0,
- w/d, h/d, 0, 1/d
- ]);
- */
- return speedy_vision_default().Matrix(4, 4, [
- d, 0, 0, 0,
- 0, -d, 0, 0,
- 0, 0, -d, 0,
- w, h, 0, 1,
- ]);
- }
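- /*
-  Worked example (illustrative, not from the original source): for a 640x480
-  screen, w = 320, h = 240 and d = min(320, 240) = 240. Then C maps the
-  normalized point (0, 0, 0, 1) to (w, h, 0, 1) = (320, 240, 0, 1), the center
-  of the AR screen, and (1, 1, 0, 1) to (w + d, h - d, 0, 1) = (560, 0, 0, 1),
-  showing the flip of the y-axis.
- */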
- /**
- * Size of the AR screen space, in pixels
- * @returns size in pixels
- */
- get screenSize() {
- return this._screenSize;
- }
- /**
- * Focal length in pixel units (projection distance in the pinhole camera model),
- * i.e., (focal length in mm) * (pixel density in pixels/mm)
- * @returns focal length, in pixels
- */
- get focalLength() {
- return this._intrinsics[FY]; // fx == fy
- }
- /**
- * Horizontal field-of-view, given in radians
- * @returns horizontal field-of-view, in radians
- */
- get fovx() {
- return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
- }
- /**
- * Vertical field-of-view, given in radians
- * @returns vertical field-of-view
- */
- get fovy() {
- return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
- }
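- /*
-  Note: the fovx and fovy formulas above assume that the principal point lies
-  at the center of the image, i.e., u0 = width/2 and v0 = height/2 (which is
-  how _resetIntrinsics() sets it). In that case tan(fovx/2) = (width/2) / fx
-  and tan(fovy/2) = (height/2) / fy.
- */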
- /**
- * Principal point
- * @returns principal point, in pixel coordinates
- */
- principalPoint() {
- return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
- }
- /**
- * Reset camera extrinsics
- */
- _resetExtrinsics() {
- // set the rotation matrix to the identity
- this._extrinsics.fill(0);
- this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
- // reset filters
- this._partialRotationBuffer.length = 0;
- this._translationBuffer.length = 0;
- }
- /**
- * Reset camera intrinsics
- */
- _resetIntrinsics() {
- const cameraWidth = Math.max(this._screenSize.width, this._screenSize.height); // use the larger dimension (covers portrait mode too)
- const u0 = this._screenSize.width / 2;
- const v0 = this._screenSize.height / 2;
- const fx = (cameraWidth / 2) / Math.tan(DEG2RAD * HFOV_GUESS / 2);
- const fy = fx;
- this._intrinsics[FX] = fx;
- this._intrinsics[FY] = fy;
- this._intrinsics[U0] = u0;
- this._intrinsics[V0] = v0;
- }
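- /*
-  Illustrative arithmetic (assuming, for the sake of example, HFOV_GUESS = 60
-  degrees; the actual constant is defined elsewhere in this bundle): for a
-  1280x720 landscape screen, cameraWidth = 1280, u0 = 640, v0 = 360 and
-  fx = fy = 640 / tan(30 degrees), approximately 1108.5 pixels.
- */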
- /**
- * Compute a normalized homography H^ = K^(-1) * H for an
- * ideal pinhole with f = 1 and principal point = (0,0)
- * @param homography homography H to be normalized
- * @returns normalized homography H^
- */
- _normalizeHomography(homography) {
- const h = homography.read();
- const u0 = this._intrinsics[U0];
- const v0 = this._intrinsics[V0];
- const fx = this._intrinsics[FX];
- const fy = this._intrinsics[FY];
- const u0fx = u0 / fx;
- const v0fy = v0 / fy;
- const h11 = h[0] / fx - u0fx * h[2], h12 = h[3] / fx - u0fx * h[5], h13 = h[6] / fx - u0fx * h[8];
- const h21 = h[1] / fy - v0fy * h[2], h22 = h[4] / fy - v0fy * h[5], h23 = h[7] / fy - v0fy * h[8];
- const h31 = h[2], h32 = h[5], h33 = h[8];
- /*console.log([
- h11, h21, h31,
- h12, h22, h32,
- h13, h23, h33,
- ]);*/
- return speedy_vision_default().Matrix(3, 3, [
- h11, h21, h31,
- h12, h22, h32,
- h13, h23, h33,
- ]);
- }
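- /*
-  The entrywise computation above is simply H^ = K^(-1) * H, where
-  K = [ fx 0 u0 ; 0 fy v0 ; 0 0 1 ] and
-  K^(-1) = [ 1/fx 0 -u0/fx ; 0 1/fy -v0/fy ; 0 0 1 ].
- */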
- /**
- * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
- * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
- * @returns a 3x3 matrix
- */
- _estimatePartialPose(normalizedHomography) {
- const h = normalizedHomography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6];
- const h21 = h[1], h22 = h[4], h23 = h[7];
- const h31 = h[2], h32 = h[5], h33 = h[8];
- const h1norm2 = h11 * h11 + h21 * h21 + h31 * h31;
- const h2norm2 = h12 * h12 + h22 * h22 + h32 * h32;
- const h1norm = Math.sqrt(h1norm2);
- const h2norm = Math.sqrt(h2norm2);
- //const hnorm = (h1norm + h2norm) / 2;
- //const hnorm = Math.sqrt(h1norm * h2norm);
- const hnorm = Math.max(h1norm, h2norm); // this seems to work. why?
- // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
- // if h1norm is not approximately h2norm, it means that the first two columns of
- // the normalized homography are not really encoding a rotation (up to a scale)
- //console.log("h1,h2",h1norm,h2norm);
- //console.log(normalizedHomography.toString());
- // compute a rough estimate for the scale factor
- // select the sign so that t3 = tz > 0
- const sign = h33 >= 0 ? 1 : -1;
- let scale = sign / hnorm;
- // sanity check
- if (Number.isNaN(scale))
- return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
- // recover the rotation
- let r = new Array(6);
- r[0] = scale * h11;
- r[1] = scale * h21;
- r[2] = scale * h31;
- r[3] = scale * h12;
- r[4] = scale * h22;
- r[5] = scale * h32;
- // refine the rotation
- r = this._refineRotation(r); // r is initially noisy
- /*
-
- After refining the rotation vectors, let's adjust the scale factor as
- follows:
-
- We know that [ r1 | r2 | t ] is equal to the normalized homography H up
- to a non-zero scale factor s, i.e., [ r1 | r2 | t ] = s H. Let's call M
- the first two columns of H, i.e., M = [ h1 | h2 ], and R = [ r1 | r2 ].
- It follows that R = s M, meaning that M'R = s M'M. The trace of 2x2 M'R
- is such that tr(M'R) = tr(s M'M) = s tr(M'M), which means:
-
- s = tr(M'R) / tr(M'M) = (r1'h1 + r2'h2) / (h1'h1 + h2'h2)
-
- (also: s^2 = det(M'R) / det(M'M))
-
- */
- // adjust the scale factor
- scale = r[0] * h11 + r[1] * h21 + r[2] * h31;
- scale += r[3] * h12 + r[4] * h22 + r[5] * h32;
- scale /= h1norm2 + h2norm2;
- // recover the translation
- let t = new Array(3);
- t[0] = scale * h13;
- t[1] = scale * h23;
- t[2] = scale * h33;
- // done!
- return speedy_vision_default().Matrix(3, 3, r.concat(t));
- }
- /**
- * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
- * @param rot rotation vectors [ r1 | r2 ] in column-major format
- * @returns a 3x2 matrix R such that R'R = I (column-major format)
- */
- _refineRotation(rot) {
- const [r11, r21, r31, r12, r22, r32] = rot;
- /*
-
- A little technique I figured out to correct the rotation vectors
- ----------------------------------------------------------------
-
- We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
- R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
- because vectors r1 and r2 are not perfectly orthonormal due to noise.
-
- Let's first notice that R'R is symmetric. You can easily check that its
- two eigenvalues are both real and positive (as long as r1, r2 != 0 and
- r1 is not parallel to r2, but we never take such vectors as input).
-
- R'R = [ r1'r1 r1'r2 ; r1'r2 r2'r2 ] is of rank 2 and positive-definite
-
- We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
- chosen to be orthogonal and D is a diagonal matrix whose entries are
- the eigenvalues of R'R.
-
- Let LL' be the Cholesky decomposition of D. Such decomposition exists
- and is trivially computed: just take the square roots of the entries of
- D. Since L is diagonal, we have L = L'. Its inverse is also trivially
- computed - call it Linv.
-
- Now, define a 2x2 correction matrix C as follows:
-
- C = Q * Linv * Q'
-
- This matrix rotates the input vector, scales it by some amount, and
- then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
-
- We compute RC in order to correct the rotation vectors. We take its
- two columns as the corrected vectors.
-
- In order to show that the two columns of RC are orthonormal, we can
- show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
- expand the expression:
-
- (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
- Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
- Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
-
- I have provided below a closed formula to correct the rotation vectors.
-
- What C does to R is very interesting: it makes the singular values
- become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
- values of R are the square roots of the eigenvalues of R'R. Letting
- S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
- means that RC is equivalent to the correction "trick" using the SVD
- found in the computer vision literature (i.e., compute the SVD and
- return U V'). That "trick" is known to return the rotation matrix that
- minimizes the Frobenius norm of the difference between the input and
- the output. Consequently, the technique I have just presented is also
- optimal in that sense!
-
- By the way, the input matrix R does not need to be 3x2.
-
- */
- // compute the entries of R'R
- const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
- const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
- const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
- // compute the two real eigenvalues of R'R
- const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
- const sqrt = Math.sqrt(delta); // delta >= 0 always
- const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
- const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
- // compute two unit eigenvectors qi = (xi,yi) of R'R
- const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
- const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
- const y1 = x1 / alpha1;
- const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
- const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
- const y2 = x2 / alpha2;
- // compute the Cholesky decomposition LL' of the diagonal matrix D
- // whose entries are the two eigenvalues of R'R and then invert L
- const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
- const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
- // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
- // is orthogonal and Linv is computed as above
- const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
- const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
- const C = Q.times(Linv).times(Qt);
- // correct the rotation vectors r1 and r2 using C
- const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
- return speedy_vision_default().Matrix(R.times(C)).read();
- }
- /**
- * Compute a refined translation vector
- * @param normalizedHomography ideal pinhole K = I
- * @param rot rotation vectors [ r1 | r2 ] in column-major format
- * @param t0 initial estimate for the translation vector
- * @returns 3x1 translation vector in column-major format
- */
- _refineTranslation(normalizedHomography, rot, t0) {
- /*
-
- Given a normalized homography H, the rotation vectors r1, r2, and a
- translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
- scale factor s.
-
- If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
- [ r1 | r2 | t ] u is parallel to H u, which means that their cross
- product is zero:
-
- [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
-
- The following code finds an optimal translation vector t based on the
- above observation. H, r1, r2 are known.
-
- */
- const h = normalizedHomography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6];
- const h21 = h[1], h22 = h[4], h23 = h[7];
- const h31 = h[2], h32 = h[5], h33 = h[8];
- const r11 = rot[0], r12 = rot[3];
- const r21 = rot[1], r22 = rot[4];
- const r31 = rot[2], r32 = rot[5];
- // sample points [ xi yi ]' in AR screen space
- //const x = [ 0.5, 0.0, 1.0, 1.0, 0.0, 0.5, 1.0, 0.5, 0.0 ];
- //const y = [ 0.5, 0.0, 0.0, 1.0, 1.0, 0.0, 0.5, 1.0, 0.5 ];
- const x = [0.5, 0.0, 1.0, 1.0, 0.0];
- const y = [0.5, 0.0, 0.0, 1.0, 1.0];
- const n = x.length;
- const n3 = 3 * n;
- const width = this._screenSize.width;
- const height = this._screenSize.height;
- for (let i = 0; i < n; i++) {
- x[i] *= width;
- y[i] *= height;
- }
- // set auxiliary values: ai = H [ xi yi 1 ]'
- const a1 = new Array(n);
- const a2 = new Array(n);
- const a3 = new Array(n);
- for (let i = 0; i < n; i++) {
- a1[i] = x[i] * h11 + y[i] * h12 + h13;
- a2[i] = x[i] * h21 + y[i] * h22 + h23;
- a3[i] = x[i] * h31 + y[i] * h32 + h33;
- }
- // we'll solve M t = v for t with linear least squares
- // M: 3n x 3, v: 3n x 1, t: 3 x 1
- const m = new Array(3 * n * 3);
- const v = new Array(3 * n);
- for (let i = 0, k = 0; k < n; i += 3, k++) {
- m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
- m[i + n3] = -(m[i + 1] = a3[k]);
- m[i + 2] = -(m[i + n3 + n3] = a2[k]);
- m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
- v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
- v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
- v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
- }
- /*
- // this works, but I want something more lightweight
- const M = Speedy.Matrix(n3, 3, m);
- const v_ = Speedy.Matrix(n3, 1, v);
- return Speedy.Matrix(M.ldiv(v_)).read();
- */
- /*
-
- Gradient descent with optimal step size / learning rate
- -------------------------------------------------------
-
- Let's find the column-vector x that minimizes the error function
- E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
- least squares. We want to find x easily, QUICKLY and iteratively.
-
- The update rule of gradient descent is set to:
-
- x := x - w * grad(E)
-
- where w is the learning rate and grad(E) is the gradient of E(x):
-
- grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
-
- Let's adjust w to make x "converge quickly". Define function S(w) as:
-
- S(w) = x - w * grad(E) (step)
-
- and another function F(w) as:
-
- F(w) = E(S(w))
-
- which is the error of the step. We minimize F by setting its derivative
- to zero:
-
- 0 = dF/dw = (dF/dS) (dS/dw)
-
- What follows is a fair amount of algebra. Do the math and you'll find
- the following optimal update rule:
-
- x := x - [ (c'c) / ((Ac)'(Ac)) ] c
-
- where c = A'r = A'(Ax - b)
-
- */
- // gradient descent: super lightweight implementation
- const r = new Array(3 * n);
- const c = new Array(3);
- const Mc = new Array(3 * n);
- // initial guess
- const t = new Array(3);
- t[0] = t0[0];
- t[1] = t0[1];
- t[2] = t0[2];
- // iterate
- const MAX_ITERATIONS = 15;
- const TOLERANCE = 1;
- for (let it = 0; it < MAX_ITERATIONS; it++) {
- //console.log("it",it+1);
- // compute residual r = Mt - v
- for (let i = 0; i < n3; i++) {
- r[i] = 0;
- for (let j = 0; j < 3; j++)
- r[i] += m[j * n3 + i] * t[j];
- r[i] -= v[i];
- }
- // compute c = M'r
- for (let i = 0; i < 3; i++) {
- c[i] = 0;
- for (let j = 0; j < n3; j++)
- c[i] += m[i * n3 + j] * r[j];
- }
- // compute Mc
- for (let i = 0; i < n3; i++) {
- Mc[i] = 0;
- for (let j = 0; j < 3; j++)
- Mc[i] += m[j * n3 + i] * c[j];
- }
- // compute c'c
- let num = 0;
- for (let i = 0; i < 3; i++)
- num += c[i] * c[i];
- //console.log("c'c=",num);
- if (num < TOLERANCE)
- break;
- // compute (Mc)'(Mc)
- let den = 0;
- for (let i = 0; i < n3; i++)
- den += Mc[i] * Mc[i];
- // compute frc = c'c / (Mc)'(Mc)
- const frc = num / den;
- if (Number.isNaN(frc)) // this shouldn't happen
- break;
- // iterate: t = t - frc * c
- for (let i = 0; i < 3; i++)
- t[i] -= frc * c[i];
- }
- //console.log("OLD t:\n\n",t0.join('\n'));
- //console.log("new t:\n\n",t.join('\n'));
- // done!
- return t;
- }
- /**
- * Apply a smoothing filter to the partial pose
- * @param partialPose 3x3 [ r1 | r2 | t ]
- * @returns filtered partial pose
- */
- _filterPartialPose(partialPose) {
- const avg = new Array(9).fill(0);
- const entries = partialPose.read();
- const rotationBlock = entries.slice(0, 6);
- const translationBlock = entries.slice(6, 9);
- // how many samples should we store, at most?
- const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
- const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
- const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
- // is it a valid partial pose?
- if (!Number.isNaN(entries[0])) {
- // store samples
- this._partialRotationBuffer.unshift(rotationBlock);
- if (this._partialRotationBuffer.length > N)
- this._partialRotationBuffer.length = N;
- this._translationBuffer.unshift(translationBlock);
- if (this._translationBuffer.length > M)
- this._translationBuffer.length = M;
- }
- else if (this._partialRotationBuffer.length == 0) {
- // invalid pose, no samples
- return speedy_vision_default().Matrix.Eye(3);
- }
- // average *nearby* rotations
- const n = this._partialRotationBuffer.length;
- for (let i = 0; i < n; i++) {
- const r = this._partialRotationBuffer[i];
- for (let j = 0; j < 6; j++)
- avg[j] += r[j] / n;
- }
- const r = this._refineRotation(avg);
- // average translations
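- // (weighted average with linearly decaying weights m, m-1, ..., 1, normalized
- // by m + (m-1) + ... + 1 = (m*m + m) / 2, so that newer samples, which are
- // stored at the front of the buffer, count more)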
- const m = this._translationBuffer.length;
- for (let i = 0; i < m; i++) {
- const t = this._translationBuffer[i];
- for (let j = 0; j < 3; j++)
- avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
- //avg[6 + j] += t[j] / m;
- }
- const t = [avg[6], avg[7], avg[8]];
- // done!
- return speedy_vision_default().Matrix(3, 3, r.concat(t));
- }
- /**
- * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
- * @param partialPose
- * @returns 3x4 matrix
- */
- _estimateFullPose(partialPose) {
- const p = partialPose.read();
- const r11 = p[0], r12 = p[3], t1 = p[6];
- const r21 = p[1], r22 = p[4], t2 = p[7];
- const r31 = p[2], r32 = p[5], t3 = p[8];
- // r3 = +- ( r1 x r2 )
- let r13 = r21 * r32 - r31 * r22;
- let r23 = r31 * r12 - r11 * r32;
- let r33 = r11 * r22 - r21 * r12;
- // let's make sure that det R = +1 (keep the orientation)
- const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
- if (det < 0) {
- r13 = -r13;
- r23 = -r23;
- r33 = -r33;
- }
- // done!
- return speedy_vision_default().Matrix(3, 4, [
- r11, r21, r31,
- r12, r22, r32,
- r13, r23, r33,
- t1, t2, t3,
- ]);
- }
- /**
- * Estimate the pose [ R | t ] given a homography in AR screen space
- * @param homography must be valid
- * @returns 3x4 matrix
- */
- _estimatePose(homography) {
- const normalizedHomography = this._normalizeHomography(homography);
- const partialPose = speedy_vision_default().Matrix.Eye(3);
- // we want the estimated partial pose [ r1 | r2 | t ] to be as close
- // as possible to the normalized homography, up to a scale factor;
- // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
- // it won't be a perfect equality due to noise in the homography.
- // remark: composition of homographies
- const residual = speedy_vision_default().Matrix(normalizedHomography);
- for (let k = 0; k < POSE_ITERATIONS; k++) {
- // incrementally improve the partial pose
- const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
- partialPose.setToSync(rt.times(partialPose));
- residual.setToSync(residual.times(rt.inverse()));
- //console.log("rt",rt.toString());
- //console.log("residual",residual.toString());
- }
- //console.log('-----------');
- // refine the translation vector
- const mat = partialPose.read();
- const r = mat.slice(0, 6);
- const t0 = mat.slice(6, 9);
- const t = this._refineTranslation(normalizedHomography, r, t0);
- const refinedPartialPose = speedy_vision_default().Matrix(3, 3, r.concat(t));
- // filter the partial pose
- const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
- // estimate the full pose
- //const finalPartialPose = partialPose;
- const finalPartialPose = filteredPartialPose;
- return this._estimateFullPose(finalPartialPose);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/pose.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * pose.ts
- * A pose represents a position and an orientation in a 3D space
- */
- /**
- * A pose represents a position and an orientation in a 3D space
- * (and sometimes a scale, too...)
- */
- class Pose {
- /**
- * Constructor
- * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
- */
- constructor(transform) {
- this._transform = transform;
- }
- /**
- * A transform describing the position and the orientation
- * of the pose relative to the 3D space to which it belongs
- */
- get transform() {
- return this._transform;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/transform.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * transform.ts
- * 3D geometrical transforms
- */
-
-
- /**
- * A 3D transformation
- */
- class BaseTransform {
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- if (matrix.rows != 4 || matrix.columns != 4)
- throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
- this._matrix = matrix;
- }
- /**
- * The 4x4 transformation matrix (read-only)
- */
- get matrix() {
- return this._matrix;
- }
- }
- /**
- * An invertible 3D transformation
- */
- class InvertibleTransform extends BaseTransform {
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes an invertible transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
- return new InvertibleTransform(inverseMatrix);
- }
- }
- /**
- * A 3D transformation described by translation, rotation and scale
- */
- class StandardTransform extends InvertibleTransform {
- // TODO: position, rotation and scale attributes
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes a standard transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- /*
-
- The inverse of a 4x4 standard transform T * R * S...
-
- [ RS t ; 0' 1 ] is [ ZR' -ZR't ; 0' 1 ]
-
- where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
-
- */
- return super.inverse;
- }
- }
- /**
- * A 3D transformation described by position and orientation
- */
- class RigidTransform extends StandardTransform {
- // TODO: position and rotation attributes (need to decompose the matrix)
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes a rigid transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- /*
-
- The inverse of a 4x4 rigid transform
-
- [ R t ; 0' 1 ] is [ R' -R't ; 0' 1 ]
-
- where R is 3x3, t is 3x1 and 0' is 1x3
-
- */
- const m = this._matrix.read();
- if (m[15] == 0) // error? abs()??
- throw new IllegalOperationError('Not a rigid transform');
- const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
- const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
- const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
- const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
- const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
- const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
- const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
- const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
- const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
- r11, r12, r13, 0,
- r21, r22, r23, 0,
- r31, r32, r33, 0,
- -rt1, -rt2, -rt3, 1
- ]);
- return new RigidTransform(inverseMatrix);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewer-pose.ts
- * The pose of a virtual camera in 3D world space at a moment in time
- */
-
-
-
- /**
- * The pose of a virtual camera in 3D world space at a moment in time
- */
- class ViewerPose extends Pose {
- /**
- * Constructor
- * @param camera camera model
- */
- constructor(camera) {
- // compute the view matrix and its inverse in AR screen space
- const viewMatrix = ViewerPose._computeViewMatrix(camera);
- const inverseTransform = new RigidTransform(viewMatrix);
- super(inverseTransform.inverse);
- this._viewMatrix = viewMatrix;
- }
- /**
- * This 4x4 matrix moves 3D points from world space to viewer space. We
- * assume that the camera is looking in the direction of the negative
- * z-axis (WebGL-friendly)
- */
- get viewMatrix() {
- return this._viewMatrix;
- }
- /**
- * Compute the view matrix in AR screen space, measured in pixels
- * @param camera
- * @returns a 4x4 matrix describing a rotation and a translation
- */
- static _computeViewMatrix(camera) {
- /*
-
- // this is the view matrix in AR screen space, measured in pixels
- // we augment the extrinsics matrix, making it 4x4 by adding a
- // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
- const V = Speedy.Matrix(4, 4, [
- E[0], E[1], E[2], 0,
- E[3], E[4], E[5], 0,
- E[6], E[7], E[8], 0,
- E[9], E[10], E[11], 1
- ]);
-
- // we premultiply V by F, which performs a rotation around the
- // x-axis by 180 degrees, so that we get the 3D objects in front
- // of the camera pointing in the direction of the negative z-axis
- const F = Speedy.Matrix(4, 4, [
- 1, 0, 0, 0,
- 0,-1, 0, 0,
- 0, 0,-1, 0,
- 0, 0, 0, 1
- ]);
-
- Matrix F * V is matrix V with the second and third rows negated
-
- */
- const E = camera.extrinsics;
- return speedy_vision_default().Matrix(4, 4, [
- E[0], -E[1], -E[2], 0,
- E[3], -E[4], -E[5], 0,
- E[6], -E[7], -E[8], 0,
- E[9], -E[10], -E[11], 1
- ]);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/view.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * view.ts
- * A view of the 3D world at a moment in time,
- * featuring the means to project points into clip space
- */
-
-
-
- /** Default distance in pixels of the near plane to the optical center of the camera */
- const DEFAULT_NEAR = 1;
- /** Default distance in pixels of the far plane to the optical center of the camera */
- const DEFAULT_FAR = 20000;
- /**
- * A PerspectiveView is a View defining a symmetric frustum around the z-axis
- * (perspective projection)
- */
- class PerspectiveView {
- /**
- * Constructor
- * @param camera camera model
- * @param near distance of the near plane
- * @param far distance of the far plane
- */
- constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
- const intrinsics = camera.intrinsics;
- const screenSize = camera.screenSize;
- this._near = Math.max(0, +near);
- this._far = Math.max(0, +far);
- if (this._near >= this._far)
- throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
- this._aspect = screenSize.width / screenSize.height;
- this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
- this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
- }
- /**
- * A 4x4 projection matrix for WebGL
- */
- get projectionMatrix() {
- return this._projectionMatrix;
- }
- /**
- * Aspect ratio of the frustum
- */
- get aspect() {
- return this._aspect;
- }
- /**
- * Vertical field-of-view of the frustum, measured in radians
- */
- get fovy() {
- return 2 * Math.atan(this._tanOfHalfFovy);
- }
- /**
- * Distance of the near plane
- */
- get near() {
- return this._near;
- }
- /**
- * Distance of the far plane
- */
- get far() {
- return this._far;
- }
- /**
- * Compute a perspective projection matrix for WebGL
- * @param K camera intrinsics
- * @param near distance of the near plane
- * @param far distance of the far plane
- */
- static _computeProjectionMatrix(K, near, far) {
- // we assume that the principal point is at the center of the image
- const top = near * (K[V0] / K[FY]);
- const right = near * (K[U0] / K[FX]);
- const bottom = -top, left = -right; // symmetric frustum
- // a derivation of this projection matrix can be found at
- // https://www.songho.ca/opengl/gl_projectionmatrix.html
- // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
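- // since the frustum is symmetric, (right+left)/(right-left) = 0 and
- // (top+bottom)/(top-bottom) = 0; also, 2*near/(right-left) = near/right = fx/u0
- // and 2*near/(top-bottom) = near/top = fy/v0, i.e., near cancels out of the
- // first two diagonal entries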
- return speedy_vision_default().Matrix(4, 4, [
- 2 * near / (right - left), 0, 0, 0,
- 0, 2 * near / (top - bottom), 0, 0,
- (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
- 0, 0, -2 * far * near / (far - near), 0
- ]);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewer.ts
- * A viewer represents a virtual camera in 3D world space
- */
-
-
-
-
-
- /**
- * A viewer represents a virtual camera in 3D world space
- */
- class Viewer {
- /**
- * Constructor
- * @param camera camera model
- */
- constructor(camera) {
- this._pose = new ViewerPose(camera);
- this._views = [new PerspectiveView(camera)];
- }
- /**
- * The pose of this viewer
- */
- get pose() {
- return this._pose;
- }
- /**
- * The view of this viewer (only for monoscopic rendering)
- */
- get view() {
- /*
- if(this._views.length > 1)
- throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
- */
- return this._views[0];
- }
- /**
- * The views of this viewer
- */
- /*
- get views(): View[]
- {
- return this._views.concat([]);
- }
- */
- /**
- * Convert a pose from world space to viewer space
- * @param pose a pose in world space
- * @returns a pose in viewer space
- */
- convertToViewerSpace(pose) {
- const modelMatrix = pose.transform.matrix;
- const viewMatrix = this._pose.viewMatrix;
- const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
- const transform = new StandardTransform(modelViewMatrix);
- return new Pose(transform);
- }
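- /*
-  Usage sketch (illustrative only): given a tracker result that exposes a
-  viewer and a list of trackables, as assembled by the tracking state later
-  in this bundle,
-
-  const trackable = result.trackables[0];
-  const poseInViewerSpace = result.viewer.convertToViewerSpace(trackable.pose);
-  const modelViewMatrix = poseInViewerSpace.transform.matrix;
- */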
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * tracking.ts
- * Tracking state of the Image Tracker
- */
-
-
-
-
-
-
-
-
-
-
-
- /** Whether or not we want to accelerate GPU-CPU transfers. Turbo speeds up the transfers at the cost of a slight delay (latency) in the tracking */
- const USE_TURBO = true;
- /** Number of PBOs; meaningful only when using turbo */
- const NUMBER_OF_PBOS = 2;
- /** Frame skipping; meaningful only when using turbo */
- const TURBO_SKIP = 2;
- /**
- * The tracking state of the Image Tracker tracks
- * keypoints of the image target and updates the
- * rectification matrix
- */
- class ImageTrackerTrackingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('tracking', imageTracker);
- this._referenceImage = null;
- this._warpHomography = speedy_vision_default().Matrix.Eye(3);
- this._poseHomography = speedy_vision_default().Matrix.Eye(3);
- this._initialHomography = speedy_vision_default().Matrix.Eye(3);
- this._initialKeypoints = [];
- this._counter = 0;
- this._camera = new CameraModel();
- this._predictedKeypoints = [];
- this._lastPipelineOutput = { keypoints: [] };
- this._pipelineCounter = 0;
- this._lastOutput = {};
- this._lostCounter = 0;
- // we need at least 4 correspondences of points to compute a homography matrix
- Utils.assert(TRACK_MIN_MATCHES >= 4);
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const homography = settings.homography;
- const referenceImage = settings.referenceImage;
- const templateKeypoints = settings.templateKeypoints;
- const keypointPortalSink = settings.keypointPortalSink;
- const screenSize = settings.screenSize; // this.screenSize is not yet set
- const keypointPortalSource = this._pipeline.node('keypointPortalSource');
- // this shouldn't happen
- if (!referenceImage)
- throw new IllegalOperationError(`Can't track a null reference image`);
- // set attributes
- this._referenceImage = referenceImage;
- this._warpHomography = speedy_vision_default().Matrix(homography);
- this._poseHomography = speedy_vision_default().Matrix(homography);
- this._initialHomography = speedy_vision_default().Matrix(homography);
- this._initialKeypoints = templateKeypoints;
- this._counter = 0;
- this._predictedKeypoints = [];
- this._lastPipelineOutput = { keypoints: [] };
- this._pipelineCounter = 0;
- this._lastOutput = {};
- this._lostCounter = 0;
- // setup portals
- keypointPortalSource.source = keypointPortalSink;
- // setup camera
- this._camera.init(screenSize);
- // emit event
- const ev = new ImageTrackerEvent('targetfound', referenceImage);
- this._imageTracker.dispatchEvent(ev);
- // log
- Utils.log(`Tracking image "${referenceImage.name}"...`);
- }
- /**
- * Called when leaving the state
- */
- onLeaveState() {
- const referenceImage = this._referenceImage;
- // release the camera
- this._camera.release();
- // emit event
- const ev = new ImageTrackerEvent('targetlost', referenceImage);
- this._imageTracker.dispatchEvent(ev);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const imageRectifier = this._pipeline.node('imageRectifier');
- const borderClipper = this._pipeline.node('borderClipper');
- const keypointRectifier = this._pipeline.node('keypointRectifier');
- const screenSize = this.screenSize;
- /*
- // pause media (test)
- const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
- const media = source.media as SpeedyMedia;
- (media.source as HTMLVideoElement).pause();
- */
- // clip keypoints from the borders of the target image
- borderClipper.imageSize = screenSize;
- borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
- // rectify the image
- return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
- imageRectifier.transform = warp;
- });
- }
- /**
- * GPU processing
- * @returns promise with the pipeline results
- */
- _gpuUpdate() {
- //return super._gpuUpdate();
- // No turbo?
- if (!USE_TURBO || Settings.powerPreference == 'low-power')
- return super._gpuUpdate();
- // When using turbo, we reduce the GPU usage by skipping every other frame
- const counter = this._pipelineCounter;
- this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
- // Skip frame
- if (counter != 0) {
- if (this._lastPipelineOutput.keypoints !== undefined) {
- this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
- }
- else
- this._predictedKeypoints.length = 0;
- this._lastPipelineOutput.keypoints = this._predictedKeypoints;
- return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
- }
- // Run the pipeline and store the results
- return super._gpuUpdate().then(results => {
- this._lastPipelineOutput = results;
- return results;
- });
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const imageRectifier = this._pipeline.node('imageRectifier');
- const keypoints = result.keypoints;
- const image = result.image;
- const referenceImage = this._referenceImage;
- // find the best keypoint matches
- return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
- // find motion models
- return speedy_vision_default().Promise.all([
- this._findAffineMotion(matches),
- this._findPerspectiveMotion(matches)
- ]);
- }).then(([affineMotion, perspectiveMotion]) => {
- const lowPower = (Settings.powerPreference == 'low-power');
- const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
- // update warp homography
- const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
- const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
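- // (e.g., with turbo on and full power, delay = NUMBER_OF_PBOS * TURBO_SKIP = 4
- // and remainder = 2, so the warp homography is refreshed once every 4 frames)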
- if (!USE_TURBO || this._counter % delay == remainder)
- this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
- // update pose homography
- if (!frozen)
- this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
- // update counter
- this._counter = (this._counter + 1) % delay;
- // update the camera
- if (!frozen)
- return this._camera.update(this._poseHomography, this.screenSize);
- else
- return this._camera.matrix;
- }).then(_ => {
- // find the inverse of the rectification matrix
- const rectificationMatrix = imageRectifier.transform;
- const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
- // move keypoints from rectified space back to image space
- const n = keypoints.length;
- const coords = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- coords[j] = keypoints[i].position.x;
- coords[j + 1] = keypoints[i].position.y;
- }
- return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
- /*
- // test image center
- const coords2: number[] = new Array(2 * n);
- for(let i = 0, j = 0; i < n; i++, j += 2) {
- coords2[j] = this._imageTracker.screenSize.width / 2;
- coords2[j+1] = this._imageTracker.screenSize.height / 2;
- if(i % 2 == 0) {
- coords2[j] = this._imageTracker.screenSize.width / 4;
- coords2[j+1] = this._imageTracker.screenSize.height / 4;
- }
- }
-
- return Speedy.Matrix.applyPerspectiveTransform(
- Speedy.Matrix.Zeros(2, n),
- Speedy.Matrix(2, n, coords2),
- this._poseHomography
- //this._warpHomography
- );
- */
- }).then(mat => {
- /*
-
- const n = keypoints.length;
- const coords = mat.read();
-
- // ** this will interfere with the calculations when frame skipping is on **
-
- // get keypoints in image space
- for(let i = 0, j = 0; i < n; i++, j += 2) {
- keypoints[i].position.x = coords[j];
- keypoints[i].position.y = coords[j+1];
- }
-
- */
- // find a polyline surrounding the target
- return this._findPolyline(this._poseHomography, this.screenSize);
- //return this._findPolyline(this._warpHomography, this.screenSize);
- }).then(polyline => {
- // we let the target object be at the origin of the world space
- // (identity transform). We also perform a change of coordinates,
- // so that we move out from pixel space and into normalized space
- const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
- const transform = new StandardTransform(modelMatrix);
- const pose = new Pose(transform);
- // given the current state of the camera model, we get a viewer
- // compatible with the pose of the target
- const viewer = new Viewer(this._camera);
- // the trackable object
- const trackable = {
- pose: pose,
- referenceImage: referenceImage
- };
- // the result generated by the image tracker
- const result = {
- tracker: this._imageTracker,
- trackables: [trackable],
- viewer: viewer
- };
- // build and save the output
- this._lastOutput = {
- exports: result,
- cameraMatrix: this._camera.matrix,
- homography: this._warpHomography,
- //keypoints: keypoints,
- screenSize: this.screenSize,
- image: image,
- polyline: polyline,
- };
- // we have successfully tracked the target in this frame
- this._lostCounter = 0;
- // done!
- return {
- nextState: 'tracking',
- trackerOutput: this._lastOutput
- };
- }).catch(err => {
- // give some tolerance to tracking errors
- if (err instanceof TrackingError) {
- if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
- //console.log("ABSORB",this._lostCounter,err.toString())
- // absorb the error
- return {
- nextState: 'tracking',
- trackerOutput: this._lastOutput
- };
- }
- }
- // lost tracking
- Utils.warning(`The target has been lost! ${err.toString()}`);
- this._camera.reset();
- // go back to the scanning state
- return {
- nextState: 'scanning',
- trackerOutput: {
- image: image,
- screenSize: this.screenSize,
- },
- };
- });
- }
- /**
- * Find quality matches between two sets of keypoints
- * @param currKeypoints keypoints of the current frame
- * @param prevKeypoints keypoints of the previous frame
- * @returns quality matches
- */
- _findQualityMatches(currKeypoints, prevKeypoints) {
- const result = [[], []];
- const n = currKeypoints.length;
- for (let i = 0; i < n; i++) {
- const currKeypoint = currKeypoints[i];
- if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
- const d1 = currKeypoint.matches[0].distance;
- const d2 = currKeypoint.matches[1].distance;
- if (d1 <= TRACK_MATCH_RATIO * d2) {
- const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
- result[0].push(currKeypoint);
- result[1].push(prevKeypoint);
- }
- }
- }
- return result;
- }
- /**
- * Find a better spatial distribution of the input matches
- * @param matches quality matches
- * @returns refined quality matches
- */
- _refineQualityMatches(matches) {
- const currKeypoints = matches[0];
- const prevKeypoints = matches[1];
- // find a better spatial distribution of the keypoints
- const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
- const n = indices.length; // number of refined matches
- // assemble output
- const result = [new Array(n), new Array(n)];
- for (let i = 0; i < n; i++) {
- result[0][i] = currKeypoints[indices[i]];
- result[1][i] = prevKeypoints[indices[i]];
- }
- // done!
- return result;
- }
- /**
- * Spatially distribute keypoints over a grid
- * @param keypoints keypoints to be distributed
- * @param gridCells number of grid elements in each axis
- * @returns a list of indices of keypoints[]
- */
- _distributeKeypoints(keypoints, gridCells) {
- // get the coordinates of the keypoints
- const n = keypoints.length;
- const points = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- points[j] = keypoints[i].x;
- points[j + 1] = keypoints[i].y;
- }
- // normalize the coordinates to [0,1] x [0,1]
- this._normalizePoints(points);
- // distribute the keypoints over a grid
- const numberOfCells = gridCells * gridCells;
- const grid = (new Array(numberOfCells)).fill(-1);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- // find the grid location of the i-th point
- const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
- const yg = Math.floor(points[j + 1] * gridCells);
- // store the index of the i-th point in the grid
- grid[yg * gridCells + xg] = i;
- }
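- // (if two or more keypoints fall into the same cell, only the last one is kept)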
- // retrieve points of the grid
- const indices = [];
- for (let g = 0; g < numberOfCells; g++) {
- if (grid[g] >= 0) {
- const i = grid[g];
- indices.push(i);
- }
- }
- // done!
- return indices;
- }
- /**
- * Normalize points to [0,1)^2
- * @param points 2 x n matrix of points in column-major format
- * @returns points
- */
- _normalizePoints(points) {
- Utils.assert(points.length % 2 == 0);
- const n = points.length / 2;
- if (n == 0)
- return points;
- let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
- let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- const x = points[j], y = points[j + 1];
- xmin = x < xmin ? x : xmin;
- ymin = y < ymin ? y : ymin;
- xmax = x > xmax ? x : xmax;
- ymax = y > ymax ? y : ymax;
- }
- const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
- const ylen = ymax - ymin + 1;
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- points[j] = (points[j] - xmin) / xlen;
- points[j + 1] = (points[j + 1] - ymin) / ylen;
- }
- return points;
- }
- /**
- * Find a matrix with the coordinates of quality matches
- * @param matches n quality matches
- * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
- */
- _findMatrixOfMatches(matches) {
- const n = matches[0].length;
- Utils.assert(n > 0);
- // sets of keypoints
- const currKeypoints = matches[0];
- const prevKeypoints = matches[1];
- // get the coordinates of the keypoints of the set of refined matches
- const src = new Array(2 * n);
- const dst = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- src[j] = prevKeypoints[i].x;
- src[j + 1] = prevKeypoints[i].y;
- dst[j] = currKeypoints[i].x;
- dst[j + 1] = currKeypoints[i].y;
- }
- // assemble the matrix
- return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
- }
- /**
- * Preprocess keypoint matches
- * @param currKeypoints keypoints of the current frame
- * @param prevKeypoints keypoints of the previous frame
- * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
- * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
- */
- _preprocessMatches(currKeypoints, prevKeypoints) {
- // find and refine quality matches
- const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
- const refinedMatches = this._refineQualityMatches(qualityMatches);
- // not enough matches?
- const n = refinedMatches[0].length;
- if (n < TRACK_MIN_MATCHES)
- return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
- // find matrix of matches
- const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
- // warp matrix of matches
- const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
- return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
- }
- /**
- * Find an affine motion model of the target image
- * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
- * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
- */
- _findAffineMotion(preprocessedMatches) {
- const model = speedy_vision_default().Matrix.Eye(3);
- const n = preprocessedMatches.columns / 2; // number of preprocessed matches
- // find motion model
- return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512,
- bundleSize: 128,
- }).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a perspective motion model of the target image
- * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
- * @returns a promise that resolves to a 3x3 perspective motion model
- */
- _findPerspectiveMotion(preprocessedMatches) {
- /*
-
- We can probably get more accurate motion estimates if we
- work in 3D rather than in 2D. We're currently estimating
- an affine transform in image space. What if we projected
- the keypoints into world space, estimated the camera motion
- (rotation and translation) that best describes the observed
- motion of the keypoints, and then projected things
- back to image space? Need to figure this out; we'll get a
- homography matrix.
-
- Note: keypoints are in rectified image space.
-
- Note: work with a 6 DoF perspective transform instead of 8.
-
- */
- const model = speedy_vision_default().Matrix.Zeros(3);
- const n = preprocessedMatches.columns / 2; // number of preprocessed matches
- // find motion model
- return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512 * 2,
- bundleSize: 128 * 4, //*4
- }).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a rectification matrix to be applied to the target image
- * @param homography maps a reference image to the AR screen
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findImageWarp(homography, screenSize) {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const mat = speedy_vision_default().Matrix.Zeros(3);
- return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
- }
- /**
- * Find a warp to be applied to the keypoints
- * @returns a promise that resolves to an affine transform (a 3x3 matrix whose last row is [ 0 0 1 ])
- */
- _findKeypointWarp() {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const screenSize = this.screenSize;
- const sw = screenSize.width, sh = screenSize.height;
- const mat = speedy_vision_default().Matrix.Eye(3, 3);
- // no rotation is needed
- if (!this._mustRotateWarpedImage(media, screenSize))
- return speedy_vision_default().Promise.resolve(mat);
- // rotate by 90 degrees clockwise and scale
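- // (the three point correspondences are (0,sh) <-> (0,0), (0,0) <-> (sw,0)
- // and (sw,0) <-> (sw,sh))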
- return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
- }
- /**
- * Predict the keypoints without actually looking at the image
- * @param curr keypoints at time t (will modify the contents)
- * @param initial keypoints at time t-1 (not just t = 0)
- * @returns keypoints at time t+1
- */
- _predictKeypoints(curr, initial) {
- // the target image is likely to be moving roughly in
- // the same manner as it was in the previous frame
- const next = [];
- const n = curr.length;
- for (let i = 0; i < n; i++) {
- const cur = curr[i];
- if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
- continue;
- /*
- else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
- continue;
- */
- const ini = initial[cur.matches[0].index];
- const dx = cur.position.x - ini.position.x;
- const dy = cur.position.y - ini.position.y;
- // a better mathematical model is needed
- const alpha = 0.8; //0.2;
- cur.position.x = ini.position.x + alpha * dx;
- cur.position.y = ini.position.y + alpha * dy;
- next.push(cur);
- }
- // done!
- return next;
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- detector.quality = TRACK_HARRIS_QUALITY;
- detector.capacity = TRACK_DETECTOR_CAPACITY;
- subpixel.method = SUBPIXEL_METHOD;
- clipper.size = TRACK_MAX_KEYPOINTS;
- borderClipper.imageSize = screen.size;
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- matcher.k = 2;
- keypointPortalSource.source = null;
- keypointSink.turbo = USE_TURBO;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- imageRectifier.output().connectTo(blur.input());
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- keypointPortalSource.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- //preMatcher.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- //imageRectifier.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
- return pipeline;
- }
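- /*
-  * Dataflow of the pipeline wired above, as a quick reference (derived from
-  * the connectTo() calls):
-  *
-  *   source -> screen (resize) -> greyscale -> imageRectifier
-  *   imageRectifier -> nightvisionMux.in0
-  *   imageRectifier -> nightvision -> nightvisionMux.in1
-  *   nightvisionMux -> detector (Harris) -> borderClipper -> clipper
-  *   imageRectifier -> denoiser -> subpixel.image    |  clipper -> subpixel.keypoints
-  *   imageRectifier -> blur -> descriptor.image      |  subpixel -> descriptor.keypoints
-  *   keypointPortalSource -> matcher.database        |  descriptor -> matcher.keypoints
-  *   descriptor -> keypointRectifier -> keypointSink |  matcher -> keypointSink.matches
-  *
-  * In speedy-vision, running such a pipeline presumably resolves to an object
-  * keyed by the sink names, e.g. { keypoints } for the 'keypoints' sink above.
-  */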
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * image-tracker.ts
- * Image Tracker
- */
-
-
-
-
-
-
-
-
-
-
-
- /** A helper */
- const formatSize = (size) => `${size.width}x${size.height}`;
- /**
- * The ImageTracker tracks an image (one at a time)
- */
- class ImageTracker extends AREventTarget {
- /**
- * Constructor
- */
- constructor() {
- super();
- // the states
- this._state = {
- 'initial': new ImageTrackerInitialState(this),
- 'training': new ImageTrackerTrainingState(this),
- 'scanning': new ImageTrackerScanningState(this),
- 'pre-tracking': new ImageTrackerPreTrackingState(this),
- 'tracking': new ImageTrackerTrackingState(this),
- };
- // initial setup
- this._session = null;
- this._activeStateName = 'initial';
- this._lastOutput = {};
- this._database = new ReferenceImageDatabase();
- // user settings
- this._resolution = DEFAULT_TRACKING_RESOLUTION;
- }
- /**
- * The type of the tracker
- */
- get type() {
- return 'image-tracker';
- }
- /**
- * Current state name
- */
- get state() {
- return this._activeStateName;
- }
- /**
- * Reference Image Database
- * Must be configured before training the tracker
- */
- get database() {
- return this._database;
- }
- /**
- * Resolution of the AR screen space
- */
- get resolution() {
- return this._resolution;
- }
- /**
- * Resolution of the AR screen space
- */
- set resolution(resolution) {
- this._resolution = resolution;
- }
- /**
- * Size of the AR screen space, in pixels
- * @internal
- */
- get screenSize() {
- return this._state[this._activeStateName].screenSize;
- }
- /**
- * Last emitted output
- * @internal
- */
- get _output() {
- return this._lastOutput;
- }
- /**
- * Stats related to this tracker
- * @internal
- */
- get _stats() {
- return `${formatSize(this.screenSize)} ${this.state}`;
- }
- /**
- * Initialize this tracker
- * @param session
- * @returns promise that resolves after the tracker has been initialized
- * @internal
- */
- _init(session) {
- // store the session
- this._session = session;
- // initialize states
- for (const state of Object.values(this._state))
- state.init();
- // done!
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Release this tracker
- * @returns promise that resolves after the tracker has been released
- * @internal
- */
- _release() {
- // release states
- for (const state of Object.values(this._state))
- state.release();
- // unlink session
- this._session = null;
- // done!
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Update the tracker
- * @returns promise
- * @internal
- */
- _update() {
- // validate
- if (this._session == null)
- return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
- // compute the screen size for image processing purposes
- // note: this may change over time...!
- const media = this._session.media;
- const aspectRatio = media.width / media.height;
- const screenSize = Utils.resolution(this._resolution, aspectRatio);
- // run the active state
- const activeState = this._state[this._activeStateName];
- return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
- // update the output of the tracker
- this._lastOutput = trackerOutput;
- // need to change the state?
- if (this._activeStateName != nextState) {
- activeState.onLeaveState();
- this._activeStateName = nextState;
- this._state[nextState].onEnterState(nextStateSettings || {});
- }
- });
- }
- /**
- * Get the reference image associated with a keypoint of the trained set
- * @param keypointIndex index of the keypoint (-1 if not found)
- * @returns reference image
- * @internal
- */
- _referenceImageOfKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceImageOfKeypoint(keypointIndex);
- }
- /**
- * Get the index of the reference image associated with a keypoint of the trained set
- * @param keypointIndex index of the keypoint (-1 if not found)
- * @returns reference image index, or -1 if not found
- * @internal
- */
- _referenceImageIndexOfKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceImageIndexOfKeypoint(keypointIndex);
- }
- /**
- * Get a keypoint of the trained set
- * @param keypointIndex index of the keypoint in the trained set
- * @returns a keypoint
- * @internal
- */
- _referenceKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceKeypoint(keypointIndex);
- }
- }
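- /*
-  * State machine sketch, based on the states instantiated in the constructor
-  * above (the exact transitions are decided by each state and are not shown
-  * in this section):
-  *
-  *   initial -> training -> scanning -> pre-tracking -> tracking
-  *
-  * _update() runs the active state on each update cycle and switches states
-  * using the { nextState, nextStateSettings } returned by the state itself;
-  * presumably the tracker falls back to 'scanning' when the target is lost.
-  */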
-
- ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * tracker-factory.ts
- * Tracker factory
- */
-
- /**
- * Tracker factory
- */
- class TrackerFactory {
- /**
- * Create an Image Tracker
- */
- static ImageTracker() {
- return new ImageTracker();
- }
- }
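- /*
-  * Usage sketch (hedged: the { name, image } entry format accepted by
-  * ReferenceImageDatabase.add() and the 'md' resolution token are assumptions,
-  * not shown in this section; the image name is illustrative):
-  *
-  * const tracker = Martins.Tracker.ImageTracker();
-  * tracker.resolution = 'md'; // optional: resolution of the AR screen space
-  * await tracker.database.add([{
-  *     name: 'my-reference-image',
-  *     image: document.getElementById('my-image') // an HTMLImageElement
-  * }]);
-  * // the tracker is then passed to Martins.startSession() (see main.ts below)
-  */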
-
- ;// CONCATENATED MODULE: ./src/sources/video-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * video-source.ts
- * HTMLVideoElement-based source of data
- */
-
-
-
- /** A message to be displayed if a video can't autoplay and user interaction is required */
- const ALERT_MESSAGE = 'Tap on the screen to start';
- /** Whether or not we have displayed the ALERT_MESSAGE */
- let displayedAlertMessage = false;
- /**
- * HTMLVideoElement-based source of data
- */
- class VideoSource {
- /**
- * Constructor
- */
- constructor(video) {
- Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
- this._video = video;
- this._media = null;
- }
- /**
- * A type-identifier of the source of data
- * @internal
- */
- get _type() {
- return 'video';
- }
- /**
- * Get media
- * @internal
- */
- get _data() {
- if (this._media == null)
- throw new IllegalOperationError(`The media of the source of data isn't loaded`);
- return this._media;
- }
- /**
- * Stats related to this source of data
- * @internal
- */
- get _stats() {
- const media = this._media;
- if (media != null)
- return `${media.width}x${media.height} video`;
- else
- return 'uninitialized video';
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- Utils.log(`Initializing ${this._type} source...`);
- // prepare the video before loading the SpeedyMedia!
- return this._prepareVideo(this._video).then(video => {
- Utils.log('The video is prepared');
- return speedy_vision_default().load(video).then(media => {
- Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
- this._media = media;
- });
- });
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- if (this._media)
- this._media.release();
- this._media = null;
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Handle browser-specific quirks for <video> elements
- * @param video a video element
- * @returns a promise that resolves to the input video
- */
- _prepareVideo(video) {
- // WebKit <video> policies for iOS:
- // https://webkit.org/blog/6784/new-video-policies-for-ios/
- // required on iOS; nice to have in all browsers
- video.setAttribute('playsinline', '');
- // handle autoplay
- return this._handleAutoPlay(video).then(video => {
- // handle WebKit quirks
- if (Utils.isWebKit()) {
- // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
- // on iOS 15.2-17.3, this hack doesn't seem necessary, but works okay
- if (video.hidden) {
- video.hidden = false;
- video.style.setProperty('opacity', '0');
- video.style.setProperty('position', 'fixed'); // make sure that it's visible on-screen
- video.style.setProperty('left', '0');
- video.style.setProperty('top', '0');
- //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
- //video.style.setProperty('visibility', 'hidden'); // doesn't work either
- }
- }
- // done
- return video;
- });
- }
- /**
- * Handle browser-specific quirks for videos marked with autoplay
- * @param video a <video> marked with autoplay
- * @returns a promise that resolves to the input video
- */
- _handleAutoPlay(video) {
- // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
- // Chrome policy: https://developer.chrome.com/blog/autoplay/
- // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
- // nothing to do?
- if (!video.autoplay)
- return speedy_vision_default().Promise.resolve(video);
- // videos marked with autoplay should be muted
- if (!video.muted) {
- Utils.warning('Videos marked with autoplay should be muted', video);
- video.muted = true;
- }
- // the browser may not honor the autoplay attribute if the video is not
- // visible on-screen. So, let's try to play the video in any case.
- return this._waitUntilPlayable(video).then(video => {
- // try to play the video
- const promise = video.play();
- // handle older browsers
- if (promise === undefined)
- return video;
- // resolve if successful
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- promise.then(() => resolve(video), error => {
- // can't play the video
- Utils.error(`Can't autoplay video!`, error, video);
- // autoplay is blocked for some reason
- if (error.name == 'NotAllowedError') {
- Utils.warning('Tip: allow manual playback');
- if (Utils.isIOS())
- Utils.warning('Is low power mode on?');
- // User interaction is required to play the video. We can
- // solve this here (easy and convenient to do) or at the
- // application layer (for a better user experience). If the
- // latter is preferred, just disable autoplay and play the
- // video programmatically.
- if (video.hidden || !video.controls || video.parentNode === null) {
- // this is added for convenience
- document.body.addEventListener('pointerdown', () => video.play());
- // ask only once for user interaction
- if (!displayedAlertMessage) {
- alert(ALERT_MESSAGE);
- displayedAlertMessage = true;
- }
- // XXX what if the Session mode is inline? In this
- // case, this convenience code may be undesirable.
- // A workaround is to disable autoplay.
- }
- /*else {
- // play the video after the first interaction with the page
- const polling = setInterval(() => {
- video.play().then(() => clearInterval(polling));
- }, 500);
- }*/
- }
- // unsupported media source
- else if (error.name == 'NotSupportedError') {
- reject(new NotSupportedError('Unsupported video format', error));
- return;
- }
- // done
- resolve(video);
- });
- });
- });
- }
- /**
- * Wait for the input video to be playable
- * @param video
- * @returns a promise that resolves to the input video when it can be played
- */
- _waitUntilPlayable(video) {
- const TIMEOUT = 15000, INTERVAL = 500;
- if (video.readyState >= 3)
- return speedy_vision_default().Promise.resolve(video);
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- let ms = 0, t = setInterval(() => {
- //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
- if (video.readyState >= 3) {
- clearInterval(t);
- resolve(video);
- }
- else if ((ms += INTERVAL) >= TIMEOUT) {
- clearInterval(t);
- reject(new TimeoutError('The video took too long to load'));
- }
- }, INTERVAL);
- });
- }
- }
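- /*
-  * Usage sketch (the attribute choices mirror the quirks handled above;
-  * the video URL is illustrative):
-  *
-  * const video = document.createElement('video');
-  * video.src = 'my-video.webm';
-  * video.loop = true;
-  * video.muted = true;        // autoplaying videos should be muted
-  * video.autoplay = true;     // _handleAutoPlay() copes with blocked autoplay
-  * video.playsInline = true;  // also enforced by _prepareVideo()
-  * const source = Martins.Source.Video(video);
-  */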
-
- ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * canvas-source.ts
- * HTMLCanvasElement-based source of data
- */
-
-
-
- /**
- * HTMLCanvasElement-based source of data
- */
- class CanvasSource {
- /**
- * Constructor
- */
- constructor(canvas) {
- Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
- this._canvas = canvas;
- this._media = null;
- }
- /**
- * A type-identifier of the source of data
- * @internal
- */
- get _type() {
- return 'canvas';
- }
- /**
- * Get media
- * @internal
- */
- get _data() {
- if (this._media == null)
- throw new IllegalOperationError(`The media of the source of data isn't loaded`);
- return this._media;
- }
- /**
- * Stats related to this source of data
- * @internal
- */
- get _stats() {
- const media = this._media;
- if (media != null)
- return `${media.width}x${media.height} canvas`;
- else
- return 'uninitialized canvas';
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- return speedy_vision_default().load(this._canvas).then(media => {
- Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
- this._media = media;
- });
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- if (this._media)
- this._media.release();
- this._media = null;
- return speedy_vision_default().Promise.resolve();
- }
- }
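- /*
-  * Usage sketch: a canvas that the application redraws every frame can feed
-  * the engine, since the SpeedyMedia presumably keeps reading from the same
-  * canvas element on each update (the element id below is illustrative):
-  *
-  * const canvas = document.getElementById('my-canvas');
-  * const source = Martins.Source.Canvas(canvas);
-  */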
-
- ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * camera-source.ts
- * Webcam-based source of data
- */
-
-
-
-
- /** Default options for camera sources */
- const DEFAULT_CAMERA_OPTIONS = {
- resolution: 'md',
- aspectRatio: 16 / 9,
- constraints: { facingMode: 'environment' },
- };
- /**
- * Webcam-based source of data
- */
- class CameraSource extends VideoSource {
- /**
- * Constructor
- */
- constructor(options) {
- const video = document.createElement('video');
- super(video);
- this._cameraVideo = video;
- this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
- }
- /**
- * Camera resolution
- */
- get resolution() {
- return this._options.resolution;
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- Utils.log('Accessing the webcam...');
- // validate
- if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
- throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
- // set up media constraints
- const options = this._options;
- const size = Utils.resolution(options.resolution, options.aspectRatio);
- const constraints = {
- audio: false,
- video: Object.assign({ width: size.width, height: size.height }, options.constraints)
- };
- // load camera stream
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- navigator.mediaDevices.getUserMedia(constraints).then(stream => {
- const video = this._cameraVideo;
- video.onloadedmetadata = () => {
- const promise = video.play();
- const success = 'Access to the webcam has been granted.';
- // handle older browsers
- if (promise === undefined) {
- Utils.log(success);
- resolve(video);
- return;
- }
- // handle promise
- promise.then(() => {
- Utils.log(success);
- resolve(video);
- }).catch(error => {
- reject(new IllegalOperationError('Webcam error!', error));
- });
- };
- video.setAttribute('playsinline', '');
- video.setAttribute('autoplay', '');
- video.setAttribute('muted', '');
- video.srcObject = stream;
- }).catch(error => {
- reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
- });
- }).then(_ => super._init()); // this will call VideoSource._prepareVideo() to handle browser quirks
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- const stream = this._cameraVideo.srcObject;
- const tracks = stream.getTracks();
- // stop camera feed
- tracks.forEach(track => track.stop());
- this._cameraVideo.onloadedmetadata = null;
- this._cameraVideo.srcObject = null;
- // release the media
- return super._release();
- }
- }
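- /*
-  * Usage sketch (the option names and values mirror DEFAULT_CAMERA_OPTIONS
-  * above; 'user' is the standard MediaTrackConstraints value for a front camera):
-  *
-  * const source = Martins.Source.Camera({
-  *     resolution: 'md',                          // default
-  *     aspectRatio: 16 / 9,                       // default
-  *     constraints: { facingMode: 'environment' } // default; use 'user' for a front camera
-  * });
-  */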
-
- ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * source-factory.ts
- * Factory of sources of data
- */
-
-
-
- /**
- * Factory of sources of data
- */
- class SourceFactory {
- /**
- * Create a <video>-based source of data
- * @param video video element
- */
- static Video(video) {
- return new VideoSource(video);
- }
- /**
- * Create a <canvas>-based source of data
- * @param canvas canvas element
- */
- static Canvas(canvas) {
- return new CanvasSource(canvas);
- }
- /**
- * Create a Webcam-based source of data
- * @param options optional options object
- */
- static Camera(options = {}) {
- return new CameraSource(options);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/main.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * main.ts
- * Entry point
- */
-
-
-
-
-
-
-
- /**
- * GPU-accelerated Augmented Reality for the web
- */
- class Martins {
- /**
- * Start a new session
- * @param options
- * @returns a promise that resolves to a new session
- */
- static startSession(options) {
- return Session.instantiate(options);
- }
- /**
- * Trackers
- */
- static get Tracker() {
- return TrackerFactory;
- }
- /**
- * Sources of data
- */
- static get Source() {
- return SourceFactory;
- }
- /**
- * Create a viewport
- * @param settings
- * @returns a new viewport with the specified settings
- */
- static Viewport(settings) {
- return new BaseViewport(settings);
- }
- /**
- * Global Settings
- */
- static get Settings() {
- return Settings;
- }
- /**
- * Engine version
- */
- static get version() {
- return "0.2.1-wip";
- }
- /**
- * Speedy Vision
- */
- static get Speedy() {
- return (speedy_vision_default());
- }
- /**
- * Checks if the engine can be run in the browser the client is using
- * @returns true if the engine is compatible with the browser
- */
- static isSupported() {
- return Session.isSupported();
- }
- }
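- /*
-  * End-to-end usage sketch (hedged: the session option names 'mode', 'viewport',
-  * 'trackers' and 'sources', and the viewport 'container' setting, are assumed
-  * from typical MARTINS.js usage and are not all visible in this section):
-  *
-  * if(!Martins.isSupported())
-  *     throw new Error('Unsupported browser');
-  * const tracker = Martins.Tracker.ImageTracker();
-  * await tracker.database.add([{ name: 'my-image', image: myImageElement }]);
-  * const viewport = Martins.Viewport({ container: document.getElementById('ar-viewport') });
-  * const source = Martins.Source.Camera();
-  * const session = await Martins.startSession({
-  *     mode: 'immersive',
-  *     viewport: viewport,
-  *     trackers: [ tracker ],
-  *     sources: [ source ]
-  * });
-  */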
- // Freeze the namespace
- Object.freeze(Martins);
- // Add Speedy Vision to global scope
- ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
- // Display a notice
- Utils.log(`MARTINS.js version ${Martins.version}. ` +
- `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
- "https://github.com/alemart/martins-js");
-
- })();
-
- __webpack_exports__ = __webpack_exports__["default"];
- /******/ return __webpack_exports__;
- /******/ })()
- ;
- });
|