430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
- /*!
- * MARTINS.js version 0.2.1-wip
- * GPU-accelerated Augmented Reality for the web
- * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
- * https://github.com/alemart/martins-js
- *
- * @license LGPL-3.0-or-later
- * Date: 2024-07-16T00:44:39.450Z
- */
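- /*
- * The UMD wrapper below registers the bundle as module.exports under CommonJS,
- * via define() under AMD, or as the global "Martins" otherwise. A minimal
- * consumption sketch (illustrative only; the file name is assumed):
- *
- *   // Browser: <script src="martins.js"></script>, then use window.Martins
- *   // Node.js / CommonJS: const Martins = require('./martins.js');
- */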
- (function webpackUniversalModuleDefinition(root, factory) {
- if(typeof exports === 'object' && typeof module === 'object')
- module.exports = factory();
- else if(typeof define === 'function' && define.amd)
- define([], factory);
- else if(typeof exports === 'object')
- exports["Martins"] = factory();
- else
- root["Martins"] = factory();
- })(self, () => {
- return /******/ (() => { // webpackBootstrap
- /******/ var __webpack_modules__ = ({
-
- /***/ 774:
- /***/ ((module) => {
-
- /*!
- * Speedy Vision version 0.9.1
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
- * https://github.com/alemart/speedy-vision
- *
- * @license Apache-2.0
- * Date: 2024-07-03T02:16:25.769Z
- */
- (function webpackUniversalModuleDefinition(root, factory) {
- if(true)
- module.exports = factory();
- else {}
- })(self, () => {
- return /******/ (() => { // webpackBootstrap
- /******/ var __webpack_modules__ = ({
-
- /***/ 2199:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
- /* harmony export */ w: () => (/* binding */ Settings)
- /* harmony export */ });
- /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(6634);
- /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1001);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * settings.js
- * Global settings
- */
-
-
-
-
-
-
- /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
- /** @typedef {"raf" | "asap"} GPUPollingMode */
- /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
-
- /** @type {GPUPollingMode} Default GPU polling mode */
- const DEFAULT_GPU_POLLING_MODE = 'raf';
-
- /** @type {GPUPollingMode} GPU polling mode */
- let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
-
- /** @type {LoggingMode} logging mode */
- let loggingMode = 'default';
-
- /**
- * Global settings
- */
- class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q {
- /**
- * Power preference of the WebGL context
- * @returns {PowerPreference}
- */
- static get powerPreference() {
- return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
- }
-
- /**
- * Power preference of the WebGL context
- * @param {PowerPreference} value
- */
- static set powerPreference(value) {
- _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
- }
-
- /**
- * GPU polling mode
- * @returns {GPUPollingMode}
- */
- static get gpuPollingMode() {
- return gpuPollingMode;
- }
-
- /**
- * GPU polling mode
- * @param {GPUPollingMode} value
- */
- static set gpuPollingMode(value) {
- if (value !== 'raf' && value !== 'asap') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
- gpuPollingMode = value;
- }
-
- /**
- * Logging mode
- * @returns {LoggingMode}
- */
- static get logging() {
- return loggingMode;
- }
-
- /**
- * Logging mode
- * @param {LoggingMode} mode
- */
- static set logging(mode) {
- if (mode !== 'default' && mode !== 'none' && mode !== 'diagnostic')
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);
- else if (mode === 'diagnostic')
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
- loggingMode = mode;
- }
- }
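- /*
- * A minimal usage sketch for the Settings facade above (illustrative only;
- * application code normally reaches these statics through the library's
- * public namespace rather than this internal binding):
- *
- *   Settings.gpuPollingMode = 'asap';  // accepted values: 'raf' (default) or 'asap'
- *   Settings.logging = 'none';         // accepted values: 'default', 'none', 'diagnostic'
- *   Settings.powerPreference;          // forwarded to SpeedyGL's WebGL context settings
- */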
-
- /***/ }),
-
- /***/ 6306:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4248__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_4248__.d(__nested_webpack_exports__, {
- /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
- /* harmony export */ });
- /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4248__(6465);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4248__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4248__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-expr.js
- * Symbolic matrix expressions
- */
-
-
-
-
-
- /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
- /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
- /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
-
- /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
-
- /** @const {Dtype2BufferType} */
- const DTYPE_TO_BUFFER_TYPE = Object.freeze({
- 'float32': Float32Array
- });
-
- /**
- * @abstract Matrix expression
- * It's an opaque object representing an algebraic
- * expression. It has no data attached to it.
- */
- class SpeedyMatrixExpr {
- /**
- * Constructor
- * @param {number} rows
- * @param {number} columns
- * @param {SpeedyMatrixDtype} dtype
- */
- constructor(rows, columns, dtype) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
-
- /** @type {number} number of rows */
- this._rows = rows | 0;
-
- /** @type {number} number of columns */
- this._columns = columns | 0;
-
- /** @type {SpeedyMatrixDtype} data type */
- this._dtype = dtype;
- }
-
- /**
- * Number of rows
- * @returns {number}
- */
- get rows() {
- return this._rows;
- }
-
- /**
- * Number of columns
- * @returns {number}
- */
- get columns() {
- return this._columns;
- }
-
- /**
- * Data type
- * @returns {SpeedyMatrixDtype}
- */
- get dtype() {
- return this._dtype;
- }
-
- /**
- * Default data type
- * @returns {SpeedyMatrixDtype}
- */
- static get DEFAULT_DTYPE() {
- return 'float32';
- }
-
- /**
- * Buffer types
- * @returns {Dtype2BufferType}
- */
- static get BUFFER_TYPE() {
- return DTYPE_TO_BUFFER_TYPE;
- }
-
- /**
- * Matrix addition
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- plus(expr) {
- return new SpeedyMatrixAddExpr(this, expr);
- }
-
- /**
- * Matrix subtraction
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- minus(expr) {
- return new SpeedyMatrixSubtractExpr(this, expr);
- }
-
- /**
- * Matrix multiplication
- * @param {SpeedyMatrixExpr|number} expr
- * @returns {SpeedyMatrixExpr}
- */
- times(expr) {
- if (typeof expr === 'number')
- return new SpeedyMatrixScaleExpr(this, expr);
- else
- return new SpeedyMatrixMultiplyExpr(this, expr);
- }
-
- /**
- * Matrix transposition
- * @returns {SpeedyMatrixExpr}
- */
- transpose() {
- return new SpeedyMatrixTransposeExpr(this);
- }
-
- /**
- * Matrix inversion
- * @returns {SpeedyMatrixExpr}
- */
- inverse() {
- return new SpeedyMatrixInvertExpr(this);
- }
-
- /**
- * Component-wise multiplication
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- compMult(expr) {
- return new SpeedyMatrixCompMultExpr(this, expr);
- }
-
- /**
- * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
- * @param {SpeedyMatrixExpr} expr
- * @returns {SpeedyMatrixExpr}
- */
- ldiv(expr) {
- return new SpeedyMatrixLdivExpr(this, expr);
- }
-
- /**
- * Returns a human-readable string representation of the matrix expression
- * @returns {string}
- */
- toString() {
- return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
- }
-
- /**
- * Evaluate this expression
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
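- /*
- * The expression classes below build a lazy tree: nothing is computed until a
- * SpeedyMatrix evaluates the expression. A small sketch (illustrative only;
- * A and B are hypothetical SpeedyMatrix instances of the same shape):
- *
- *   const expr = A.plus(B).times(2).transpose(); // still symbolic, no data yet
- *   // the result only materializes via SpeedyMatrix.From(expr) or C.setTo(expr)
- */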
- const {
- SpeedyMatrix
- } = __nested_webpack_require_4248__(4188);
-
- /**
- * @abstract operation storing a temporary matrix
- */
- class SpeedyMatrixTempExpr extends SpeedyMatrixExpr {
- /**
- * Constructor
- * @param {number} rows
- * @param {number} columns
- * @param {SpeedyMatrixDtype} dtype
- */
- constructor(rows, columns, dtype) {
- super(rows, columns, dtype);
-
- /** @type {SpeedyMatrix} holds the results of a computation */
- this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
- }
- }
-
- /**
- * @abstract unary operation
- */
- class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr {
- /**
- * Constructor
- * @param {number} rows rows of the output matrix
- * @param {number} columns columns of the output matrix
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(rows, columns, operand) {
- super(rows, columns, operand.dtype);
-
- /** @type {SpeedyMatrixExpr} operand */
- this._operand = operand;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- const operand = this._operand._evaluate(wasm, memory);
- const result = this._tempMatrix;
-
- // allocate matrices
- const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
- const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
-
- // copy operand to WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
-
- // run the WASM routine
- this._compute(wasm, memory, resultptr, operandptr);
-
- // copy result from WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
-
- // deallocate matrices
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
-
- // done!
- return result;
- }
-
- /**
- * Compute the result of this operation
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
-
- /**
- * @abstract binary operation
- */
- class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr {
- /**
- * Constructor
- * @param {number} rows rows of the output matrix
- * @param {number} columns columns of the output matrix
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(rows, columns, left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
- super(rows, columns, left.dtype);
-
- /** @type {SpeedyMatrixExpr} left operand */
- this._left = left;
-
- /** @type {SpeedyMatrixExpr} right operand */
- this._right = right;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- const left = this._left._evaluate(wasm, memory);
- const right = this._right._evaluate(wasm, memory);
- const result = this._tempMatrix;
-
- // allocate matrices
- const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
- const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
- const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
-
- // copy input matrices to WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
-
- // run the WASM routine
- this._compute(wasm, memory, resultptr, leftptr, rightptr);
-
- // copy output matrix from WASM memory
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
-
- // deallocate matrices
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
- _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
-
- // done!
- return result;
- }
-
- /**
- * Compute the result of this operation
- * @abstract
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
- }
- }
-
- /**
- * Transpose matrix
- */
- class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(operand) {
- super(operand.columns, operand.rows, operand);
- }
-
- /**
- * Compute result = operand^T
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- wasm.exports.Mat32_transpose(resultptr, operandptr);
- }
- }
-
- /**
- * Invert square matrix
- */
- class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- */
- constructor(operand) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
- super(operand.rows, operand.columns, operand);
-
- /** @type {number} size of the matrix */
- this._size = operand.rows;
- }
-
- /**
- * Compute result = operand ^ (-1)
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- switch (this._size) {
- case 0:
- break;
- case 1:
- wasm.exports.Mat32_inverse1(resultptr, operandptr);
- break;
- case 2:
- wasm.exports.Mat32_inverse2(resultptr, operandptr);
- break;
- case 3:
- wasm.exports.Mat32_inverse3(resultptr, operandptr);
- break;
- default:
- wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
- break;
- }
- }
- }
-
- /**
- * Multiply matrix by a scalar value
- */
- class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} operand
- * @param {number} scalar
- */
- constructor(operand, scalar) {
- super(operand.rows, operand.columns, operand);
-
- /** @type {number} scalar value */
- this._scalar = +scalar;
- }
-
- /**
- * Compute result = scalar * operand
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} operandptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, operandptr) {
- wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
- }
- }
-
- /**
- * Matrix addition
- */
- class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(left.rows, left.columns, left, right);
- }
-
- /**
- * Compute result = left + right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Matrix subtraction
- */
- class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(left.rows, left.columns, left, right);
- }
-
- /**
- * Compute result = left - right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Matrix multiplication
- */
- class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
- super(left.rows, right.columns, left, right);
- }
-
- /**
- * Compute result = left * right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Component-wise multiplication
- */
- class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
- super(right.rows, right.columns, left, right);
- }
-
- /**
- * Compute result = left <compMult> right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
- }
- }
-
- /**
- * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
- */
- class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr {
- /**
- * Constructor
- * @param {SpeedyMatrixExpr} left left operand
- * @param {SpeedyMatrixExpr} right right operand
- */
- constructor(left, right) {
- const m = left.rows,
- n = left.columns;
-
- // TODO right doesn't need to be a column vector
- _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
- super(n, 1, left, right);
- }
-
- /**
- * Compute result = left \ right
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} resultptr pointer to Mat32
- * @param {number} leftptr pointer to Mat32
- * @param {number} rightptr pointer to Mat32
- */
- _compute(wasm, memory, resultptr, leftptr, rightptr) {
- wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
- }
- }
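- /*
- * ldiv() maps to an ordinary least-squares solve through the QR routine above.
- * Sketch (illustrative only; A is a hypothetical m x n SpeedyMatrix with m >= n
- * and b is an m x 1 column vector):
- *
- *   const x = SpeedyMatrix.From(A.ldiv(b)); // n x 1 solution minimizing ||Ax - b||
- */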
-
- /***/ }),
-
- /***/ 6465:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_21592__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_21592__.d(__nested_webpack_exports__, {
- /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
- /* harmony export */ });
- /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_21592__(9192);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_21592__(8581);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_21592__(9037);
- /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_21592__(3816);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-wasm.js
- * WebAssembly bridge
- */
-
-
-
-
-
-
- /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
-
- /**
- * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
- * @property {object} as
- * @property {WebAssembly.Memory} as.object
- * @property {Uint8Array} as.uint8
- * @property {Int32Array} as.int32
- * @property {Uint32Array} as.uint32
- * @property {Float32Array} as.float32
- * @property {Float64Array} as.float64
- */
-
- /**
- * @typedef {object} SpeedyMatrixWASMHandle
- * @property {WebAssembly.Instance} wasm
- * @property {SpeedyMatrixWASMMemory} memory
- * @property {WebAssembly.Module} module
- */
-
- /** @type {Uint8Array} WebAssembly binary */
- const WASM_BINARY = __nested_webpack_require_21592__(3575);
-
- /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
- let _instance = null;
-
- /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
- let _module = null;
-
- /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
- const _memory = (mem => ({
- as: {
- object: mem,
- uint8: new Uint8Array(mem.buffer),
- int32: new Int32Array(mem.buffer),
- uint32: new Uint32Array(mem.buffer),
- float32: new Float32Array(mem.buffer),
- float64: new Float64Array(mem.buffer)
- }
- }))(typeof WebAssembly === 'undefined' ? new Uint8Array(1024) /* use a filler */ :
- new WebAssembly.Memory({
- initial: 16, // 16 pages = 1 MB
- maximum: 256
- }));
-
- /**
- * WebAssembly utilities
- */
- class SpeedyMatrixWASM {
- /**
- * Gets you the WASM instance, augmented memory & module
- * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
- */
- static ready() {
- // Check if WebAssembly is supported
- if (typeof WebAssembly === 'undefined') return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM('This application requires WebAssembly. Please update your system.'));
-
- // Endianness check
- if (!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN) return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`));
-
- // Get the WASM instance
- return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
- SpeedyMatrixWASM._ready(resolve, reject);
- });
- }
-
- /**
- * Synchronously gets you the WASM instance, augmented memory & module
- * @returns {SpeedyMatrixWASMHandle}
- */
- static get handle() {
- if (!_instance || !_module) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
- return {
- wasm: _instance,
- memory: _memory,
- module: _module
- };
- }
-
- /**
- * Gets you the WASM imports bound to a memory object
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {Object<string,Function>}
- */
- static imports(memory) {
- const obj = new SpeedyMatrixWASMImports(memory);
- return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype)
- .filter(property => typeof obj[property] === 'function' && property !== 'constructor')
- .reduce((imports, methodName) => (imports[methodName] = obj[methodName], imports), Object.create(null));
- }
-
- /**
- * Allocate a Mat32 in WebAssembly memory without copying any data
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {SpeedyMatrix} matrix
- * @returns {number} pointer to the new Mat32
- */
- static allocateMat32(wasm, memory, matrix) {
- const dataptr = wasm.exports.malloc(matrix.data.byteLength);
- const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
- return matptr;
- }
-
- /**
- * Deallocate a Mat32 in WebAssembly
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to the allocated Mat32
- * @returns {number} NULL
- */
- static deallocateMat32(wasm, memory, matptr) {
- const dataptr = wasm.exports.Mat32_data(matptr);
- wasm.exports.free(matptr);
- wasm.exports.free(dataptr);
- return 0;
- }
-
- /**
- * Copy the data of a matrix to a WebAssembly Mat32
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to a Mat32
- * @param {SpeedyMatrix} matrix
- * @returns {number} matptr
- */
- static copyToMat32(wasm, memory, matptr, matrix) {
- // We assume the following:
- // 1. the host uses little-endian byte ordering (just like WebAssembly)
- // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
- // 3. the data type is float32
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
- //matrix.dtype === 'float32' &&
- matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
- const dataptr = wasm.exports.Mat32_data(matptr);
- memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
- return matptr;
- }
-
- /**
- * Copy the data of a WebAssembly Mat32 to a matrix
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @param {number} matptr pointer to a Mat32
- * @param {SpeedyMatrix} matrix
- * @returns {number} matptr
- */
- static copyFromMat32(wasm, memory, matptr, matrix) {
- // We assume the following:
- // 1. the host uses little-endian byte ordering (just like WebAssembly)
- // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
- // 3. the data type is float32
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
- //matrix.dtype === 'float32' &&
- matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
- const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
- for (let offset = matrix.data.length - 1; offset >= 0; offset--) matrix.data[offset] = memory.as.float32[base + offset];
- return matptr;
- }
-
- /**
- * Polls the WebAssembly instance until it's ready
- * @param {function(SpeedyMatrixWASMHandle): void} resolve
- * @param {function(Error): void} reject
- * @param {number} [counter]
- */
- static _ready(resolve, reject, counter = 1000) {
- if (_instance !== null && _module !== null)
- resolve({
- wasm: _instance,
- memory: _memory,
- module: _module
- });
- else if (counter <= 0)
- reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));
- else
- setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
- }
- }
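- /*
- * Typical lifecycle of the WASM bridge above (illustrative only):
- *
- *   SpeedyMatrixWASM.ready().then(({ wasm, memory }) => {
- *     // wasm.exports holds the Mat32_* routines;
- *     // memory.as.float32 (etc.) are typed views over the shared WebAssembly.Memory
- *   });
- *
- *   // or, after loading, synchronously:
- *   // const { wasm, memory } = SpeedyMatrixWASM.handle;
- */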
-
- /**
- * Methods called from WASM
- */
- class SpeedyMatrixWASMImports {
- /**
- * Constructor
- * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
- */
- constructor(memory) {
- // find all methods of this object
- const methodNames = Object.getOwnPropertyNames(this.constructor.prototype).filter(property => typeof this[property] === 'function').filter(property => property !== 'constructor');
-
- // bind all methods to this object
- methodNames.forEach(methodName => {
- this[methodName] = this[methodName].bind(this);
- });
-
- /** @type {SpeedyMatrixWASMMemory} WASM memory */
- this.memory = memory;
-
- /** @type {CStringUtils} utilities related to C strings */
- this.cstring = new CStringUtils(memory);
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * Prints a message
- * @param {number} ptr pointer to char
- */
- print(ptr) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
- }
-
- /**
- * Throws an error
- * @param {number} ptr pointer to char
- */
- fatal(ptr) {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
- }
-
- /**
- * Fills a memory segment with a byte
- * @param {number} value byte
- * @param {number} start memory address, inclusive
- * @param {number} end memory address greater than start, exclusive
- */
- bytefill(value, start, end) {
- this.memory.as.uint8.fill(value, start, end);
- }
-
- /**
- * Copy a memory segment to another segment
- * @param {number} target memory address, where we'll start writing
- * @param {number} start memory address, where we'll start copying (inclusive)
- * @param {number} end memory address, where we'll end the copy (exclusive)
- */
- copyWithin(target, start, end) {
- this.memory.as.uint8.copyWithin(target, start, end);
- }
- }
-
- /**
- * Utilities related to C strings
- */
- class CStringUtils {
- /**
- * Constructor
- * @param {SpeedyMatrixWASMMemory} memory
- */
- constructor(memory) {
- /** @type {TextDecoder} */
- this._decoder = new TextDecoder('utf-8');
-
- /** @type {SpeedyMatrixWASMMemory} */
- this._memory = memory;
- }
-
- /**
- * Convert a C string to a JavaScript string
- * @param {number} ptr pointer to char
- * @returns {string}
- */
- get(ptr) {
- const byte = this._memory.as.uint8;
- const size = this._memory.as.uint8.byteLength;
- let p = ptr;
- while (p < size && 0 !== byte[p]) ++p;
- return this._decoder.decode(byte.subarray(ptr, p));
- }
- }
-
- /**
- * WebAssembly loader
- * @param {SpeedyMatrixWASMMemory} memory
- */
- (function loadWASM(memory) {
- const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
-
- // Skip if WebAssembly is unsupported
- if (typeof WebAssembly === 'undefined') return;
-
- // Load the WASM binary
- _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY).then(data => base64decode(data)).then(bytes => WebAssembly.instantiate(bytes, {
- env: Object.assign({
- memory: memory.as.object
- }, SpeedyMatrixWASM.imports(memory))
- })).then(wasm => {
- _instance = wasm.instance;
- _module = wasm.module;
- wasm.instance.exports.srand(Date.now() * 0.001 & 0xffffffff); // srand(time(NULL))
-
- _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
- }).catch(err => {
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
- });
- })(_memory);
-
- /***/ }),
-
- /***/ 4188:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_33268__) => {
-
- "use strict";
- __nested_webpack_require_33268__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_33268__.d(__nested_webpack_exports__, {
- /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
- /* harmony export */ });
- /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_33268__(6306);
- /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_33268__(6465);
- /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_33268__(9192);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_33268__(9037);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix.js
- * Matrix class
- */
-
-
-
-
-
-
- /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
- /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
- /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
- /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
-
- /**
- * Matrix class
- */
- class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r {
- /**
- * @private
- *
- * Low-level constructor
- * @param {number} rows number of rows
- * @param {number} columns number of columns
- * @param {number} step0 step size between two consecutive elements (e.g., 1)
- * @param {number} step1 step size between two consecutive columns (e.g., rows)
- * @param {SpeedyMatrixBufferType} data entries in column-major format
- */
- constructor(rows, columns, step0, step1, data) {
- super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.length + rows * columns === 0 ||
- // empty matrix and empty buffer, or
- data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
- );
-
- /** @type {number} step size between two consecutive elements */
- this._step0 = step0 | 0;
-
- /** @type {number} step size between two consecutive columns */
- this._step1 = step1 | 0;
-
- /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
- this._data = data;
- }
-
- /**
- * Create a new matrix with the specified size and entries
- * @param {number} rows number of rows
- * @param {number} columns number of columns
- * @param {number[]} entries in column-major format
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
- }
-
- /**
- * Create a new matrix filled with zeros with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
- }
-
- /**
- * Create a new matrix filled with ones with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
- }
-
- /**
- * Create a new identity matrix with the specified size
- * @param {number} rows number of rows
- * @param {number} [columns] number of columns
- * @param {SpeedyMatrixDtype} [dtype] data type
- * @returns {SpeedyMatrix}
- */
- static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
- const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
- for (let j = Math.min(rows, columns) - 1; j >= 0; j--) data[j * rows + j] = 1;
- return new SpeedyMatrix(rows, columns, 1, rows, data);
- }
-
- /**
- * Evaluate an expression synchronously and store the result in a new matrix
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix}
- */
- static From(expr) {
- return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
- }
-
- /**
- * Returns a promise that resolves immediately if the WebAssembly routines
- * are ready to be used, or as soon as they do become ready
- * @returns {SpeedyPromise<void>}
- */
- static ready() {
- return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void 0);
- }
-
- /**
- * Get the underlying buffer
- * @returns {SpeedyMatrixBufferType}
- */
- get data() {
- return this._data;
- }
-
- /**
- * Row-step
- * @returns {number} defaults to 1
- */
- get step0() {
- return this._step0;
- }
-
- /**
- * Column-step
- * @returns {number} defaults to this.rows
- */
- get step1() {
- return this._step1;
- }
-
- /**
- * Extract a block from this matrix. Use a shared underlying buffer
- * @param {number} firstRow
- * @param {number} lastRow
- * @param {number} firstColumn
- * @param {number} lastColumn
- * @returns {SpeedyMatrix}
- */
- block(firstRow, lastRow, firstColumn, lastColumn) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(firstRow <= lastRow && firstColumn <= lastColumn, `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`);
-
- // ensure that the indices are within bounds
- firstRow = Math.max(firstRow, 0);
- lastRow = Math.min(lastRow, this._rows - 1);
- firstColumn = Math.max(firstColumn, 0);
- lastColumn = Math.min(lastColumn, this._columns - 1);
-
- // compute the dimensions of the new submatrix
- const rows = lastRow - firstRow + 1;
- const columns = lastColumn - firstColumn + 1;
-
- // obtain the relevant portion of the data
- const step0 = this._step0,
- step1 = this._step1;
- const begin = firstRow * step0 + firstColumn * step1; // inclusive
- const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
-
- // create new matrix
- return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
- }
-
- /**
- * Extract a row from this matrix
- * @param {number} index 0-based
- * @returns {SpeedyMatrix}
- */
- row(index) {
- return this.block(index, index, 0, this._columns - 1);
- }
-
- /**
- * Extract a column from this matrix
- * @param {number} index 0-based
- * @returns {SpeedyMatrix}
- */
- column(index) {
- return this.block(0, this._rows - 1, index, index);
- }
-
- /**
- * Extract the main diagonal from this matrix
- * @returns {SpeedyMatrix} as a column-vector
- */
- diagonal() {
- const diagsize = Math.min(this._rows, this._columns);
-
- // compute the dimensions of the new submatrix
- const rows = diagsize; // make it a column vector
- const columns = 1;
-
- // obtain the relevant portion of the data
- const diagstep = this._step0 + this._step1; // jump a row and a column
- const begin = 0; // inclusive
- const end = 1 + (diagsize - 1) * diagstep; // exclusive
-
- // create new matrix
- return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
- }
-
- /**
- * Read a single entry of this matrix
- * @param {number} row 0-based index
- * @param {number} column 0-based index
- * @returns {number}
- */
- at(row, column) {
- if (row >= 0 && row < this._rows && column >= 0 && column < this._columns)
- return this._data[this._step0 * row + this._step1 * column];
- else
- return Number.NaN;
- }
-
- /**
- * Read the entries of the matrix in column-major format
- * @returns {number[]}
- */
- read() {
- const entries = new Array(this._rows * this._columns);
- const step0 = this._step0,
- step1 = this._step1;
- let i = 0;
- for (let column = 0; column < this._columns; column++) {
- for (let row = 0; row < this._rows; row++) entries[i++] = this._data[row * step0 + column * step1];
- }
- return entries;
- }
-
- /**
- * Returns a human-readable string representation of the matrix
- * @returns {string}
- */
- toString() {
- const DECIMALS = 5;
- const rows = this.rows,
- columns = this.columns;
- const entries = this.read();
- const mat = /** @type {number[][]} */new Array(rows);
- for (let i = 0; i < rows; i++) {
- mat[i] = new Array(columns);
- for (let j = 0; j < columns; j++) mat[i][j] = entries[j * rows + i];
- }
- const fix = x => x.toFixed(DECIMALS);
- const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
- const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
- return str;
- }
-
- /**
- * Set the contents of this matrix to the result of an expression
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
- */
- setTo(expr) {
- return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
- // TODO: add support for WebWorkers
- return this.setToSync(expr);
- });
- }
-
- /**
- * Synchronously set the contents of this matrix to the result of an expression
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix} this
- */
- setToSync(expr) {
- const {
- wasm,
- memory
- } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
-
- // evaluate the expression
- const result = expr._evaluate(wasm, memory);
-
- /*
- // shallow copy the results to this matrix
- // limitation: can't handle blocks properly
- // (a tree-like structure could be useful)
- this._rows = result.rows;
- this._columns = result.columns;
- //this._dtype = result.dtype;
- this._data = result.data;
- this._step0 = result.step0;
- this._step1 = result.step1;
- */
-
- // validate shape
- _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype, `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`);
-
- // deep copy
- const step0 = this._step0,
- step1 = this._step1,
- rstep0 = result._step0,
- rstep1 = result._step1;
- if (step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
- // fast copy
- this._data.set(result._data);
- } else {
- // copy each element
- for (let column = this._columns - 1; column >= 0; column--) {
- for (let row = this._rows - 1; row >= 0; row--) this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
- }
- }
-
- // done!
- return this;
- }
-
- /**
- * Fill this matrix with a scalar value
- * @param {number} value
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
- */
- fill(value) {
- this.fillSync(value);
- return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
- }
-
- /**
- * Synchronously fill this matrix with a scalar value
- * @param {number} value
- * @returns {SpeedyMatrix} this
- */
- fillSync(value) {
- value = +value;
- if (this._rows * this._columns === this._data.length) {
- this._data.fill(value);
- return this;
- }
- for (let column = 0; column < this._columns; column++) {
- for (let row = 0; row < this._rows; row++) {
- this._data[row * this._step0 + column * this._step1] = value;
- }
- }
- return this;
- }
-
- /**
- * Evaluate this expression
- * @param {WebAssembly.Instance} wasm
- * @param {SpeedyMatrixWASMMemory} memory
- * @returns {SpeedyMatrix}
- */
- _evaluate(wasm, memory) {
- return this;
- }
- }
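- /*
- * A small end-to-end sketch of the SpeedyMatrix API above (illustrative only;
- * entries are given in column-major order and setTo() waits for the WASM
- * routines to load):
- *
- *   const A = SpeedyMatrix.Create(2, 2, [1, 3, 2, 4]); // [[1, 2], [3, 4]]
- *   const C = SpeedyMatrix.Zeros(2);
- *   C.setTo(A.times(A.inverse())).then(() => console.log(C.toString())); // ~ identity
- *   const d = A.diagonal(); // 2 x 1 view sharing A's buffer: [1, 4]
- */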
-
- /***/ }),
-
- /***/ 6634:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_48547__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_48547__.d(__nested_webpack_exports__, {
- /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
- /* harmony export */ });
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_48547__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-namespace.js
- * Symbolizes a namespace
- */
-
-
-
- /**
- * An abstract namespace
- * @abstract
- */
- class SpeedyNamespace {
- /**
- * Namespaces can't be instantiated.
- * Only static methods are allowed.
- * @abstract
- * @throws SpeedyError
- */
- constructor() {
- // only static methods are allowed
- throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
- }
- }
-
- /***/ }),
-
- /***/ 9192:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_50059__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_50059__.d(__nested_webpack_exports__, {
- /* harmony export */ i: () => (/* binding */ SpeedyPromise)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-promise.js
- * Speedy Promises: a fast implementation of Promises
- */
-
- const PENDING = 0;
- const FULFILLED = 1;
- const REJECTED = 2;
- const SUSPEND_ASYNC = 1;
- const asap = typeof queueMicrotask !== 'undefined' && queueMicrotask || // browsers
- typeof process !== 'undefined' && process.nextTick || // node.js
- (f => Promise.resolve().then(() => f())); // most compatible
-
- /**
- * SpeedyPromise: Super Fast Promises. SpeedyPromises can
- * interoperate with ES6 Promises. This implementation is
- * based on the Promises/A+ specification.
- * @template T
- */
- class SpeedyPromise {
- /**
- * Constructor
- * @param {function(function(T=): void, function(Error): void): void} callback
- */
- constructor(callback) {
- this._state = PENDING;
- this._value = undefined;
- this._onFulfillment = null;
- this._onRejection = null;
- this._children = 0;
- this[0] = this;
- this._parent = undefined;
- this._flags = 0;
- this._fulfill = this._fulfill.bind(this);
- this._reject = this._reject.bind(this);
- this._resolve = this._resolve.bind(this);
- this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
- callback(this._fulfill, this._reject);
- }
-
- /**
- * Set up handlers
- * @template U, V=never
- * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
- * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
- * @returns {SpeedyPromise<U>}
- */
- then(onFulfillment, onRejection = null) {
- const child = new SpeedyPromise(this._nop);
- child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
- child._onRejection = typeof onRejection === 'function' && onRejection;
- child._parent = this;
- this[this._children++] = child; // attach child
- this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
- this._notify();
- return child;
- }
-
- /**
- * Set up a rejection handler
- * @template U, V=never
- * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
- * @returns {SpeedyPromise<V>}
- */
- catch(onRejection) {
- return this.then(null, onRejection);
- }
-
- /**
- * Execute a callback when the promise is settled
- * (i.e., fulfilled or rejected)
- * @param {function(): void} onFinally
- * @returns {SpeedyPromise<T>}
- */
- finally(onFinally) {
- const fn = val => {
- onFinally();
- return val;
- };
- return this.then(fn, fn);
- }
-
- /**
- * Start the computation immediately, synchronously.
- * Can't afford to spend any time at all waiting for micro-tasks, etc.
- * @returns {SpeedyPromise<T>} this
- */
- turbocharge() {
- let my = this;
-
- // suspend the async behavior
- this._flags |= SUSPEND_ASYNC;
- while (my._parent !== undefined) {
- my = my._parent;
- my._flags |= SUSPEND_ASYNC;
- }
-
- // notify the children of the root
- my._notify(); // will be synchronous
-
- // return this SpeedyPromise
- return this;
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- switch (this._state) {
- case PENDING:
- return `SpeedyPromise { <pending> }`;
- case FULFILLED:
- return `SpeedyPromise { <fulfilled> ${this._value} }`;
- case REJECTED:
- return `SpeedyPromise { <rejected> ${this._value} }`;
- default:
- return '';
- }
- }
-
- /**
- * Symbol.toStringTag
- * @returns {string}
- */
- get [Symbol.toStringTag]() {
- return 'SpeedyPromise';
- }
-
- /**
- * Creates a resolved SpeedyPromise
- * @template U
- * @param {U} [value]
- * @returns {SpeedyPromise<U>}
- */
- static resolve(value) {
- const promise = new SpeedyPromise(this._snop);
- if (typeof value === 'object' && value !== null && 'then' in value || typeof value === 'function' && 'then' in value) {
- // resolve asynchronously
- promise._resolve(value);
- } else {
- // fulfill synchronously
- promise._value = value;
- promise._state = FULFILLED;
- }
- return promise;
- }
-
- /**
- * Creates a rejected SpeedyPromise
- * @template U
- * @param {Error} reason
- * @returns {SpeedyPromise<U>}
- */
- static reject(reason) {
- const promise = new SpeedyPromise(this._snop);
- promise._value = reason;
- promise._state = REJECTED;
- return promise;
- }
-
- /**
- * Returns a SpeedyPromise that resolves to an array
- * containing the results of the input promises/values,
- * in their given order. The returned SpeedyPromise will
- * resolve if all input promises resolve, or reject if
- * any input promise rejects.
- * @template U
- * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
- * @returns {SpeedyPromise<U[]>}
- *
- * FIXME iterables need not be all <U>
- */
- static all(iterable) {
- return new SpeedyPromise((resolve, reject) => {
- const input = [];
-
- // get elements
- for (const element of iterable) input.push(element);
-
- // resolve synchronously if there are no elements
- const length = input.length;
- if (length == 0) {
- resolve([]);
- return;
- }
-
- // resolve asynchronously
- let counter = length;
- const output = new Array(length);
- const partialResolve = i => val => {
- output[i] = val;
- if (0 == --counter) resolve(output);
- };
- for (let i = 0; i < length; i++) {
- const element = input[i];
- if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(partialResolve(i), reject);else SpeedyPromise.resolve(element).then(partialResolve(i), reject);
- }
- });
- }
-
- /**
- * Returns a promise that gets fulfilled or rejected as soon
- * as the first promise in the iterable gets fulfilled or
- * rejected (with its value/reason).
- * @template U
- * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
- * @returns {SpeedyPromise<U>}
- */
- static race(iterable) {
- return new SpeedyPromise((resolve, reject) => {
- const input = [];
-
- // get elements
- for (const element of iterable) input.push(element);
-
- // if the iterable is empty, the promise
- // will be pending forever...
-
- // resolve asynchronously
- const length = input.length;
- for (let i = 0; i < length; i++) {
- const element = input[i];
- if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(resolve, reject);else SpeedyPromise.resolve(element).then(resolve, reject);
- }
- });
- }
-
- /**
- * Fulfill this promise with a value
- * @param {T} value
- */
- _fulfill(value) {
- this._setState(FULFILLED, value);
- }
-
- /**
- * Reject this promise with a reason
- * @param {Error} reason
- */
- _reject(reason) {
- this._setState(REJECTED, reason);
- }
-
- /**
- * Set the state and the value of this promise
- * @param {number} state
- * @param {T|Error} value
- */
- _setState(state, value) {
- // the promise is already fulfilled or rejected
- if (this._state != PENDING) return;
-
- // set the new state
- this._state = state;
- this._value = value;
- this._notify();
- }
-
- /**
- * Notify my children that this promise is no
- * longer pending. This is an async operation:
- * my children will be notified "as soon
- * as possible" (it will be scheduled).
- * We may force this to be synchronous, though
- */
- _notify() {
- // nothing to do
- if (this._state == PENDING) return;
-
- // have we turbocharged this promise?
- if (this._flags & SUSPEND_ASYNC) {
- this._broadcast(); // execute synchronously
- return;
- }
-
- // install a timer (default behavior)
- asap(this._broadcastIfAsync);
- }
-
- /**
- * Helper method
- */
- _broadcastIfAsync() {
- // we may have installed a timer at some
- // point, but turbocharged the promise later
- if (!(this._flags & SUSPEND_ASYNC)) this._broadcast();
- }
-
- /**
- * Tell my children that this promise
- * is either fulfilled or rejected.
- * This is a synchronous operation
- */
- _broadcast() {
- const children = this._children;
- const state = this._state;
- if (state === FULFILLED) {
- for (let i = 0; i < children; i++) {
- const child = this[i];
- const callback = child._onFulfillment;
- try {
- if (callback) {
- if (callback !== child._nop) {
- child._resolve(callback(this._value)); // promise resolution procedure
- child._onFulfillment = child._nop; // will not be called again
- }
- } else child._fulfill(this._value);
- } catch (e) {
- child._reject(e);
- }
- }
- } else if (state === REJECTED) {
- for (let i = 0; i < children; i++) {
- const child = this[i];
- const callback = child._onRejection;
- try {
- if (callback) {
- if (callback !== child._nop) {
- child._resolve(callback(this._value)); // promise resolution procedure
- child._onRejection = child._nop; // will not be called again
- }
- } else child._reject(this._value);
- } catch (e) {
- child._reject(e);
- }
- }
- }
- }
-
- /**
- * Promise Resolution Procedure
- * based on the Promises/A+ spec
- * @param {T} x
- */
- _resolve(x) {
- if (typeof x !== 'object' && typeof x !== 'function' || x === null) {
- // if(x !== Object(x))
- this._fulfill(x);
- return;
- }
- if (x === this) throw new TypeError(); // Circular reference
-
- if (x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
- x.then(this._resolve, this._reject);
- return;
- }
- try {
- const then = x.then;
- if (typeof then === 'function') {
- let resolve = this._resolve,
- reject = this._reject;
- try {
- then.call(x, y => {
- resolve(y);
- resolve = reject = this._nop;
- }, r => {
- reject(r);
- resolve = reject = this._nop;
- });
- } catch (e) {
- if (resolve !== this._nop && reject !== this._nop) this._reject(e);
- }
- } else {
- this._fulfill(x);
- }
- } catch (e) {
- this._reject(e);
- }
- }
-
- /**
- * No-operation
- */
- _nop() {}
-
- /**
- * Static no-operation
- */
- static _snop() {}
- }
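-
- // Usage sketch (illustrative, kept as a comment): the API mirrors ES6 Promises
- // (then/catch/finally, resolve/reject, all/race), plus turbocharge() to flush a
- // settled chain synchronously. The values below are arbitrary.
- /*
- new SpeedyPromise((resolve, reject) => resolve(21))
-     .then(x => x * 2)
-     .catch(err => { console.error(err); return 0; })
-     .finally(() => console.log('settled'));
-
- SpeedyPromise.all([SpeedyPromise.resolve(1), Promise.resolve(2), 3])
-     .then(values => console.log(values)); // [1, 2, 3]
-
- SpeedyPromise.resolve('now').then(console.log).turbocharge(); // logs synchronously
- */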
-
- //module.exports = { SpeedyPromise };
-
- /*
- // Uncomment to test performance with regular Promises
- module.exports = { SpeedyPromise: Promise };
- Promise.prototype.turbocharge = function() { return this };
- */
-
- /***/ }),
-
- /***/ 9420:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_61794__) => {
-
- "use strict";
-
- // EXPORTS
- __nested_webpack_require_61794__.d(__nested_webpack_exports__, {
- gx: () => (/* binding */ createShader),
- bf: () => (/* binding */ importShader)
- });
-
- // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
-
- // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
- var speedy_gl = __nested_webpack_require_61794__(1001);
- // EXTERNAL MODULE: ./src/utils/utils.js
- var utils = __nested_webpack_require_61794__(9037);
- // EXTERNAL MODULE: ./src/utils/types.js
- var types = __nested_webpack_require_61794__(6049);
- // EXTERNAL MODULE: ./src/utils/errors.js
- var errors = __nested_webpack_require_61794__(8581);
- ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
- function _wrapRegExp() { _wrapRegExp = function (e, r) { return new BabelRegExp(e, void 0, r); }; var e = RegExp.prototype, r = new WeakMap(); function BabelRegExp(e, t, p) { var o = RegExp(e, t); return r.set(o, p || r.get(e)), _setPrototypeOf(o, BabelRegExp.prototype); } function buildGroups(e, t) { var p = r.get(t); return Object.keys(p).reduce(function (r, t) { var o = p[t]; if ("number" == typeof o) r[t] = e[o];else { for (var i = 0; void 0 === e[o[i]] && i + 1 < o.length;) i++; r[t] = e[o[i]]; } return r; }, Object.create(null)); } return _inherits(BabelRegExp, RegExp), BabelRegExp.prototype.exec = function (r) { var t = e.exec.call(this, r); if (t) { t.groups = buildGroups(t, this); var p = t.indices; p && (p.groups = buildGroups(p, this)); } return t; }, BabelRegExp.prototype[Symbol.replace] = function (t, p) { if ("string" == typeof p) { var o = r.get(this); return e[Symbol.replace].call(this, t, p.replace(/\$<([^>]+)>/g, function (e, r) { var t = o[r]; return "$" + (Array.isArray(t) ? t.join("$") : t); })); } if ("function" == typeof p) { var i = this; return e[Symbol.replace].call(this, t, function () { var e = arguments; return "object" != typeof e[e.length - 1] && (e = [].slice.call(e)).push(buildGroups(e, i)), p.apply(this, e); }); } return e[Symbol.replace].call(this, t, p); }, _wrapRegExp.apply(this, arguments); }
- function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
- function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shader-preprocessor.js
- * Custom preprocessor for shaders
- */
-
-
-
-
-
-
- /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
- /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
-
- // Import numeric globals
- const globals = __nested_webpack_require_61794__(3816);
- const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */
- Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce((obj, key) => (obj[key] = globals[key], obj), {});
-
- /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
- const basicConstants = Object.freeze(Object.assign(Object.assign({}, numericGlobals), {}, {
- // fragment shader
- 'FS_USE_CUSTOM_PRECISION': 0,
- // use default precision settings
- 'FS_OUTPUT_TYPE': 0,
- // normalized RGBA
-
- // colors
- 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
- 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
- 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
- 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA
- }));
-
- /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
- const platformConstants = (platform, glRenderer) => Object.freeze({
- 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0,
- // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
- 'APPLE_GPU': /Apple/.test(glRenderer) | 0,
- // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
- 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0 // Intel[(R)] ... [HD] Graphics xyz ...
- });
-
- // Regular Expressions
- const commentsRegex = [/\/\*(.|\s)*?\*\//g, /\/\/.*$/gm];
- const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
- const constantRegex = /@(\w+)@/g;
- const unrollRegex = [/*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
- counter: 2
- }), /*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+=\s*(\x2D?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
- counter: 2
- })];
-
- /**
- * Custom preprocessor for the shaders
- */
- class ShaderPreprocessor {
- /**
- * Runs the preprocessor and generates GLSL code
- * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
- * @param {string} infix annotated GLSL code
- * @param {string} [prefix]
- * @param {string} [suffix]
- * @returns {string} preprocessed GLSL code
- */
- static generateGLSL(defines, infix, prefix = null, suffix = null) {
- //
- // The preprocessor will remove comments from GLSL code,
- // include requested GLSL files and import global constants
- // defined for all shaders (see above)
- //
- const errors = []; // compile-time errors
- const constants = generateConstants(defines);
- const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
- return unrollLoops(annotatedGLSL.replace(commentsRegex[0], '').replace(commentsRegex[1], '').replace(constantRegex, (_, name) => String(
- // Replace preprocessor @CONSTANTS@ by their numeric values
- constants.has(name) ? Number(constants.get(name)) : (errors.push(`Undefined constant ${name}`), 0))).replace(includeRegex, (_, filename) =>
- // Included files may include other files.
- // XXX no cycle detection!
- ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))), defines) + errors.map(msg => `\n#error ${msg}\n`).join('');
- }
- }
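-
- // Example (illustrative, kept as a comment): the kind of annotated GLSL accepted by
- // generateGLSL(). @NAME@ tokens are replaced by numeric constants (user #defines take
- // precedence over the globals above) and @include reads a file from shaders/include/
- // via readfileSync(). "colors.glsl" is a hypothetical file name.
- /*
- @include "colors.glsl"
- uniform sampler2D image;
- void main() {
-     color = vec4(float(@PIXELCOMPONENT_RED@)); // token replaced by its numeric value
- }
- */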
-
- /**
- * Generate GLSL code based on the input arguments
- * @param {ShaderPreprocessorConstants} defines
- * @param {string} infix
- * @param {string} [prefix]
- * @param {string} [suffix]
- * @returns {string} GLSL code
- */
- function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null) {
- const parts = [];
- if (prefix !== null) parts.push(prefix);
- for (const [key, value] of defines) parts.push(`#define ${key} ${Number(value)}`);
- parts.push(infix);
- if (suffix !== null) parts.push(suffix);
- return parts.join('\n');
- }
-
- /**
- * Generate pre-processor constants. Constants provided by the
- * user have higher priority than globally available constants.
- * @param {ShaderPreprocessorConstants} defines user-provided
- * @returns {ShaderPreprocessorConstants}
- */
- function generateConstants(defines) {
- utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
- const myConstants = /** @type {ShaderPreprocessorConstants} */new Map();
- const globalConstants = Object.assign(Object.create(null), basicConstants, platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer));
-
- // globally available constants have lower priority
- for (const key in globalConstants) {
- //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
- myConstants.set(key, globalConstants[key]);
- }
-
- // user-defined constants have higher priority
- for (const [key, value] of defines) myConstants.set(key, value);
-
- // done!
- return myConstants;
- }
-
- /**
- * Reads a shader from the shaders/include/ folder
- * @param {string} filename
- * @returns {string}
- */
- function readfileSync(filename) {
- if (String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/)) return __nested_webpack_require_61794__(5235)("./" + filename);
- throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
- }
-
- /**
- * Unroll for loops in our own preprocessor
- * @param {string} code
- * @param {ShaderPreprocessorConstants} defines
- * @returns {string}
- */
- function unrollLoops(code, defines) {
- //
- // Currently, only integer for loops with positive step values
- // can be unrolled. (TODO: negative step values?)
- //
- // The current implementation does not support curly braces
- // inside unrolled loops. You may define macros to get around
- // this, but do you actually need to unroll such loops?
- //
- // Loops that don't fit the supported pattern will crash
- // the preprocessor if you try to unroll them.
- //
- const fn = unroll.bind(defines); // CRAZY!
- const n = unrollRegex.length;
- for (let i = 0; i < n; i++) code = code.replace(unrollRegex[i], fn);
- return code;
- }
-
- /**
- * Unroll a loop pattern (regexp)
- * @param {string} match the matched for loop
- * @param {string} type
- * @param {string} counter
- * @param {string} start
- * @param {string} cmp
- * @param {string} end
- * @param {string} step
- * @param {string} loopcode
- * @returns {string} unrolled loop
- */
- function unroll(match, type, counter, start, cmp, end, step, loopcode) {
- const defines = /** @type {ShaderPreprocessorConstants} */this;
-
- // check if the loop limits are numeric constants or #defined numbers from the outside
- const hasStart = Number.isFinite(+start) || defines.has(start);
- const hasEnd = Number.isFinite(+end) || defines.has(end);
- if (!hasStart || !hasEnd) {
- if (defines.size > 0) throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);else return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
- }
-
- // parse and validate limits & step
- let istart = defines.has(start) ? defines.get(start) : parseInt(start);
- let iend = defines.has(end) ? defines.get(end) : parseInt(end);
- let istep = step.length == 0 ? 1 : parseInt(step);
- utils/* Utils */.A.assert(istart <= iend && istep > 0);
-
- /*
- // debug
- console.log(`Found "${match}"`);
- console.log(`type="${type}"`);
- console.log(`counter="${counter}"`);
- console.log(`start="${start}"`);
- console.log(`cmp="${cmp}"`);
- console.log(`end="${end}"`);
- console.log(`step="${step}"`);
- console.log(`loopcode="${loopcode}"`)
- console.log('Defines:', defines);
- */
-
- // continue statements are not supported inside unrolled loops
- // and will generate a compiler error. Using break is ok.
- const hasBreak = loopcode.match(/\bbreak\s*;/) !== null;
-
- // create a new scope
- let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
-
- // declare counter
- unrolledCode += `${type} ${counter};\n`;
-
- // unroll loop
- iend += cmp == '<=' ? 1 : 0;
- for (let i = istart; i < iend; i += istep) unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
-
- // close scope
- unrolledCode += '}\n';
- //console.log('Unrolled code:\n\n' + unrolledCode);
-
- // done!
- return unrolledCode;
- }
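-
- // Example (illustrative, kept as a comment): a loop annotated with @unroll, as matched
- // by unrollRegex, and the code that unroll() expands it into. The counter remains a
- // real variable that is reassigned at the top of each unrolled block.
- /*
- @unroll for(int i = 0; i < 3; i++) {
-     sum += pixel[i];
- }
-
- // expands to (whitespace simplified):
- {
-     int i;
-     { i = 0; sum += pixel[i]; }
-     { i = 1; sum += pixel[i]; }
-     { i = 2; sum += pixel[i]; }
- }
- */
-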
- ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shader-declaration.js
- * Encapsulates a shader declaration
- */
-
-
-
-
- const DEFAULT_ATTRIBUTES = Object.freeze({
- position: 'a_position',
- texCoord: 'a_texCoord'
- });
- const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
- position: 0,
- // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
- texCoord: 1
- });
- const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
- precision highp float;
- precision highp int;
-
- layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
- layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
- out highp vec2 texCoord;
- uniform highp vec2 texSize;
-
- #define vsinit() \
- gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
- texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
- \n\n`;
- const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
- const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
- const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
-
- #if @FS_USE_CUSTOM_PRECISION@ == 0
- precision mediump float; // ~float16
- precision mediump sampler2D;
- precision highp int; // int32
- #endif
-
- #if @FS_OUTPUT_TYPE@ == 0
- #define OUT_TYPE mediump vec4
- #elif @FS_OUTPUT_TYPE@ == 1
- #define OUT_TYPE mediump ivec4
- #elif @FS_OUTPUT_TYPE@ == 2
- #define OUT_TYPE mediump uvec4
- #else
- #error Unknown FS_OUTPUT_TYPE
- #endif
-
- out OUT_TYPE color;
- in highp vec2 texCoord;
- uniform highp vec2 texSize;
-
- @include "global.glsl"\n\n`;
- const PRIVATE_TOKEN = Symbol();
-
- /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
- /** @typedef {string[]} ShaderDeclarationArgumentList */
- /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
- /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
-
- /**
- * Shader Declaration
- * @abstract
- */
- class ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
- * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsSource, vsSource) {
- // private constructor!
- if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er();
-
- /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
- this._arguments = [...argumentList];
-
- /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
- this._defines = new Map(defines);
-
- /** @type {string} preprocessed source code of the fragment shader */
- this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
-
- /** @type {string} preprocessed source code of the vertex shader */
- this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
-
- /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
- this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
-
- // validate arguments
- this._validateArguments(this._arguments, this._uniforms);
- }
-
- /**
- * Return the preprocessed GLSL source code of the fragment shader
- * @returns {string}
- */
- get fragmentSource() {
- return this._fragmentSource;
- }
-
- /**
- * Return the preprocessed GLSL source code of the vertex shader
- * @returns {string}
- */
- get vertexSource() {
- return this._vertexSource;
- }
-
- /**
- * Get the names of the vertex shader attributes
- * @returns {typeof DEFAULT_ATTRIBUTES}
- */
- get attributes() {
- return DEFAULT_ATTRIBUTES;
- }
-
- /**
- * Get the pre-defined locations of the vertex shader attributes
- * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
- */
- get locationOfAttributes() {
- return DEFAULT_ATTRIBUTES_LOCATION;
- }
-
- /**
- * Names of the arguments that will be passed to the Shader,
- * corresponding to GLSL uniforms, in the order they will be passed
- * @returns {string[]}
- */
- get arguments() {
- return [].concat(this._arguments);
- }
-
- /**
- * Names of the uniforms declared in the shader
- * @returns {string[]}
- */
- get uniforms() {
- return Array.from(this._uniforms.keys());
- }
-
- /**
- * The GLSL type of a uniform variable declared in the shader
- * @param {string} name
- * @returns {string}
- */
- uniformType(name) {
- if (!this._uniforms.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
- return this._uniforms.get(name);
- }
-
- /**
- * The value of an externally defined constant, i.e., via withDefines()
- * @param {string} name
- * @returns {number}
- */
- definedConstant(name) {
- if (!this._defines.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
- return this._defines.get(name);
- }
-
- /**
- * Parses a GLSL source and detects the uniform variables,
- * as well as their types
- * @param {string} preprocessedSource
- * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
- */
- _autodetectUniforms(preprocessedSource) {
- const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
- const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
- const uniforms = /** @type {ShaderDeclarationUniformTypes} */new Map();
- let match;
- while ((match = regex.exec(sourceWithoutComments)) !== null) {
- const type = match[2];
- const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
-
- for (const name of names) {
- if (name.endsWith(']')) {
- // is it an array?
- if (!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/))) throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
-
- // read array name & size
- const [array, size] = [match[1], Number(match[2])];
-
- // register uniforms
- for (let i = 0; i < size; i++) uniforms.set(`${array}[${i}]`, type);
- } else {
- // register a regular uniform
- if (!uniforms.has(name) || uniforms.get(name) === type) uniforms.set(name, type);else throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
- }
- }
- }
- return uniforms;
- }
-
- /**
- * Checks if all the arguments of the shader declaration are backed by a
- * uniform variable in GLSL code
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationUniformTypes} uniforms
- * @throws {IllegalArgumentError}
- */
- _validateArguments(argumentList, uniforms) {
- for (const argname of argumentList) {
- if (!uniforms.has(argname)) {
- if (!uniforms.has(argname + '[0]')) throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
- }
- }
- }
- }
-
- /**
- * A ShaderDeclaration that has its GLSL code stored in-memory
- */
- class MemoryShaderDeclaration extends ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
- * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER) {
- super(privateToken, argumentList, defines, fsSource, vsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
- this._fsUnprocessedSource = String(fsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
- this._vsUnprocessedSource = String(vsSource);
- }
- }
-
- /**
- * A ShaderDeclaration that has its GLSL code stored in a file
- */
- class FileShaderDeclaration extends ShaderDeclaration {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationArgumentList} argumentList
- * @param {ShaderDeclarationPreprocessorConstants} defines
- * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
- * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
- */
- constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '') {
- // validate paths
- if (!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);else if (vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
-
- // import files
- const fsSource = __nested_webpack_require_61794__(4606)("./" + String(fsFilepath));
- const vsSource = vsFilepath != '' ? __nested_webpack_require_61794__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
-
- // super class
- super(privateToken, argumentList, defines, fsSource, vsSource);
-
- /** @type {string} filepath of the fragment shader */
- this._fsFilepath = String(fsFilepath);
-
- /** @type {string} filepath of the vertex shader */
- this._vsFilepath = String(vsFilepath);
- }
-
- /**
- * Return the preprocessed GLSL source code of the fragment shader
- * @returns {string}
- */
- get fragmentSource() {
- // we override this method to include the filepath. The motivation
- // is to easily identify the file when debugging compilation errors.
- return this._addHeader('// File: ' + this._fsFilepath, super.fragmentSource);
- }
-
- /**
- * Return the preprocessed GLSL source code of the vertex shader
- * @returns {string}
- */
- get vertexSource() {
- // we override this method to include the filepath. The motivation
- // is to easily identify the file when debugging compilation errors.
- return this._addHeader('// File: ' + (this._vsFilepath != '' ? this._vsFilepath : '(default-vs) ' + this._fsFilepath), super.vertexSource);
- }
-
- /**
- * Add a header to GLSL code
- * @param {string} header code to be added
- * @param {string} src pre-processed GLSL code
- * @returns {string} src with an added header
- */
- _addHeader(header, src) {
- utils/* Utils */.A.assert(header.startsWith('//') && !header.includes('\n'));
- const j = src.indexOf('\n');
- const versionDirective = src.substr(0, j);
- const body = src.substr(j);
- utils/* Utils */.A.assert(versionDirective.startsWith('#version '));
- const head = versionDirective + '\n' + header;
- return head + body;
- }
- }
-
- /**
- * A builder of a ShaderDeclaration
- * @abstract
- */
- class ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- */
- constructor(privateToken) {
- if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er(); // private constructor!
-
- /** @type {string[]} ordered list of uniform names */
- this._arguments = [];
-
- /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
- this._defines = new Map();
- }
-
- /**
- * Specify the list & order of arguments to be
- * passed to the shader
- * @param {string[]} args argument names
- * @returns {this}
- */
- withArguments(...args) {
- // the list of arguments may be declared only once
- if (this._arguments.length > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
-
- // get arguments
- for (let j = 0; j < args.length; j++) this._arguments.push(String(args[j]));
-
- // done!
- return this;
- }
-
- /**
- * Specify a set of #defines to be prepended to the shader
- * @param {Object<string,number>} defines key-value pairs
- * @returns {this}
- */
- withDefines(defines) {
- // the list of #defines may be defined only once
- if (this._defines.size > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
-
- // store and write the #defines
- const keys = Object.keys(defines);
- for (const key of keys) {
- const value = Number(defines[key]); // force numeric values (just in case)
- this._defines.set(key, value);
- }
-
- // done!
- return this;
- }
-
- /**
- * Build a ShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- throw new errors/* AbstractMethodError */.aQ();
- }
- }
-
- /**
- * A builder of a MemoryShaderDeclaration
- */
- class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {ShaderDeclarationUnprocessedGLSL} fsSource
- * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
- */
- constructor(privateToken, fsSource, vsSource) {
- super(privateToken);
-
- /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
- this._fsSource = String(fsSource);
-
- /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
- this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
- }
-
- /**
- * Build a MemoryShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
- }
- }
-
- /**
- * A builder of a FileShaderDeclaration
- */
- class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder {
- /**
- * @private Constructor
- * @param {Symbol} privateToken
- * @param {string} fsFilepath
- * @param {string} [vsFilepath]
- */
- constructor(privateToken, fsFilepath, vsFilepath) {
- super(privateToken);
-
- /** @type {string} path to the unprocessed GLSL code of the fragment shader */
- this._fsFilepath = String(fsFilepath);
-
- /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
- this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
- }
-
- /**
- * Build a FileShaderDeclaration
- * @returns {ShaderDeclaration}
- */
- build() {
- return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
- }
- }
-
- /**
- * Import a ShaderDeclaration from a GLSL file
- * @param {string} filepath relative to the shaders/ folder (a .glsl file)
- * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
- * @returns {ShaderDeclarationBuilder}
- */
- function importShader(filepath, vsfilepath = undefined) {
- return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
- }
-
- /**
- * Create a ShaderDeclaration from a GLSL source code
- * @param {string} source fragment shader
- * @param {string} [vssource] optional vertex shader
- * @returns {ShaderDeclarationBuilder}
- */
- function createShader(source, vssource = undefined) {
- return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
- }
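-
- // Usage sketch (illustrative, kept as a comment): both helpers return a builder; chain
- // withArguments()/withDefines() and call build() to obtain the ShaderDeclaration. The
- // file path and the GLSL body below are hypothetical.
- /*
- const decl = importShader('filters/my-filter.glsl')
-     .withArguments('image', 'strength')
-     .withDefines({ KERNEL_SIZE: 5 })
-     .build();
-
- const inline = createShader(`
-     uniform sampler2D image;
-     void main() { color = threadPixel(image); }
- `).withArguments('image').build();
- */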
-
- /***/ }),
-
- /***/ 1672:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_89681__) => {
-
- "use strict";
- __nested_webpack_require_89681__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_89681__.d(__nested_webpack_exports__, {
- /* harmony export */ conv2D: () => (/* binding */ conv2D),
- /* harmony export */ convX: () => (/* binding */ convX),
- /* harmony export */ convY: () => (/* binding */ convY)
- /* harmony export */ });
- /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_89681__(9420);
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_89681__(9037);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_89681__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * convolution.js
- * Convolution shader generators
- */
-
-
-
-
-
- /**
- * Generate a 2D convolution with a square kernel
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function conv2D(kernel, normalizationConstant = 1.0) {
- const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
- const kSize = Math.sqrt(kernel32.length) | 0;
- const N = kSize >> 1; // idiv 2
-
- // validate input
- if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);else if (kSize * kSize != kernel32.length) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
-
- // select the appropriate pixel function
- const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
-
- // code generator
- const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(cur => fn(kernel32[(cur[0] + N) * kSize + (cur[1] + N)], cur[0], cur[1])).join('\n');
- const generateCode = (k, dy, dx) => `
- result += ${pixelAtOffset}(image, ivec2(${-dx | 0}, ${-dy | 0})) * float(${+k});
- `;
-
- // shader
- const source = `
- uniform sampler2D image;
-
- void main()
- {
- float alpha = threadPixel(image).a;
- vec4 result = vec4(0.0f);
-
- ${foreachKernelElement(generateCode)}
-
- color = vec4(result.rgb, alpha);
- }
- `;
-
- // done!
- return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
- }
-
- /**
- * Generate a 1D convolution function on the x-axis
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function convX(kernel, normalizationConstant = 1.0) {
- return conv1D('x', kernel, normalizationConstant);
- }
-
- /**
- * Generate a 1D convolution function on the y-axis
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function convY(kernel, normalizationConstant = 1.0) {
- return conv1D('y', kernel, normalizationConstant);
- }
-
- /**
- * 1D convolution function generator
- * @param {string} axis either "x" or "y"
- * @param {number[]} kernel convolution kernel
- * @param {number} [normalizationConstant] will be multiplied by all kernel entries
- * @returns {ShaderDeclarationBuilder}
- */
- function conv1D(axis, kernel, normalizationConstant = 1.0) {
- const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
- const kSize = kernel32.length;
- const N = kSize >> 1; // idiv 2
-
- // validate input
- if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);else if (axis != 'x' && axis != 'y') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
-
- // select the appropriate pixel function
- const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
-
- // code generator
- const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce((acc, cur) => acc + fn(kernel32[cur + N], cur), '');
- const generateCode = (k, i) => axis == 'x' ? `
- pixel += ${pixelAtOffset}(image, ivec2(${-i | 0}, 0)) * float(${+k});
- ` : `
- pixel += ${pixelAtOffset}(image, ivec2(0, ${-i | 0})) * float(${+k});
- `;
-
- // shader
- const source = `
- uniform sampler2D image;
-
- void main()
- {
- float alpha = threadPixel(image).a;
- vec4 pixel = vec4(0.0f);
-
- ${foreachKernelElement(generateCode)}
-
- color = vec4(pixel.rgb, alpha);
- }
- `;
-
- // done!
- return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
- }
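-
- // Usage sketch (illustrative, kept as a comment): building convolution shaders with the
- // generators above. The 3x3 kernel is a box blur; the separable [1,2,1]/4 kernel is a
- // small binomial (Gaussian-like) filter. Each builder already takes 'image' as argument.
- /*
- const boxBlur3x3 = conv2D([
-     1, 1, 1,
-     1, 1, 1,
-     1, 1, 1
- ], 1 / 9).build();
-
- const smoothX = convX([1, 2, 1], 1 / 4).build(); // horizontal pass
- const smoothY = convY([1, 2, 1], 1 / 4).build(); // vertical pass
- */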
-
- /***/ }),
-
- /***/ 1001:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_95546__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_95546__.d(__nested_webpack_exports__, {
- /* harmony export */ c: () => (/* binding */ SpeedyGL)
- /* harmony export */ });
- /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_95546__(9037);
- /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_95546__(2199);
- /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_95546__(3211);
- /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_95546__(9192);
- /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_95546__(8581);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-gl.js
- * A wrapper around the WebGL Rendering Context
- */
-
-
-
-
-
-
-
- /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
-
- // Constants
- const SINGLETON_KEY = Symbol();
- const DEFAULT_POWER_PREFERENCE = 'default';
-
- //
- // We use a small canvas to improve the performance
- // of createImageBitmap() on Firefox.
- //
- // A large canvas (2048x2048) causes a FPS drop, even
- // if we only extract a small region of it (this is
- // unlike Chrome, which is fast).
- //
- // Note: we automatically increase the size of the
- // canvas (as needed) when rendering to it.
- //
- const CANVAS_WIDTH = 16,
- CANVAS_HEIGHT = 16;
-
- /** @type {SpeedyGL} Singleton */
- let instance = null;
-
- /** @type {PowerPreference} power preference */
- let powerPreference = DEFAULT_POWER_PREFERENCE;
-
- /**
- * A wrapper around a WebGL Rendering Context
- */
- class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c {
- /**
- * Constructor
- * @param {Symbol} key
- * @private
- */
- constructor(key) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
- super();
-
- /** @type {boolean} internal flag */
- this._reinitializeOnContextLoss = true;
-
- /** @type {HTMLCanvasElement} internal canvas */
- this._canvas = this._createCanvas(this._reinitialize.bind(this));
-
- /** @type {WebGL2RenderingContext} WebGL rendering context */
- this._gl = this._createContext(this._canvas);
-
- /** @type {string} vendor string of the video driver */
- this._vendor = '';
-
- /** @type {string} renderer string of the video driver */
- this._renderer = '';
-
- // read driver info
- this._readDriverInfo();
-
- // log driver info
- if (_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic') this._logDriverInfo();
- }
-
- /**
- * Get Singleton
- * @returns {SpeedyGL}
- */
- static get instance() {
- return instance || (instance = new SpeedyGL(SINGLETON_KEY));
- }
-
- /**
- * The WebGL Rendering Context
- * Be careful not to cache this rendering context, as it may be lost!
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._gl;
- }
-
- /**
- * The internal canvas
- * @returns {HTMLCanvasElement}
- */
- get canvas() {
- return this._canvas;
- }
-
- /**
- * Renderer string of the video driver
- * @returns {string}
- */
- get renderer() {
- return this._renderer;
- }
-
- /**
- * Vendor string of the video driver
- * @returns {string}
- */
- get vendor() {
- return this._vendor;
- }
-
- /**
- * Create a WebGL-capable canvas
- * @param {Function} reinitialize to be called if we get a WebGL context loss event
- * @returns {HTMLCanvasElement}
- */
- _createCanvas(reinitialize) {
- const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
- canvas.addEventListener('webglcontextlost', ev => {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
- setTimeout(reinitialize, 0);
- ev.preventDefault();
- }, false);
-
- /*canvas.addEventListener('webglcontextrestored', ev => {
- Utils.warning(`Restored WebGL2 context`);
- ev.preventDefault();
- }, false);*/
-
- return canvas;
- }
-
- /**
- * Create a WebGL2 Rendering Context
- * @param {HTMLCanvasElement} canvas
- * @returns {WebGL2RenderingContext}
- */
- _createContext(canvas) {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
-
- // does the browser support WebGL2?
- if (typeof WebGL2RenderingContext === 'undefined') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please update your system.`);
- const gl = canvas.getContext('webgl2', {
- premultipliedAlpha: false,
- preserveDrawingBuffer: false,
- powerPreference: powerPreference,
- alpha: true,
- // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
- antialias: false,
- depth: false,
- stencil: false,
- desynchronized: true
- });
- if (!gl) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
- return gl;
- }
-
- /**
- * Reinitialize WebGL
- */
- _reinitialize() {
- // disable reinitialization?
- if (!this._reinitializeOnContextLoss) return;
-
- // warning
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
-
- // create new canvas
- this._canvas.remove();
- this._canvas = this._createCanvas(this._reinitialize.bind(this));
-
- // create new context
- this._gl = this._createContext(this._canvas);
-
- // is this needed?
- this._readDriverInfo();
-
- // notify observers: we have a new context!
- // we need to recreate all textures...
- this._notify();
- }
-
- /**
- * Read debugging information about the video driver of the user
- */
- _readDriverInfo() {
- // Depending on the privacy settings of the browser, this information
- // may be unavailable. When available, it may not be entirely correct.
- // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
- const gl = this._gl;
- let debugInfo = null;
- if (navigator.userAgent.includes('Firefox')) {
- this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
- this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
- } else if (null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
- this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
- this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
- } else {
- this._vendor = ''; // unavailable information
- this._renderer = '';
- }
- }
-
- /**
- * Log debugging information about the video driver and the platform
- */
- _logDriverInfo() {
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
- _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
- }
-
- /**
- * Lose the WebGL context. This is used to manually
- * free resources, and also for purposes of testing
- * @returns {WEBGL_lose_context}
- */
- loseContext() {
- const gl = this._gl;
-
- // find the appropriate extension
- const ext = gl.getExtension('WEBGL_lose_context');
- if (!ext) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
-
- // nothing to do?
- if (gl.isContextLost()) return ext;
-
- // disable reinitialization
- this._reinitializeOnContextLoss = false;
-
- // lose context
- ext.loseContext();
-
- // done!
- return ext;
- }
-
- /**
- * Lose & restore the WebGL context
- * @param {number} [secondsToRestore]
- * @returns {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
- */
- loseAndRestoreContext(secondsToRestore = 1) {
- const ms = Math.max(secondsToRestore, 0) * 1000;
- const ext = this.loseContext();
- return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
- setTimeout(() => {
- //ext.restoreContext();
- this._reinitializeOnContextLoss = true;
- this._reinitialize();
- setTimeout(() => resolve(ext), 0); // next frame
- }, ms);
- });
- }
-
- /**
- * Power preference for the WebGL context
- * @returns {PowerPreference}
- */
- static get powerPreference() {
- return powerPreference;
- }
-
- /**
- * Power preference for the WebGL context
- * @param {PowerPreference} value
- */
- static set powerPreference(value) {
- // validate
- if (!(value === 'default' || value === 'low-power' || value === 'high-performance')) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
-
- // the power preference should be set before we create the WebGL context
- if (instance == null || powerPreference !== value) {
- powerPreference = value;
-
- // recreate the context if it already exists. Experimental.
- if (instance != null) instance.loseAndRestoreContext();
- }
- }
-
- /**
- * Check if an instance of SpeedyGL has already been created
- * @returns {boolean}
- */
- static isInitialized() {
- return instance != null;
- }
- }
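-
- // Usage sketch (illustrative, kept as a comment): SpeedyGL is a lazily-created singleton.
- // Set the power preference before the first access to the instance; changing it later
- // recreates the context (marked experimental above).
- /*
- SpeedyGL.powerPreference = 'high-performance';
- const gl = SpeedyGL.instance.gl;         // WebGL2RenderingContext; do not cache it
- console.log(SpeedyGL.instance.renderer); // driver info; may be an empty string
- */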
-
- /***/ }),
-
- /***/ 8581:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_105678__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_105678__.d(__nested_webpack_exports__, {
- /* harmony export */ EM: () => (/* binding */ NotSupportedError),
- /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
- /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
- /* harmony export */ MU: () => (/* binding */ TimeoutError),
- /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
- /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
- /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
- /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
- /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
- /* harmony export */ mB: () => (/* binding */ ParseError),
- /* harmony export */ pf: () => (/* binding */ AssertionError),
- /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
- /* harmony export */ wB: () => (/* binding */ GLError),
- /* harmony export */ xB: () => (/* binding */ SpeedyError)
- /* harmony export */ });
- /* unused harmony export NotImplementedError */
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * errors.js
- * Error classes
- */
-
- /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
-
- /**
- * Generic error class for Speedy
- */
- class SpeedyError extends Error {
- /**
- * Class constructor
- * @param {string} message message text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message, cause = null) {
- super([message, cause ? cause.toString() : '[speedy-vision.js]'].join('\n-> '));
-
- /** @type {SpeedyErrorCause} cause of the error */
- this._cause = cause;
- }
-
- /**
- * Error name
- * @returns {string}
- */
- get name() {
- return this.constructor.name;
- }
-
- /**
- * Set error name (ignored)
- * @param {string} _ ignored
- */
- set name(_) {
- void 0;
- }
-
- /**
- * Get the cause of the error. Available if
- * it has been specified in the constructor
- * @returns {SpeedyErrorCause}
- */
- get cause() {
- return this._cause;
- }
- }
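-
- // Illustration: error messages chain their causes. For example,
- // new IllegalOperationError('Bad pipeline', new GLError('Texture too large'))
- // produces a message approximately equal to:
- // "Illegal operation. Bad pipeline\n-> GLError: WebGL error. Texture too large\n-> [speedy-vision.js]"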
-
- /**
- * Unsupported operation error
- * The requested operation is not supported
- */
- class NotSupportedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Unsupported operation. ${message}`, cause);
- }
- }
-
- /**
- * Not implemented error
- * The called method is not implemented
- */
- class NotImplementedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Method not implemented. ${message}`, cause);
- }
- }
-
- /**
- * WebGL error
- */
- class GLError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`WebGL error. ${message}`, cause);
- }
-
- /**
- * Get an error object describing the latest WebGL error
- * @param {WebGL2RenderingContext} gl
- * @returns {GLError}
- */
- static from(gl) {
- const recognizedErrors = ['NO_ERROR', 'INVALID_ENUM', 'INVALID_VALUE', 'INVALID_OPERATION', 'INVALID_FRAMEBUFFER_OPERATION', 'OUT_OF_MEMORY', 'CONTEXT_LOST_WEBGL'];
- const glError = gl.getError();
- const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
- return new GLError(message);
- }
- }
-
- /**
- * AbstractMethodError
- * Thrown when one tries to call an abstract method
- */
- class AbstractMethodError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Can't call abstract method. ${message}`, cause);
- }
- }
-
- /**
- * Illegal argument error
- * A method has received one or more illegal arguments
- */
- class IllegalArgumentError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Illegal argument. ${message}`, cause);
- }
- }
-
- /**
- * Illegal operation error
- * The method arguments are valid, but the method can't
- * be called due to the current state of the object
- */
- class IllegalOperationError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Illegal operation. ${message}`, cause);
- }
- }
-
- /**
- * Out of memory
- */
- class OutOfMemoryError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Out of memory. ${message}`, cause);
- }
- }
-
- /**
- * File not found error
- */
- class FileNotFoundError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`File not found. ${message}`, cause);
- }
- }
-
- /**
- * Resource not loaded error
- */
- class ResourceNotLoadedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Resource not loaded. ${message}`, cause);
- }
- }
-
- /**
- * Timeout error
- */
- class TimeoutError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Timeout error. ${message}`, cause);
- }
- }
-
- /**
- * Parse error
- */
- class ParseError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Parse error. ${message}`, cause);
- }
- }
-
- /**
- * Assertion error
- */
- class AssertionError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Assertion failed. ${message}`, cause);
- }
- }
-
- /**
- * Access denied
- */
- class AccessDeniedError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`Access denied. ${message}`, cause);
- }
- }
-
- /**
- * WebAssembly error
- */
- class WebAssemblyError extends SpeedyError {
- /**
- * Class constructor
- * @param {string} [message] additional text
- * @param {SpeedyErrorCause} [cause] cause of the error
- */
- constructor(message = '', cause = null) {
- super(`WebAssembly error. ${message}`, cause);
- }
- }
-
- /***/ }),
-
- /***/ 3816:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_113692__) => {
-
- "use strict";
- __nested_webpack_require_113692__.r(__nested_webpack_exports__);
- /* harmony export */ __nested_webpack_require_113692__.d(__nested_webpack_exports__, {
- /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
- /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
- /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
- /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
- /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
- /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
- /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
- /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
- /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
- /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
- /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
- /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
- /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
- /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
- /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
- /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
- /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * globals.js
- * Global constants
- */
-
- // -----------------------------------------------------------------
- // IMAGE PYRAMIDS & SCALE-SPACE
- // -----------------------------------------------------------------
-
- /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
- const PYRAMID_MAX_LEVELS = 8;
-
- /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
- const LOG2_PYRAMID_MAX_SCALE = 0;
-
- /** @type {number} The maximum supported scale for a pyramid level */
- const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
-
- // -----------------------------------------------------------------
- // FIXED-POINT MATH
- // -----------------------------------------------------------------
-
- /** @type {number} How many bits do we use to store fractional data? */
- const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
-
- /** @type {number} Fixed-point resolution */
- const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
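- // e.g. with FIX_BITS = 3, a coordinate such as 10.375 is stored as round(10.375 * FIX_RESOLUTION) = 83
- // and recovered as 83 / FIX_RESOLUTION = 10.375 (this format is used to encode keypoint positions)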
-
- // -----------------------------------------------------------------
- // TEXTURE LIMITS
- // -----------------------------------------------------------------
-
- /** @type {number} Maximum texture length (width, height) */
- const MAX_TEXTURE_LENGTH = (1 << 16 - FIX_BITS) - 1; // must be 2^n - 1 due to keypoint encoding
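- // e.g. with FIX_BITS = 3: MAX_TEXTURE_LENGTH = (1 << 13) - 1 = 8191 (note that `-` binds tighter than `<<`)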
-
- // -----------------------------------------------------------------
- // KEYPOINTS
- // -----------------------------------------------------------------
-
- /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
- const MIN_KEYPOINT_SIZE = 8;
-
- /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
- const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this; equals Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4))
-
- /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
- const MAX_ENCODER_CAPACITY = 8192;
-
- /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
- const DEFAULT_ENCODER_CAPACITY = 2048;
-
- /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
- const LOG2_MAX_DESCRIPTOR_SIZE = 6;
-
- /** @type {number} maximum size of a keypoint descriptor, in bytes */
- const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
-
- /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
- const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
-
- /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
- const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
-
- /** @type {number} Maximum size of the database of keypoints for matching */
- const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
-
- /** @type {number} The maximum distance that can be stored in a match */
- const MATCH_MAX_DISTANCE = (1 << 32 - MATCH_INDEX_BITS) - 1;
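-
- // Worked out with the constants above: MATCH_INDEX_BITS = 32 - (6 + 3) = 23, hence
- // MATCH_INDEX_MASK = MATCH_MAX_INDEX = 2^23 - 1 = 8388607 and MATCH_MAX_DISTANCE = 2^9 - 1 = 511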
-
- // -----------------------------------------------------------------
- // MISC
- // -----------------------------------------------------------------
-
- /** @type {boolean} Are we in a little-endian machine? */
- const LITTLE_ENDIAN = function () {
- return 0xCAFE === new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer)[0];
- }();
-
- /***/ }),
-
- /***/ 3211:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_119275__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_119275__.d(__nested_webpack_exports__, {
- /* harmony export */ c: () => (/* binding */ Observable)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * observable.js
- * Observer design pattern
- */
-
- /**
- * Implementation of the Observer design pattern
- * @abstract
- */
- class Observable {
- /**
- * Constructor
- */
- constructor() {
- /** @type {Function[]} subscribers / callbacks */
- this._subscribers = [];
-
- /** @type {object[]} "this" pointers */
- this._thisptr = [];
-
- /** @type {Array<any[]>} function arguments */
- this._args = [];
- }
-
- /**
- * Add subscriber
- * @param {Function} fn callback
- * @param {object} [thisptr] "this" pointer to be used when invoking the callback
- * @param {...any} args arguments to be passed to the callback
- */
- subscribe(fn, thisptr, ...args) {
- this._subscribers.push(fn);
- this._thisptr.push(thisptr);
- this._args.push(args);
- }
-
- /**
- * Remove subscriber
- * @param {Function} fn previously added callback
- * @param {object} [thisptr] "this" pointer
- */
- unsubscribe(fn, thisptr) {
- for (let j = this._subscribers.length - 1; j >= 0; j--) {
- if (this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
- this._subscribers.splice(j, 1);
- this._thisptr.splice(j, 1);
- this._args.splice(j, 1);
- break;
- }
- }
- }
-
- /**
- * Notify all subscribers about a state change
- * @protected
- */
- _notify() {
- for (let i = 0; i < this._subscribers.length; i++) this._subscribers[i].apply(this._thisptr[i], this._args[i]);
- }
- }
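-
- // Illustrative usage (hypothetical subclass, not part of this module):
- //   class Ticker extends Observable { tick() { this._notify(); } }
- //   const ticker = new Ticker();
- //   ticker.subscribe(count => console.log('tick', count), null, 42); // stores the callback, its "this" pointer and its args
- //   ticker.tick(); // invokes every subscriber with its stored args; logs: tick 42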
-
- /***/ }),
-
- /***/ 6049:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_121659__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_121659__.d(__nested_webpack_exports__, {
- /* harmony export */ f5: () => (/* binding */ ImageFormat),
- /* harmony export */ kQ: () => (/* binding */ PixelComponent),
- /* harmony export */ kg: () => (/* binding */ ColorComponentId),
- /* harmony export */ zu: () => (/* binding */ MediaType)
- /* harmony export */ });
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * types.js
- * Types & formats
- */
-
- /**
- * Media types
- * @enum {Symbol}
- */
- const MediaType = Object.freeze({
- Image: Symbol('Image'),
- Video: Symbol('Video'),
- Canvas: Symbol('Canvas'),
- OffscreenCanvas: Symbol('OffscreenCanvas'),
- Bitmap: Symbol('Bitmap'),
- Data: Symbol('Data')
- });
-
- /**
- * Image formats
- * @enum {Symbol}
- */
- const ImageFormat = Object.freeze({
- RGBA: Symbol('RGBA'),
- GREY: Symbol('GREY')
- });
-
- /**
- * Pixel component (bitwise flags)
- * @typedef {number} PixelComponent
- */
- const PixelComponent = Object.freeze({
- RED: 1,
- GREEN: 2,
- BLUE: 4,
- ALPHA: 8,
- ALL: 15 // = RED | GREEN | BLUE | ALPHA
- });
-
- /**
- * Component ID utility: maps a PixelComponent flag to a zero-based channel index
- */
- const ColorComponentId = Object.freeze({
- [PixelComponent.RED]: 0,
- [PixelComponent.GREEN]: 1,
- [PixelComponent.BLUE]: 2,
- [PixelComponent.ALPHA]: 3
- });
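-
- // e.g. ColorComponentId[PixelComponent.GREEN] === 1, the zero-based index of the green channel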
-
- /***/ }),
-
- /***/ 9037:
- /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_123644__) => {
-
- "use strict";
- /* harmony export */ __nested_webpack_require_123644__.d(__nested_webpack_exports__, {
- /* harmony export */ A: () => (/* binding */ Utils)
- /* harmony export */ });
- /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_123644__(8581);
- /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_123644__(9192);
- /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_123644__(2199);
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * utils.js
- * Generic utilities
- */
-
-
-
-
-
- /**
- * Generic utilities
- */
- class Utils {
- /**
- * Generates a warning
- * @param {string} text message text
- * @param {...string} args optional text
- */
- static warning(text, ...args) {
- //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
- if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.warn('[speedy-vision] ' + text, ...args);
- }
-
- /**
- * Logs a message
- * @param {string} text message text
- * @param {...string} args optional text
- */
- static log(text, ...args) {
- if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.log('[speedy-vision] ' + text, ...args);
- }
-
- /**
- * Assertion
- * @param {boolean} expr expression
- * @param {string} [text] error message
- * @throws {AssertionError}
- */
- static assert(expr, text = '') {
- if (!expr) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
- }
-
- /**
- * Gets the names of the arguments of the specified function
- * @param {Function} fun
- * @returns {string[]}
- */
- static functionArguments(fun) {
- const code = fun.toString();
- const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' : code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>';
- const match = new RegExp(regex).exec(code);
- if (match !== null) {
- const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
- return args.split(',').map(argname => argname.replace(/=.*$/, '').trim() // remove default params & trim
- ).filter(argname => argname // handle trailing commas
- );
- } else throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
- }
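-
- // Illustration: Utils.functionArguments((a, b = 1) => a + b) returns ['a', 'b']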
-
- /**
- * Get all property descriptors from an object,
- * traversing its entire prototype chain
- * @param {object} obj
- * @returns {object}
- */
- static getAllPropertyDescriptors(obj) {
- if (obj) {
- const proto = Object.getPrototypeOf(obj);
- return Object.assign(Object.assign({}, Utils.getAllPropertyDescriptors(proto)), Object.getOwnPropertyDescriptors(obj));
- } else return Object.create(null);
- }
-
- /**
- * Creates a HTMLCanvasElement with the given dimensions
- * @param {number} width in pixels
- * @param {number} height in pixels
- * @returns {HTMLCanvasElement}
- */
- static createCanvas(width, height) {
- const canvas = document.createElement('canvas');
- canvas.width = width;
- canvas.height = height;
- return canvas;
- }
-
- /**
- * Generate a 1D gaussian kernel with custom sigma
- * Tip: use kernelSize >= (5 * sigma), kernelSize odd
- * @param {number} sigma gaussian sigma
- * @param {number} [kernelSize] kernel size, odd number
- * @param {boolean} [normalized] normalize entries so that their sum is 1
- * @returns {number[]}
- */
- static gaussianKernel(sigma, kernelSize = 0, normalized = true) {
- /*
- * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
- *
- * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
- *
- * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
- *
- * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
- * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
- *
- * Setting a constant c := sqrt(2) * sigma, it follows that:
- *
- * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
- */
-
- // default kernel size
- if (kernelSize == 0) {
- kernelSize = Math.ceil(5.0 * sigma) | 0;
- kernelSize += 1 - kernelSize % 2;
- }
-
- // validate input
- kernelSize |= 0;
- if (kernelSize < 1 || kernelSize % 2 == 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);else if (sigma <= 0.0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
-
- // function erf(x) = -erf(-x) can be approximated numerically. See:
- // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
- const kernel = new Array(kernelSize);
-
- // set constants
- const N = kernelSize >> 1; // integer (floor, div 2)
- const c = +sigma * 1.4142135623730951; // sigma * sqrt(2)
- const m = 0.3275911;
- const a1 = 0.254829592;
- const a2 = -0.284496736;
- const a3 = 1.421413741;
- const a4 = -1.453152027;
- const a5 = 1.061405429;
-
- // compute the kernel
- let sum = 0.0;
- for (let j = 0; j < kernelSize; j++) {
- let xa = (j - N + 0.5) / c;
- let xb = (j - N - 0.5) / c;
- let sa = 1.0,
- sb = 1.0;
- if (xa < 0.0) {
- sa = -1.0;
- xa = -xa;
- }
- if (xb < 0.0) {
- sb = -1.0;
- xb = -xb;
- }
- const ta = 1.0 / (1.0 + m * xa);
- const tb = 1.0 / (1.0 + m * xb);
- const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
- const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
- const ya = 1.0 - pa * Math.exp(-xa * xa);
- const yb = 1.0 - pb * Math.exp(-xb * xb);
- const erfa = sa * ya;
- const erfb = sb * yb;
- const fp = (erfa - erfb) / (2.0 * c);
- kernel[j] = fp;
- sum += fp;
- }
-
- // normalize the kernel
- if (normalized) {
- for (let j = 0; j < kernelSize; j++) kernel[j] /= sum;
- }
-
- // done!
- return kernel;
- }
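-
- // Illustration: Utils.gaussianKernel(1.0, 5) returns approximately
- // [ 0.061, 0.245, 0.388, 0.245, 0.061 ] (a normalized 5-tap kernel with sigma = 1)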
-
- /**
- * Generate a 2D kernel in column-major format using two separable 1D kernels
- * @param {number[]} ka 1D kernel
- * @param {number[]} [kb] second 1D kernel (defaults to ka)
- * @returns {number[]}
- */
- static kernel2d(ka, kb = ka) {
- const ksize = ka.length;
- Utils.assert(ka.length == kb.length);
- Utils.assert(ksize >= 1 && ksize % 2 == 1);
-
- // compute the outer product ka x kb
- let kernel2d = new Array(ksize * ksize),
- k = 0;
- for (let col = 0; col < ksize; col++) {
- for (let row = 0; row < ksize; row++) kernel2d[k++] = ka[row] * kb[col];
- }
- return kernel2d;
- }
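-
- // Illustration: Utils.kernel2d([1, 2, 1]) returns the outer product
- // [ 1, 2, 1, 2, 4, 2, 1, 2, 1 ], i.e., a 3x3 kernel in column-major format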
-
- /**
- * Cartesian product a x b: [ [ai, bj] for all i, j ]
- * @param {number[]} a
- * @param {number[]} b
- * @returns {Array<[number,number]>}
- */
- static cartesian(a, b) {
- return [].concat(...a.map(a => b.map(b => [a, b])));
- }
-
- /**
- * Symmetric range
- * @param {number} n non-negative integer
- * @returns {number[]} [ -n, ..., n ]
- */
- static symmetricRange(n) {
- if ((n |= 0) < 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
- return [...Array(2 * n + 1).keys()].map(x => x - n);
- }
-
- /**
- * Compute the [0, n) range of integers
- * @param {number} n positive integer
- * @returns {number[]} [ 0, 1, ..., n-1 ]
- */
- static range(n) {
- if ((n |= 0) <= 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
- return [...Array(n).keys()];
- }
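-
- // Illustration: Utils.symmetricRange(2) returns [-2, -1, 0, 1, 2], Utils.range(3) returns [0, 1, 2]
- // and Utils.cartesian([1, 2], [3, 4]) returns [[1, 3], [1, 4], [2, 3], [2, 4]]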
-
- /**
- * Shuffle in-place
- * @template T
- * @param {T[]} arr
- * @returns {T[]} arr
- */
- static shuffle(arr) {
- const len = arr.length;
- const m = len - 1;
-
- // Fisher-Yates shuffle
- for (let i = 0; i < m; i++) {
- const j = i + (Math.random() * (len - i) | 0); // i <= j < arr.length
-
- if (i !== j) {
- const t = arr[i];
- arr[i] = arr[j];
- arr[j] = t;
- }
- }
- return arr;
- }
-
- /**
- * Flatten an array (1 level only)
- * @template U
- * @param {U[]} array
- * @returns {U[]}
- */
- static flatten(array) {
- //return array.flat();
- //return array.reduce((arr, val) => arr.concat(val), []);
-
- const flat = [];
- for (let i = 0, n = array.length; i < n; i++) {
- const entry = array[i];
- if (Array.isArray(entry)) {
- for (let j = 0, m = entry.length; j < m; j++) flat.push(entry[j]);
- } else flat.push(entry);
- }
- return flat;
- }
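-
- // Illustration: Utils.flatten([1, [2, 3], [4]]) returns [1, 2, 3, 4] (only one level is flattened)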
-
- /**
- * Decode a 16-bit float from a
- * unsigned 16-bit integer
- * @param {number} uint16
- * @returns {number}
- */
- static decodeFloat16(uint16) {
- // decode according to sec 2.1.2
- // 16-Bit Floating Point Numbers
- // of the OpenGL ES 3 spec
- const s = (uint16 & 0xFFFF) >> 15; // sign bit
- const e = (uint16 & 0x7FFF) >> 10; // exponent
- const m = uint16 & 0x3FF; // mantissa
- const sign = 1 - 2 * s; // (-1)^s
-
- if (e == 0) return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
- else if (e == 31) return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
- const f = e >= 15 ? 1 << e - 15 : 1.0 / (1 << 15 - e); // 2^(e-15)
- return sign * f * (1.0 + m * 0.0009765625); // normal
- }
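-
- // Worked examples: decodeFloat16(0x3C00) = 1.0, decodeFloat16(0xC000) = -2.0
- // and decodeFloat16(0x7BFF) = 65504, the largest finite half-float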
-
- /**
- * Wrapper around getUserMedia()
- * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
- * @returns {SpeedyPromise<HTMLVideoElement>}
- */
- static requestCameraStream(constraints = {
- audio: false,
- video: true
- }) {
- Utils.log('Accessing the webcam...');
- if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
- return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
- navigator.mediaDevices.getUserMedia(constraints).then(stream => {
- const video = document.createElement('video');
- video.onloadedmetadata = () => {
- video.play();
- Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
- resolve(video);
- };
- video.setAttribute('playsinline', '');
- video.setAttribute('autoplay', '');
- if (constraints.audio === false || constraints.audio === undefined) video.setAttribute('muted', '');
- video.srcObject = stream;
- }).catch(err => {
- if (err.name === 'NotAllowedError') {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(`Please give access to the camera and reload the page.`, err));
- } else if (err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(`Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`, err));
- } else {
- reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(`Can't access the webcam.`, err));
- }
- });
- });
- }
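-
- // Illustrative usage (the constraints are passed through to getUserMedia):
- //   Utils.requestCameraStream({ video: { width: 640, height: 480 }, audio: false })
- //        .then(video => { /* use the HTMLVideoElement */ });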
-
- /**
- * Format binary data as a string with hex values
- * @param {ArrayBuffer} bytes
- * @returns {string}
- */
- static formatBinaryData(bytes) {
- const uint8 = new Uint8Array(bytes);
- const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
- return array.join(' ');
- }
-
- /**
- * Returns a string containing platform brand information
- * @returns {string}
- */
- static platformString() {
- // navigator.userAgent is easily and often spoofed, and thus is unreliable
-
- // use the NavigatorUAData interface if available
- if (typeof navigator.userAgentData === 'object') {
- // use only low entropy data, so we don't need to ask the permission
- // of the user to read this string
- return navigator.userAgentData.platform;
- }
-
- // navigator.platform is deprecated and can be spoofed on Firefox, but,
- // at the time of this writing, there appears to be no alternative.
- return navigator.platform;
- }
- }
-
- /***/ }),
-
- /***/ 5235:
- /***/ ((module, __unused_webpack_exports, __nested_webpack_require_136472__) => {
-
- var map = {
- "./colors.glsl": 8609,
- "./filters.glsl": 4672,
- "./fixed-point.glsl": 9778,
- "./float16.glsl": 8710,
- "./global.glsl": 2434,
- "./int32.glsl": 439,
- "./keypoint-descriptors.glsl": 8545,
- "./keypoint-matches.glsl": 6762,
- "./keypoints.glsl": 7639,
- "./math.glsl": 431,
- "./platform.glsl": 6822,
- "./pyramids.glsl": 2728,
- "./subpixel.glsl": 6823
- };
-
-
- function webpackContext(req) {
- var id = webpackContextResolve(req);
- return __nested_webpack_require_136472__(id);
- }
- function webpackContextResolve(req) {
- if(!__nested_webpack_require_136472__.o(map, req)) {
- var e = new Error("Cannot find module '" + req + "'");
- e.code = 'MODULE_NOT_FOUND';
- throw e;
- }
- return map[req];
- }
- webpackContext.keys = function webpackContextKeys() {
- return Object.keys(map);
- };
- webpackContext.resolve = webpackContextResolve;
- module.exports = webpackContext;
- webpackContext.id = 5235;
-
- /***/ }),
-
- /***/ 4606:
- /***/ ((module, __unused_webpack_exports, __nested_webpack_require_137422__) => {
-
- var map = {
- "./filters/convolution": 1672,
- "./filters/convolution.js": 1672,
- "./filters/convolution1d.glsl": 8211,
- "./filters/convolution2d.glsl": 7360,
- "./filters/fast-median.glsl": 8191,
- "./filters/nightvision.glsl": 4438,
- "./filters/normalize-image.glsl": 5867,
- "./filters/rgb2grey.glsl": 9252,
- "./include/colors.glsl": 8609,
- "./include/filters.glsl": 4672,
- "./include/fixed-point.glsl": 9778,
- "./include/float16.glsl": 8710,
- "./include/global.glsl": 2434,
- "./include/int32.glsl": 439,
- "./include/keypoint-descriptors.glsl": 8545,
- "./include/keypoint-matches.glsl": 6762,
- "./include/keypoints.glsl": 7639,
- "./include/math.glsl": 431,
- "./include/platform.glsl": 6822,
- "./include/pyramids.glsl": 2728,
- "./include/subpixel.glsl": 6823,
- "./keypoints/allocate-descriptors.glsl": 1341,
- "./keypoints/allocate-extra.glsl": 7833,
- "./keypoints/apply-homography.glsl": 2352,
- "./keypoints/bf-knn.glsl": 7541,
- "./keypoints/clip-border.glsl": 4868,
- "./keypoints/clip.glsl": 5591,
- "./keypoints/distance-filter.glsl": 191,
- "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
- "./keypoints/encode-keypoint-offsets.glsl": 336,
- "./keypoints/encode-keypoint-positions.glsl": 8968,
- "./keypoints/encode-keypoint-properties.glsl": 1733,
- "./keypoints/encode-keypoints.glsl": 9674,
- "./keypoints/encode-null-keypoints.glsl": 2090,
- "./keypoints/fast.glsl": 1855,
- "./keypoints/fast.vs.glsl": 4824,
- "./keypoints/hamming-distance-filter.glsl": 2381,
- "./keypoints/harris-cutoff.glsl": 6060,
- "./keypoints/harris.glsl": 9974,
- "./keypoints/knn-init.glsl": 3047,
- "./keypoints/knn-transfer.glsl": 3266,
- "./keypoints/laplacian.glsl": 8018,
- "./keypoints/lk.glsl": 3168,
- "./keypoints/lookup-of-locations.glsl": 3890,
- "./keypoints/lookup-of-locations.vs.glsl": 8647,
- "./keypoints/lsh-knn.glsl": 4776,
- "./keypoints/mix-keypoints.glsl": 2648,
- "./keypoints/nonmax-scale.glsl": 8825,
- "./keypoints/nonmax-space.glsl": 5693,
- "./keypoints/nonmax-suppression.glsl": 9280,
- "./keypoints/orb-descriptor.glsl": 9108,
- "./keypoints/orb-orientation.glsl": 7137,
- "./keypoints/refine-scale.glsl": 9739,
- "./keypoints/score-findmax.glsl": 8231,
- "./keypoints/shuffle.glsl": 2518,
- "./keypoints/sort-keypoints.glsl": 8096,
- "./keypoints/subpixel-refinement.glsl": 5795,
- "./keypoints/transfer-flow.glsl": 3169,
- "./keypoints/transfer-orientation.glsl": 1337,
- "./keypoints/transfer-to-extra.glsl": 6187,
- "./keypoints/upload-keypoints.glsl": 477,
- "./pyramids/downsample2.glsl": 4050,
- "./pyramids/upsample2.glsl": 5545,
- "./transforms/additive-mix.glsl": 7113,
- "./transforms/resize.glsl": 1202,
- "./transforms/warp-perspective.glsl": 7971,
- "./utils/copy-components.glsl": 6122,
- "./utils/copy-raster.glsl": 371,
- "./utils/copy.glsl": 7307,
- "./utils/fill-components.glsl": 8614,
- "./utils/fill.glsl": 6271,
- "./utils/flip-y.vs.glsl": 3016,
- "./utils/scan-minmax2d.glsl": 3630,
- "./utils/sobel-derivatives.glsl": 8508,
- "./utils/sobel-derivatives.vs.glsl": 8073
- };
-
-
- function webpackContext(req) {
- var id = webpackContextResolve(req);
- return __nested_webpack_require_137422__(id);
- }
- function webpackContextResolve(req) {
- if(!__nested_webpack_require_137422__.o(map, req)) {
- var e = new Error("Cannot find module '" + req + "'");
- e.code = 'MODULE_NOT_FOUND';
- throw e;
- }
- return map[req];
- }
- webpackContext.keys = function webpackContextKeys() {
- return Object.keys(map);
- };
- webpackContext.resolve = webpackContextResolve;
- module.exports = webpackContext;
- webpackContext.id = 4606;
-
- /***/ }),
-
- /***/ 8211:
- /***/ ((module) => {
-
- module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 7360:
- /***/ ((module) => {
-
- module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 8191:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 4438:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
-
- /***/ }),
-
- /***/ 5867:
- /***/ ((module) => {
-
- module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
-
- /***/ }),
-
- /***/ 9252:
- /***/ ((module) => {
-
- module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 8609:
- /***/ ((module) => {
-
- module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
-
- /***/ }),
-
- /***/ 4672:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
-
- /***/ }),
-
- /***/ 9778:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((x) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
-
- /***/ }),
-
- /***/ 8710:
- /***/ ((module) => {
-
- module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
-
- /***/ }),
-
- /***/ 2434:
- /***/ ((module) => {
-
- module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
-
- /***/ }),
-
- /***/ 439:
- /***/ ((module) => {
-
- module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
-
- /***/ }),
-
- /***/ 8545:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
-
- /***/ }),
-
- /***/ 6762:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
-
- /***/ }),
-
- /***/ 7639:
- /***/ ((module) => {
-
- module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
-
- /***/ }),
-
- /***/ 431:
- /***/ ((module) => {
-
- module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
-
- /***/ }),
-
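The fastAtan/fastAtan2 pair above replaces the trigonometric arctangent with the cheap polynomial approximation (π/4)·x + 0.273·x·(1 − |x|), extended to the full plane via the reciprocal identity; the error stays within a few thousandths of a radian. A direct JavaScript port, as a sketch for checking the approximation against Math.atan2:

```js
const PI = Math.PI, PI_OVER_2 = PI / 2, PI_OVER_4 = PI / 4;

function fastAtan(x) {
  const w = 1 - Math.abs(x);
  return w >= 0
    ? (PI_OVER_4 + 0.273 * w) * x                                                  // |x| <= 1
    : Math.sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273 * (1 - Math.abs(1 / x))) / x;  // |x| > 1
}

function fastAtan2(y, x) {
  return x === 0 ? PI_OVER_2 * Math.sign(y)
                 : fastAtan(y / x) + (x < 0 ? PI * Math.sign(y) : 0);
}

console.log(fastAtan2(1, 1), Math.atan2(1, 1)); // both ~0.7854
```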
- /***/ 6822:
- /***/ ((module) => {
-
- module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
-
- /***/ }),
-
- /***/ 2728:
- /***/ ((module) => {
-
- module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
-
- /***/ }),
-
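pyramids.glsl stores a keypoint's pyramid level in a single 8-bit channel by mapping it linearly into [0, 1]; decodeLod inverts that mapping, and encoded values at or above 1.0 (the value carried by a null keypoint) decode to zero. A round-trip sketch using assumed values for the two build-time constants that the @...@ placeholders substitute:

```js
// Assumed build-time constants (the real values are injected by the shader preprocessor).
const PYRAMID_MAX_LEVELS = 8, LOG2_PYRAMID_MAX_SCALE = 2;

const encodeLod = lod =>
  (LOG2_PYRAMID_MAX_SCALE + lod) / (LOG2_PYRAMID_MAX_SCALE + PYRAMID_MAX_LEVELS);

function decodeLod(encodedLod) {
  const lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;
  return encodedLod >= 1 ? 0 : lod; // step(1.0, encodedLod) zeroes out-of-range values
}

console.log(decodeLod(encodeLod(3.5))); // 3.5, up to float rounding
```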
- /***/ 6823:
- /***/ ((module) => {
-
- module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
-
- /***/ }),
-
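subpixelAtBI above fetches the four texels around a non-integer position and blends them with bilinear weights built from the fractional coordinates. The same computation in scalar form, as a sketch in which sample(x, y) stands in for a texel fetch:

```js
function subpixelBilinear(sample, px, py) {
  const x0 = Math.floor(px), y0 = Math.floor(py);
  const fx = px - x0, fy = py - y0; // fractional part -> interpolation weights
  return sample(x0,     y0)     * (1 - fx) * (1 - fy) +
         sample(x0 + 1, y0)     * fx       * (1 - fy) +
         sample(x0,     y0 + 1) * (1 - fx) * fy       +
         sample(x0 + 1, y0 + 1) * fx       * fy;
}
```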
- /***/ 1341:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 7833:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 2352:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
-
- /***/ }),
-
- /***/ 7541:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
-
- /***/ }),
-
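The matcher above (module 7541) scans NUMBER_OF_KEYPOINTS_PER_PASS database keypoints per pass and keeps the candidate with the smallest descriptor distance, but only accepts a candidate that is also strictly worse than the match stored in encodedFilters (ties broken by keypoint index); filtering against a previously found match is what lets repeated passes return progressively farther neighbours. The update rule in isolation, as a JavaScript sketch where best and filter are {index, dist} records:

```js
function updateBestMatch(best, filter, dbIndex, dist) {
  const beatsBest    = dist < best.dist   || (dist === best.dist   && dbIndex > best.index);
  const passesFilter = dist > filter.dist || (dist === filter.dist && dbIndex < filter.index);
  return (beatsBest && passesFilter) ? { index: dbIndex, dist } : best;
}
```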
- /***/ 4868:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
-
- /***/ }),
-
- /***/ 5591:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
-
- /***/ }),
-
- /***/ 191:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
-
- /***/ 5467:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
-
- /***/ }),
-
- /***/ 336:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
-
- /***/ }),
-
- /***/ 8968:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
-
- /***/ }),
-
- /***/ 1733:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
-
- /***/ }),
-
- /***/ 9674:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
-
- /***/ }),
-
- /***/ 2090:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
-
- /***/ }),
-
- /***/ 1855:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
-
- /***/ }),
-
- /***/ 4824:
- /***/ ((module) => {
-
- module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
-
- /***/ }),
-
- /***/ 2381:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
-
- /***/ 6060:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
-
- /***/ }),
-
- /***/ 9974:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
-
- /***/ }),
-
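Module 9974 accumulates the 2×2 structure tensor over a Gaussian-weighted window (h.x = Σ w·Ix², h.y = Σ w·Ix·Iy, h.z = Σ w·Iy²) and scores each pixel with the tensor's smaller eigenvalue, i.e. the Shi–Tomasi corner measure, normalized by the window area. The closed form behind the response expression above, in isolation:

```js
// Smaller eigenvalue of the symmetric 2x2 matrix [[hxx, hxy], [hxy, hyy]].
const minEigenvalue = (hxx, hxy, hyy) =>
  0.5 * (hxx + hyy - Math.sqrt((hxx - hyy) ** 2 + 4 * hxy * hxy));
```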
- /***/ 3047:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
-
- /***/ }),
-
- /***/ 3266:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
-
- /***/ }),
-
- /***/ 8018:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
-
- /***/ }),
-
- /***/ 3168:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
-
- /***/ }),
-
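Module 3168 runs one level of pyramidal Lucas–Kanade: it builds the structure tensor G of the window around the keypoint in the previous frame (the harris vector), then iterates delta = G⁻¹·b, where b is the brightness mismatch returned by computeMismatch(), accumulating the steps into localGuess until a step falls below epsilon or the iteration budget runs out. The 2×2 solve written out in scalar form, as a sketch:

```js
// Solve G * d = b for a symmetric 2x2 structure tensor G = [[gxx, gxy], [gxy, gyy]].
function lkStep(gxx, gxy, gyy, bx, by) {
  const det = gxx * gyy - gxy * gxy;
  if (Math.abs(det) < 1e-7) return [0, 0];       // ill-conditioned window: no update
  return [(gyy * bx - gxy * by) / det,           // d = G^-1 * b
          (gxx * by - gxy * bx) / det];
}
```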
- /***/ 3890:
- /***/ ((module) => {
-
- module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
-
- /***/ }),
-
- /***/ 8647:
- /***/ ((module) => {
-
- module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
-
- /***/ }),
-
- /***/ 4776:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
-
- /***/ }),
-
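The LSH matcher above (module 4776) hashes each binary descriptor by sampling HASH_SIZE individual bits, whose indices come from the LSHSequences uniform block, then multi-probes neighbouring buckets by XOR-ing that hash with the precomputed SWAP masks (Hamming weight 0, 1 or 2, with LEVEL choosing how many probes to make). The hashing step alone, as a JavaScript sketch where bits lists the sampled bit indices and the descriptor is a byte array:

```js
function descriptorHash(descriptor /* Uint8Array */, bits /* e.g. [5, 71, 130, ...] */) {
  let hash = 0;
  for (const bit of bits) {
    const byte = bit >> 3, mask = 1 << (bit & 7);                // locate the sampled bit
    hash = (hash << 1) | ((descriptor[byte] & mask) !== 0 ? 1 : 0);
  }
  return hash;
}
```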
- /***/ 2648:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
-
- /***/ }),
-
- /***/ 8825:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
-
- /***/ }),
-
- /***/ 5693:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
-
- /***/ }),
-
- /***/ 9280:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
-
- /***/ }),
-
- /***/ 9108:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-8,-13)
,\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
-
- /***/ }),
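Aside: the shader in module 9108 above computes a 256-bit ORB-style binary descriptor: each of its 32 bytes packs eight intensity comparisons between point pairs that have been rotated to the keypoint's orientation and scaled to its level of detail (bits |= uint(a.g < b.g) << j). The plain-JavaScript sketch below illustrates only that bit-packing step; the sample() accessor and the pairs array are made-up stand-ins, not the shader's actual sampling or its pat31 pattern.

// Illustrative sketch: pack 8 binary intensity tests into one descriptor byte.
// `sample(x, y)` is a hypothetical smoothed-intensity accessor; `pairs` holds
// 8 point pairs [px, py, qx, qy] already rotated to the keypoint orientation.
function descriptorByte(sample, cx, cy, pairs) {
  let bits = 0;
  for (let j = 0; j < 8; j++) {
    const [px, py, qx, qy] = pairs[j];
    const test = sample(cx + px, cy + py) < sample(cx + qx, cy + qy) ? 1 : 0;
    bits |= test << j; // one comparison contributes one bit, as in the shader
  }
  return bits; // 0..255, i.e. one byte of the 32-byte descriptor
}

// Usage with a toy gradient image: intensity grows with x, so a pair whose
// second point lies further right yields a 1 bit.
const sample = (x, y) => x;
const pairs = [
  [-1, 0, 1, 0], [0, -1, 2, 1], [1, 0, -1, 0], [2, 2, 3, -1],
  [-2, 1, 0, 0], [0, 0, -3, 2], [1, 1, 2, 0], [-1, -1, 1, 1]
];
console.log(descriptorByte(sample, 8, 8, pairs).toString(2)); // "11011011"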
-
- /***/ 7137:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(10,-5)
,P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
-
- /***/ }),
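Aside: module 7137 above estimates a keypoint's dominant orientation with the intensity-centroid technique: it accumulates intensity-weighted offsets over a disk whose radius is adapted to the keypoint's scale and takes the angle of the resulting vector (fastAtan2(m.y, m.x)). A minimal CPU-side sketch of the same idea follows; the pixel() accessor and the tiny synthetic patch are illustrative assumptions, not part of the library.

// Illustrative sketch: orientation of a patch via the intensity centroid.
// `pixel(x, y)` is a hypothetical grayscale accessor returning a value in [0, 1].
function patchOrientation(pixel, cx, cy, radius) {
  let mx = 0.0, my = 0.0;
  for (let v = -radius; v <= radius; v++) {
    for (let u = -radius; u <= radius; u++) {
      if (u * u + v * v > radius * radius) continue; // keep a circular patch
      const intensity = pixel(cx + u, cy + v);
      mx += u * intensity; // first-order moment in x
      my += v * intensity; // first-order moment in y
    }
  }
  return Math.atan2(my, mx); // angle of the centroid = dominant orientation
}

// Usage: a tiny synthetic 5x5 patch that is brighter on its right side.
const data = [
  0, 0, 0, 1, 1,
  0, 0, 0, 1, 1,
  0, 0, 0, 1, 1,
  0, 0, 0, 1, 1,
  0, 0, 0, 1, 1
];
const pixel = (x, y) => (x >= 0 && x < 5 && y >= 0 && y < 5) ? data[y * 5 + x] : 0;
console.log(patchOrientation(pixel, 2, 2, 2)); // 0: centroid lies to the right (+x)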
-
- /***/ 9739:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
-
- /***/ }),
-
- /***/ 8231:
- /***/ ((module) => {
-
- module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
-
- /***/ }),
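Aside: module 8231 above is one pass of a logarithmic reduction: at iteration i, every pixel compares its encoded corner score against three partners a power-of-two stride away inside a cluster of side 2^(i+1) and keeps the best, so repeated passes leave each cluster filled with its strongest corner. Below is a 1D JavaScript analogue of one such pass, purely illustrative; the real shader works on 2D clusters of encoded float16 scores.

// Illustrative 1D analogue of the log-step reduction: after pass i, every
// cluster of length 2^(i+1) holds the cluster's maximum in each of its slots.
function reductionPass(scores, iteration) {
  const jump = 1 << iteration;
  const clusterMask = (jump << 1) - 1;
  const out = scores.slice();
  for (let idx = 0; idx < scores.length; idx++) {
    const clusterPos = idx & ~clusterMask;
    const partner = clusterPos + ((idx - clusterPos + jump) & clusterMask);
    out[idx] = Math.max(scores[idx], scores[partner % scores.length]); // % mirrors the shader's wrap
  }
  return out;
}

// Usage: two passes over 4 values leave the global maximum everywhere.
let s = [3, 7, 1, 5];
s = reductionPass(s, 0); // [7, 7, 5, 5]
s = reductionPass(s, 1); // [7, 7, 7, 7]
console.log(s);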
-
- /***/ 2518:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
-
- /***/ }),
-
- /***/ 8096:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
-
- /***/ }),
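Aside: STAGE 2 of module 8096 above is the merge step of a GPU merge sort over keypoint scores: each output slot independently picks the element of rank k from the union of two already-sorted half-blocks, using a fixed number of branchless halving steps (selectKth). The JavaScript sketch below reaches the same answer with a simple linear merge walk, which is easier to read but not how the shader does it; ascending order stands in for the shader's best-score-first order.

// Illustrative sketch: k-th element (0-based) of the union of two sorted arrays.
function selectKth(a, b, k) {
  // Assumes 0 <= k < a.length + b.length and that both arrays are sorted.
  let i = 0, j = 0, value;
  for (let n = 0; n <= k; n++) {
    const takeA = j >= b.length || (i < a.length && a[i] <= b[j]);
    value = takeA ? a[i++] : b[j++]; // advance whichever front is smaller
  }
  return value;
}

// Usage: one merge step over two sorted blocks.
console.log(selectKth([1, 4, 9], [2, 3, 10], 3)); // 4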
-
- /***/ 5795:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
-
- /***/ }),
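Aside: with METHOD 0, module 5795 above refines a keypoint's position by fitting a parabola through three response samples along each axis and moving to its vertex at -b/(2a), but only when the curvature is negative. A one-axis JavaScript sketch of that standard refinement (the sample values are illustrative):

// Illustrative sketch: 1D quadratic (parabolic) peak refinement.
// rm, r0, rp are the responses at offsets -1, 0 and +1 around the best sample.
function quadraticPeakOffset(rm, r0, rp) {
  const a = 0.5 * (rm - 2.0 * r0 + rp); // curvature term
  const b = 0.5 * (rp - rm);            // slope term
  // A true maximum needs negative curvature; otherwise keep the integer peak.
  return a < -1e-5 ? -0.5 * (b / a) : 0.0;
}

// Usage: responses 0.2, 0.9, 0.6 peak slightly to the right of the center sample.
console.log(quadraticPeakOffset(0.2, 0.9, 0.6)); // ≈ 0.2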
-
- /***/ 3169:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
-
- /***/ }),
-
- /***/ 1337:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
-
- /***/ }),
-
- /***/ 6187:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
-
- /***/ }),
-
- /***/ 477:
- /***/ ((module) => {
-
- module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
-
- /***/ }),
-
- /***/ 4050:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
-
- /***/ }),
-
- /***/ 5545:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
-
- /***/ }),
-
- /***/ 7113:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
-
- /***/ }),
-
- /***/ 1202:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
-
- /***/ }),
-
- /***/ 7971:
- /***/ ((module) => {
-
- module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
-
- /***/ }),
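Aside: module 7971 above warps the output image by mapping every destination pixel through an inverse homography and sampling the source bilinearly; the core operation is the projective transform q = H * (x, y, 1) followed by division by q.z. A minimal JavaScript sketch of that point mapping (row-major 3x3 matrix with illustrative values):

// Illustrative sketch: apply a 3x3 homography (row-major) to a 2D point.
function perspectiveWarp(H, x, y) {
  const qx = H[0] * x + H[1] * y + H[2];
  const qy = H[3] * x + H[4] * y + H[5];
  const qz = H[6] * x + H[7] * y + H[8];
  return [qx / qz, qy / qz]; // homogeneous divide
}

// Usage: a pure translation by (10, 5) leaves qz = 1.
const H = [1, 0, 10,  0, 1, 5,  0, 0, 1];
console.log(perspectiveWarp(H, 3, 4)); // [13, 9]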
-
- /***/ 6122:
- /***/ ((module) => {
-
- module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
-
- /***/ }),
-
- /***/ 371:
- /***/ ((module) => {
-
- module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
-
- /***/ }),
-
- /***/ 7307:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
-
- /***/ }),
-
- /***/ 8614:
- /***/ ((module) => {
-
- module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
-
- /***/ }),
-
- /***/ 6271:
- /***/ ((module) => {
-
- module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
-
- /***/ }),
-
- /***/ 3016:
- /***/ ((module) => {
-
- module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
-
- /***/ }),
-
- /***/ 3630:
- /***/ ((module) => {
-
- module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
-
- /***/ }),
-
- /***/ 8508:
- /***/ ((module) => {
-
- module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
-
- /***/ }),
-
- /***/ 8073:
- /***/ ((module) => {
-
- module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
-
- /***/ }),
-
- /***/ 3575:
- /***/ ((module) => {
-
- module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
- f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
- f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
- BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
- AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
- CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
- Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
- AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
- dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
- TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
- X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
- MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
- ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
- PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
- CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
- AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
- gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
- AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
- QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
- AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
- gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
- IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
- gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
- hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
- nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
- AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
- EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
- IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
- AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
- IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
- gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
- AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
- IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
- AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
- AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
- IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
- AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
- QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
- gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
- IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
- OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
- IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
- AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
- APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
- IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
- AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
- IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
- CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
- hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
- IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
- AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
- BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
- QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
- ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
- AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
- BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
- iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
- IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
- AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
- gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
- QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
- QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
- ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
- gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
- NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
- AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
- gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
- IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
- a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
- KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
- QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
- CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
- ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
- QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
- CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
- AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
- QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
- ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
- BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
- AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
- KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
- EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
- DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
- BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
- Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
- DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
- AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
- QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
- IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
- QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
- QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
- IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
- CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
- KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
- IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
- ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
- KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
- AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
- DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
- QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
- QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
- ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
- EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
- SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
- KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
- gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
- ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
- ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
- IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
- IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
- IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
- DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
- A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
- akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
- DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
- u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
- AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
- IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
- IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
- aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
- QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
- KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
- bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
- IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
- IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
- IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
- An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
- DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
- KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
- QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
- BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
- QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
- KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
- ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
- GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
- QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
- ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
- B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
- DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
- BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
- bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
- IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
- DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
- IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
- QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
- FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
- DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
- AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
- AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
- QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
- AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
- EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
- SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
- Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
- ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
- IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
- fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
- IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
- gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
- gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
- CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
- IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
- ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
- DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
- KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
- gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
- ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
- AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
- gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
- aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
- SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
- CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
- CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
- CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
- CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
- AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
- ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
- gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
- QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
- AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
- lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
- kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
- 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
- ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
- NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
- gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
- BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
- AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
- QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
- GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
- C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
- DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
- QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
- IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
- DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
- f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
- gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
- gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
- IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
- AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
- aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
- bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
- IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
- AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
- ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
- gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
- AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
- IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
- BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
- l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
- gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
- AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
- AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
- aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
- ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
- aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
- AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
- FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
- IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
- oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
- AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
- IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
- IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
- 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
- oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
- IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
- PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
- APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
- oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
- GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
- IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
- mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
- BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
- AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
- oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
- oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
- FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
- oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
- IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
- JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
- ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
- AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
- DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
- IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
- ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
- CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
- QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
- IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
- IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
- ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
- AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
- CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
- dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
- KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
- AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
- HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
- BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
- AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
- gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
- QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
- AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
- BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
- IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
- kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
- BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
- IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
- AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
- AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
- QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
- CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
- lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
- dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
- IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
- IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
- C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
- IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
- AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
- KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
- Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
- ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
- IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
- EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
- IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
- IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
- QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
- IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
- AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
- ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
- CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
- AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
- QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
- kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
- GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
- ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
- OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
- BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
- AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
- IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
- A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
- dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
- KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
- IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
- FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
- GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
- PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
- E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
- IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
- IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
- KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
- Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
- ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
- gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
- BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
- EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
- gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
- AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
- IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
- AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
- lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
- b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
- ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
- IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
- gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
- ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
- KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
- AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
- dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
- NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
- ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
- DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
- IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
- IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
- GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
- BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
- ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
- gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
- IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
- FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
- ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
- QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
- DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
- dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
- IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
- IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
- IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
- AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
- D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
- EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
- QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
- IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
- IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
- KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
- QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
- GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
- AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
- ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
- DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
- AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
- IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
- zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
- ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
- AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
- IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
- AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
- QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
- B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
- DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
- gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
- KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
- AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
- KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
- HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
- IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
- IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
- HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
- QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
- gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
- QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
- QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
- ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
- ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
- ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
- EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
- KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
- X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
- ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
- QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
- dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
- AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
- AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
- AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
- KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
- koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
- CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
- CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
- IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
- aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
- ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
- IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
- EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
- A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
- A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
- KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
- AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
- lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
- ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
- IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
- aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
- iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
- DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
- AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
- AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
- IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
- lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
- QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
- IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
- Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
- BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
- A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
- ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
- IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
- taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
- NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
- gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
- Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
- DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
- aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
- ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
- C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
- dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
- IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
- dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
- NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
- YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
- bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
- AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
- b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
- QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
- AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
- dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
- AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
- dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
- NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
- ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
- YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
- aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
- MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
- dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
- ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
- cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
- AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
- AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
- OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
- MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
- MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
- IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
- cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
- aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
- LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
- ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
- bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
- dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
- MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
- YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
- YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
- cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
- b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
- AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
- NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
- bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
- aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
- `
-
- /***/ })
-
- /******/ });
- /************************************************************************/
- /******/ // The module cache
- /******/ var __webpack_module_cache__ = {};
- /******/
- /******/ // The require function
- /******/ function __nested_webpack_require_314174__(moduleId) {
- /******/ // Check if module is in cache
- /******/ var cachedModule = __webpack_module_cache__[moduleId];
- /******/ if (cachedModule !== undefined) {
- /******/ return cachedModule.exports;
- /******/ }
- /******/ // Create a new module (and put it into the cache)
- /******/ var module = __webpack_module_cache__[moduleId] = {
- /******/ // no module.id needed
- /******/ // no module.loaded needed
- /******/ exports: {}
- /******/ };
- /******/
- /******/ // Execute the module function
- /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_314174__);
- /******/
- /******/ // Return the exports of the module
- /******/ return module.exports;
- /******/ }
- /******/
- /************************************************************************/
- /******/ /* webpack/runtime/define property getters */
- /******/ (() => {
- /******/ // define getter functions for harmony exports
- /******/ __nested_webpack_require_314174__.d = (exports, definition) => {
- /******/ for(var key in definition) {
- /******/ if(__nested_webpack_require_314174__.o(definition, key) && !__nested_webpack_require_314174__.o(exports, key)) {
- /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
- /******/ }
- /******/ }
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/hasOwnProperty shorthand */
- /******/ (() => {
- /******/ __nested_webpack_require_314174__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
- /******/ })();
- /******/
- /******/ /* webpack/runtime/make namespace object */
- /******/ (() => {
- /******/ // define __esModule on exports
- /******/ __nested_webpack_require_314174__.r = (exports) => {
- /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
- /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
- /******/ }
- /******/ Object.defineProperty(exports, '__esModule', { value: true });
- /******/ };
- /******/ })();
- /******/
- /************************************************************************/
- var __nested_webpack_exports__ = {};
- // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
- (() => {
- "use strict";
-
- // EXPORTS
- __nested_webpack_require_314174__.d(__nested_webpack_exports__, {
- "default": () => (/* binding */ Speedy)
- });
-
- // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
- var speedy_gl = __nested_webpack_require_314174__(1001);
- // EXTERNAL MODULE: ./src/utils/utils.js
- var utils = __nested_webpack_require_314174__(9037);
- // EXTERNAL MODULE: ./src/core/settings.js
- var settings = __nested_webpack_require_314174__(2199);
- // EXTERNAL MODULE: ./src/core/speedy-promise.js
- var speedy_promise = __nested_webpack_require_314174__(9192);
- ;// CONCATENATED MODULE: ./src/utils/asap.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * asap.js
- * Schedule a function to run "as soon as possible"
- */
-
- /** callbacks */
- const callbacks = /** @type {Function[]} */[];
-
- /** arguments to be passed to the callbacks */
- const args = /** @type {any[][]} */[];
-
- /** asap key */
- const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
-
- // Register an event listener
- window.addEventListener('message', event => {
- if (event.source !== window || event.data !== ASAP_KEY) return;
- event.stopPropagation();
- if (callbacks.length == 0) return;
- const fn = callbacks.pop();
- const argArray = args.pop();
- fn.apply(undefined, argArray);
- }, true);
-
- /**
- * Schedule a function to run "as soon as possible"
- * @param {Function} fn callback
- * @param {any[]} params optional parameters
- */
- function asap(fn, ...params) {
- callbacks.unshift(fn);
- args.unshift(params);
- window.postMessage(ASAP_KEY, '*');
- }
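- 
- /*
-  * Illustrative usage (a sketch, not part of the original source):
-  * asap() queues the callback and posts ASAP_KEY to the window; the
-  * message listener registered above then dequeues and invokes it.
-  * Message-based scheduling like this is commonly used to avoid the
-  * clamping that browsers apply to nested setTimeout() calls.
-  *
-  * asap((a, b) => console.log(a + b), 2, 3); // logs 5 "as soon as possible"
-  */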
- // EXTERNAL MODULE: ./src/utils/errors.js
- var utils_errors = __nested_webpack_require_314174__(8581);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture-reader.js
- * Reads data from textures
- */
-
-
-
-
-
-
-
-
-
- /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
- const DEFAULT_NUMBER_OF_BUFFERS = 2;
-
- /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
- const runOnNextFrame = navigator.userAgent.includes('Firefox') ? (fn, ...args) => setTimeout(fn, 10, ...args) :
- // RAF produces a warning on Firefox
- (fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args)); // reduce battery usage
-
- /**
- * Reads data from textures
- */
- class SpeedyTextureReader {
- /**
- * Constructor
- * @param {number} [numberOfBuffers]
- */
- constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS) {
- utils/* Utils */.A.assert(numberOfBuffers > 0);
-
- /** @type {boolean} is this object initialized? */
- this._initialized = false;
-
- /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
- this._pixelBuffer = new Array(numberOfBuffers).fill(null).map(() => new Uint8Array(0));
-
- /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
- this._pbo = new Array(numberOfBuffers).fill(null);
-
- /** @type {number} the index of the buffer that will be consumed in this frame */
- this._consumerIndex = 0;
-
- /** @type {number} the index of the buffer that will be produced next */
- this._producerIndex = numberOfBuffers - 1;
-
- /** @type {SpeedyPromise<void>[]} producer-consumer promises */
- this._promise = Array.from({
- length: numberOfBuffers
- }, () => speedy_promise/* SpeedyPromise */.i.resolve());
-
- /** @type {boolean[]} are the contents of the ith buffer being produced? */
- this._busy = new Array(numberOfBuffers).fill(false);
-
- /** @type {boolean[]} can the ith buffer be consumed? */
- this._ready = new Array(numberOfBuffers).fill(true);
- }
-
- /**
- * Initialize this object
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- this._allocatePBOs(gpu);
- gpu.subscribe(this._allocatePBOs, this, gpu);
- this._initialized = true;
- }
-
- /**
- * Release resources
- * @param {SpeedyGPU} gpu
- * @returns {null}
- */
- release(gpu) {
- gpu.unsubscribe(this._allocatePBOs, this);
- this._deallocatePBOs(gpu);
- this._initialized = false;
- return null;
- }
-
- /**
- * Read pixels from a texture, synchronously.
- * You may optionally specify a (x,y,width,height) sub-rectangle.
- * @param {SpeedyDrawableTexture} texture a texture with a FBO
- * @param {number} [x]
- * @param {number} [y]
- * @param {number} [width]
- * @param {number} [height]
- * @returns {Uint8Array} pixels in the RGBA format
- */
- readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height) {
- utils/* Utils */.A.assert(this._initialized);
- const gl = texture.gl;
- const fbo = texture.glFbo;
-
- // clamp values
- width = Math.max(0, Math.min(width, texture.width));
- height = Math.max(0, Math.min(height, texture.height));
- x = Math.max(0, Math.min(x, texture.width - width));
- y = Math.max(0, Math.min(y, texture.height - height));
-
- // buffer allocation
- const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
- this._reallocate(sizeofBuffer);
-
- // lost context?
- if (gl.isContextLost()) return this._pixelBuffer[0].subarray(0, sizeofBuffer);
-
- // read pixels
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // done!
- return this._pixelBuffer[0].subarray(0, sizeofBuffer);
- }
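- 
- /*
-  * Illustrative note (not part of the original source): the sub-rectangle is
-  * clamped to the texture bounds. For a 64x64 texture, a call such as
-  * readPixelsSync(texture, 32, 32, 100, 100) reads a 64x64 region at (0,0),
-  * because width/height are clamped to the texture size first and then x/y
-  * are clamped to texture.width - width and texture.height - height.
-  */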
-
- /**
- * Read pixels from a texture, asynchronously, with PBOs.
- * You may optionally specify a (x,y,width,height) sub-rectangle.
- * @param {SpeedyDrawableTexture} texture a texture with a FBO
- * @param {number} [x]
- * @param {number} [y]
- * @param {number} [width]
- * @param {number} [height]
- * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
- * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
- */
- readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false) {
- utils/* Utils */.A.assert(this._initialized);
- const gl = texture.gl;
- const fbo = texture.glFbo;
-
- // clamp values
- width = Math.max(0, Math.min(width, texture.width));
- height = Math.max(0, Math.min(height, texture.height));
- x = Math.max(0, Math.min(x, texture.width - width));
- y = Math.max(0, Math.min(y, texture.height - height));
-
- // buffer allocation
- const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
- this._reallocate(sizeofBuffer);
-
- // lost context?
- if (gl.isContextLost()) return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
-
- // do not optimize?
- if (!useBufferedDownloads) {
- const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
- return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() => pixelBuffer);
- }
-
- // Hide latency with a Producer-Consumer mechanism
- const numberOfBuffers = this._pixelBuffer.length;
-
- // GPU needs to produce data
- const producerIndex = this._producerIndex;
- if (!this._busy[producerIndex]) {
- const pbo = this._pbo[producerIndex];
- const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
- this._producerIndex = (producerIndex + 1) % numberOfBuffers;
- this._ready[producerIndex] = false;
- this._busy[producerIndex] = true;
- //console.time("produce "+producerIndex);
- this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
- //console.timeEnd("produce "+producerIndex);
- this._busy[producerIndex] = false;
- this._ready[producerIndex] = true;
- });
- }
- //else console.log("skip",producerIndex);
- else /* skip frame */;
-
- // CPU needs to consume data
- const consumerIndex = this._consumerIndex;
- this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
- if (!this._ready[consumerIndex]) {
- //console.time("consume "+consumerIndex);
- return this._promise[consumerIndex].then(() => {
- //console.timeEnd("consume "+consumerIndex);
- this._ready[consumerIndex] = false;
- return this._pixelBuffer[consumerIndex];
- });
- }
-
- //console.log("NO WAIT "+consumerIndex);
- this._ready[consumerIndex] = false;
- return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
- }
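- 
- /*
-  * Illustrative sketch (not part of the original source; consume() is a
-  * hypothetical handler): with useBufferedDownloads = true, each call may
-  * resolve with pixels produced by an earlier call, trading roughly one
-  * frame of latency for hiding the GPU-to-CPU transfer time when a texture
-  * is read every frame.
-  *
-  * const reader = new SpeedyTextureReader();
-  * reader.init(gpu);
-  * reader.readPixelsAsync(texture, 0, 0, texture.width, texture.height, true)
-  *       .then(pixels => consume(pixels)); // pixels may lag by a frame
-  */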
-
- /**
- * Reallocate the pixel buffers, so that they can hold the required number of bytes
- * If the pixel buffers already have the required capacity, then nothing is done
- * @param {number} size in bytes
- */
- _reallocate(size) {
- // no need to reallocate
- if (size <= this._pixelBuffer[0].byteLength) return;
-
- // reallocate
- for (let i = 0; i < this._pixelBuffer.length; i++) {
- const newBuffer = new Uint8Array(size);
- //newBuffer.set(this._pixelBuffer[i]); // make this optional?
- this._pixelBuffer[i] = newBuffer;
- }
- }
-
- /**
- * Allocate PBOs
- * @param {SpeedyGPU} gpu
- */
- _allocatePBOs(gpu) {
- const gl = gpu.gl;
- for (let i = 0; i < this._pbo.length; i++) this._pbo[i] = gl.createBuffer();
- }
-
- /**
- * Deallocate PBOs
- * @param {SpeedyGPU} gpu
- */
- _deallocatePBOs(gpu) {
- const gl = gpu.gl;
- for (let i = this._pbo.length - 1; i >= 0; i--) {
- gl.deleteBuffer(this._pbo[i]);
- this._pbo[i] = null;
- }
- }
-
- /**
- * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
- * It's assumed that the target texture is in the RGBA8 format
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLBuffer} pbo
- * @param {Uint8Array} outputBuffer with size >= width * height * 4
- * @param {WebGLFramebuffer} fbo
- * @param {GLint} x
- * @param {GLint} y
- * @param {GLsizei} width
- * @param {GLsizei} height
- * @returns {SpeedyPromise<void>}
- */
- static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height) {
- /*
- When testing Speedy on Chrome (mobile) using about:tracing with the
- --enable-gpu-service-tracing flag, I found that A LOT of time is spent
- in TraceGLAPI::glMapBufferRange, which takes place just after
- GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
- Using multiple PBOs doesn't seem to impact Chrome too much. Performance
- is much better on Firefox. This suggests there is room for improvement.
- I do not yet clearly understand the cause of this lag on Chrome. It may
- be a CPU-GPU synchronization issue.
- EDIT: I have found that using gl.flush() aggressively greatly improves
- things. WebGL commands will be pushed frequently!
- See also:
- https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
- https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
- */
- const size = width * height * 4;
-
- // validate outputBuffer
- utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
-
- // read pixels into the PBO
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
- gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
-
- // create a fence
- const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
- gl.flush(); // make sure the sync command is read
-
- // wait for the commands to be processed by the GPU
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- // according to the WebGL2 spec sec 3.7.14 Sync objects,
- // "sync objects may only transition to the signaled state
- // when the user agent's event loop is not executing a task"
- // in other words, it won't be signaled in the same frame
- if (settings/* Settings */.w.gpuPollingMode != 'asap') runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);else asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
- }).then(() => {
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
- gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
- gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
- }).catch(err => {
- throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
- }).finally(() => {
- gl.deleteSync(sync);
- });
- }
-
- /**
- * Waits for a sync object to become signaled
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLSync} sync
- * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
- * @param {Function} resolve
- * @param {Function} reject
- * @param {number} [pollInterval] in milliseconds
- * @param {number} [remainingAttempts] for timeout
- */
- static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000) {
- (function poll() {
- const status = gl.clientWaitSync(sync, flags, 0);
- if (remainingAttempts-- <= 0) {
- reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
- } else if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
- resolve();
- } else {
- //setTimeout(poll, pollInterval);
- if (settings/* Settings */.w.gpuPollingMode != 'asap') requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
- else asap(poll);
- }
- })();
- }
- }
- // EXTERNAL MODULE: ./src/utils/globals.js
- var globals = __nested_webpack_require_314174__(3816);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture.js
- * A wrapper around WebGLTexture
- */
-
-
-
-
-
-
-
- /**
- * Get a buffer filled with zeros
- * @param {number} size number of bytes
- * @returns {Uint8Array}
- */
- /*
- const zeros = (function() {
- let buffer = new Uint8Array(4);
-
- return function(size) {
- if(size > buffer.length)
- buffer = new Uint8Array(size);
-
- return buffer.subarray(0, size);
- }
- })();
- */
-
- /**
- * A wrapper around WebGLTexture
- */
- class SpeedyTexture {
- /**
- * Constructor
- * @param {WebGL2RenderingContext} gl
- * @param {number} width texture width in pixels
- * @param {number} height texture height in pixels
- * @param {number} [format]
- * @param {number} [internalFormat]
- * @param {number} [dataType]
- * @param {number} [filter]
- * @param {number} [wrap]
- */
- constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT) {
- /** @type {WebGL2RenderingContext} rendering context */
- this._gl = gl;
-
- /** @type {number} width of the texture */
- this._width = Math.max(1, width | 0);
-
- /** @type {number} height of the texture */
- this._height = Math.max(1, height | 0);
-
- /** @type {boolean} have we generated mipmaps for this texture? */
- this._hasMipmaps = false;
-
- /** @type {number} texture format */
- this._format = format;
-
- /** @type {number} internal format (usually a sized format) */
- this._internalFormat = internalFormat;
-
- /** @type {number} data type */
- this._dataType = dataType;
-
- /** @type {number} texture filtering (min & mag) */
- this._filter = filter;
-
- /** @type {number} texture wrapping */
- this._wrap = wrap;
-
- /** @type {WebGLTexture} internal texture object */
- this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
- }
-
- /**
- * Releases the texture
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // already released?
- if (this._glTexture == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
-
- // release resources
- this.discardMipmaps();
- gl.deleteTexture(this._glTexture);
- this._glTexture = null;
- this._width = this._height = 0;
-
- // done!
- return null;
- }
-
- /**
- * Upload pixel data to the texture. The texture will be resized if needed.
- * @param {TexImageSource} data
- * @param {number} [width] in pixels
- * @param {number} [height] in pixels
- * @return {SpeedyTexture} this
- */
- upload(data, width = this._width, height = this._height) {
- const gl = this._gl;
-
- // bugfix: if the media is a video, we can't really
- // upload it to the GPU unless it's ready
- if (data instanceof HTMLVideoElement) {
- if (data.readyState < 2) {
- // this may happen when the video loops (Firefox)
- // keep the previously uploaded texture
- //Utils.warning(`Trying to process a video that isn't ready yet`);
- return this;
- }
- }
- utils/* Utils */.A.assert(width > 0 && height > 0);
- this.discardMipmaps();
- this._width = width;
- this._height = height;
- this._internalFormat = gl.RGBA8;
- this._format = gl.RGBA;
- this._dataType = gl.UNSIGNED_BYTE;
- SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
- return this;
- }
-
- /**
- * Clear the texture
- * @returns {this}
- */
- clear() {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // clear texture data
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // no mipmaps
- this.discardMipmaps();
-
- // done!
- return this;
- }
-
- /**
- * Resize this texture. Its content will be lost!
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @returns {this}
- */
- resize(width, height) {
- const gl = this._gl;
-
- // no need to resize?
- if (this._width === width && this._height === height) return this;
-
- // validate size
- width |= 0;
- height |= 0;
- if (width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH)
-     throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);
- else if (width < 1 || height < 1)
-     throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // update dimensions
- this._width = width;
- this._height = height;
-
- // resize
- // Note: this is fast on Chrome, but seems slow on Firefox
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // no mipmaps
- this.discardMipmaps();
-
- // done!
- return this;
- }
-
- /**
- * Generate mipmap
- * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
- * @returns {SpeedyTexture} this
- */
- generateMipmaps(mipmap = []) {
- const gl = this._gl;
-
- // nothing to do
- if (this._hasMipmaps) return this;
-
- // let the hardware compute all the levels of the pyramid, down to 1x1
- // we also specify the TEXTURE_MIN_FILTER to be used from now on
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
- gl.generateMipmap(gl.TEXTURE_2D);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // accept custom textures
- if (mipmap.length > 0) {
- // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
- const width = this.width,
- height = this.height;
- const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
- utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
-
- // verify the dimensions of each level
- for (let level = 1; level < mipmap.length; level++) {
- // use max(1, floor(size / 2^lod)), in accordance to
- // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
- const w = Math.max(1, width >>> level);
- const h = Math.max(1, height >>> level);
-
- // verify the dimensions of this level
- utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
-
- // copy to mipmap
- mipmap[level].copyTo(this, level);
- }
- }
-
- // done!
- this._hasMipmaps = true;
- return this;
- }
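-
- // Worked example (illustrative only, not part of the class): following the
- // OpenGL ES 3.0 rules used above, a 640x480 texture has
- // 1 + floor(log2(max(640, 480))) = 1 + floor(log2(640)) = 10 mip levels,
- // and level 3 measures max(1, 640 >>> 3) x max(1, 480 >>> 3) = 80x60 pixels.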
-
- /**
- * Invalidates previously generated mipmap, if any
- */
- discardMipmaps() {
- const gl = this._gl;
-
- // nothing to do
- if (!this._hasMipmaps) return;
-
- // reset the min filter
- gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
- gl.bindTexture(gl.TEXTURE_2D, null);
-
- // done!
- this._hasMipmaps = false;
- }
-
- /**
- * Does this texture have a mipmap?
- * @returns {boolean}
- */
- hasMipmaps() {
- return this._hasMipmaps;
- }
-
- /**
- * Has this texture been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._glTexture == null;
- }
-
- /**
- * The internal WebGLTexture
- * @returns {WebGLTexture}
- */
- get glTexture() {
- return this._glTexture;
- }
-
- /**
- * The width of the texture, in pixels
- * @returns {number}
- */
- get width() {
- return this._width;
- }
-
- /**
- * The height of the texture, in pixels
- * @returns {number}
- */
- get height() {
- return this._height;
- }
-
- /**
- * The WebGL Context
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._gl;
- }
-
- /**
- * Create a WebGL texture
- * @param {WebGL2RenderingContext} gl
- * @param {number} width in pixels
- * @param {number} height in pixels
- * @param {number} format usually gl.RGBA
- * @param {number} internalFormat usually gl.RGBA8
- * @param {number} dataType usually gl.UNSIGNED_BYTE
- * @param {number} filter usually gl.NEAREST or gl.LINEAR
- * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
- * @returns {WebGLTexture}
- */
- static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap) {
- utils/* Utils */.A.assert(width > 0 && height > 0);
-
- // create & bind texture
- const texture = gl.createTexture();
- gl.bindTexture(gl.TEXTURE_2D, texture);
-
- // setup
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
- //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
- gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
-
- // unbind & return
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
-
- /**
- * Upload pixel data to a WebGL texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLTexture} texture
- * @param {GLsizei} width texture width
- * @param {GLsizei} height texture height
- * @param {TexImageSource} pixels
- * @param {GLint} lod mipmap level-of-detail
- * @param {number} format
- * @param {number} internalFormat
- * @param {number} dataType
- * @returns {WebGLTexture} texture
- */
- static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType) {
- // Prefer calling _upload() before gl.useProgram() to avoid the
- // needless switching of GL programs internally. See also:
- // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
- gl.bindTexture(gl.TEXTURE_2D, texture);
-
- /*
- // this seems to be slower than texImage2D, contrary to what the spec suggests?
- gl.texSubImage2D(gl.TEXTURE_2D, // target
- lod, // mip level
- 0, // x-offset
- 0, // y-offset
- width, // texture width
- height, // texture height
- gl.RGBA, // source format
- gl.UNSIGNED_BYTE, // source type
- pixels); // source data
- */
-
- gl.texImage2D(gl.TEXTURE_2D, // target
-     lod,                     // mip level
-     internalFormat,          // internal format
-     width,                   // texture width
-     height,                  // texture height
-     0,                       // border
-     format,                  // source format
-     dataType,                // source type
-     pixels);                 // source data
-
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
- }
-
- /**
- * A SpeedyTexture with a framebuffer
- */
- class SpeedyDrawableTexture extends SpeedyTexture {
- /**
- * Constructor
- * @param {WebGL2RenderingContext} gl
- * @param {number} width texture width in pixels
- * @param {number} height texture height in pixels
- * @param {number} [format]
- * @param {number} [internalFormat]
- * @param {number} [dataType]
- * @param {number} [filter]
- * @param {number} [wrap]
- */
- constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined) {
- super(gl, width, height, format, internalFormat, dataType, filter, wrap);
-
- /** @type {WebGLFramebuffer} framebuffer */
- this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
- }
-
- /**
- * Releases the texture
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // already released?
- if (this._glFbo == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
-
- // release the framebuffer
- gl.deleteFramebuffer(this._glFbo);
- this._glFbo = null;
-
- // release the SpeedyTexture
- return super.release();
- }
-
- /**
- * The internal WebGLFramebuffer
- * @returns {WebGLFramebuffer}
- */
- get glFbo() {
- return this._glFbo;
- }
-
- /**
- * Copy this texture into another
- * (you may have to discard the mipmaps after calling this function)
- * @param {SpeedyTexture} texture target texture
- * @param {number} [lod] level-of-detail of the target texture
- */
- copyTo(texture, lod = 0) {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return;
-
- // compute texture size as max(1, floor(size / 2^lod)),
- // in accordance to the OpenGL ES 3.0 spec sec 3.8.10.4
- // (Mipmapping)
- const pot = 1 << (lod |= 0);
- const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
- const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
-
- // validate
- utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
-
- // copy to texture
- SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
- }
-
- /*
- * Resize this texture
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
- * @returns {this}
- */
- /*resize(width, height, preserveContent = false)
- {
- const gl = this._gl;
- // no need to preserve the content?
- if(!preserveContent)
- return super.resize(width, height);
- // no need to resize?
- if(this._width === width && this._height === height)
- return this;
- // validate size
- width |= 0; height |= 0;
- Utils.assert(width > 0 && height > 0);
- // context loss?
- if(gl.isContextLost())
- return this;
- // allocate new texture
- const newTexture = SpeedyTexture._createTexture(gl, width, height);
- // initialize the new texture with zeros to avoid a
- // warning when calling copyTexSubImage2D() on Firefox
- // this may not be very efficient?
- SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
- // copy the old texture to the new one
- const oldWidth = this._width, oldHeight = this._height;
- SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
- // bind FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
- // invalidate old data (is this needed?)
- gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
- // attach the new texture to the existing framebuffer
- gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
- gl.COLOR_ATTACHMENT0, // color buffer
- gl.TEXTURE_2D, // tex target
- newTexture, // texture
- 0); // mipmap level
- // unbind FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- // release the old texture and replace it
- gl.deleteTexture(this._glTexture);
- this._glTexture = newTexture;
- // update dimensions & discard mipmaps
- this.discardMipmaps();
- this._width = width;
- this._height = height;
- // done!
- return this;
- }
- */
-
- /**
- * Clear the texture
- * @returns {this}
- */
- clear() {
- //
- // When we pass null to texImage2D(), it seems that Firefox
- // doesn't clear the texture. Instead, it displays this warning:
- //
- // "WebGL warning: drawArraysInstanced:
- // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
- //
- // Here is a workaround:
- //
- return this.clearToColor(0, 0, 0, 0);
- }
-
- /**
- * Clear the texture to a color
- * @param {number} r red component, a value in [0,1]
- * @param {number} g green component, a value in [0,1]
- * @param {number} b blue component, a value in [0,1]
- * @param {number} a alpha component, a value in [0,1]
- * @returns {this}
- */
- clearToColor(r, g, b, a) {
- const gl = this._gl;
-
- // context loss?
- if (gl.isContextLost()) return this;
-
- // clamp parameters
- r = Math.max(0.0, Math.min(+r, 1.0));
- g = Math.max(0.0, Math.min(+g, 1.0));
- b = Math.max(0.0, Math.min(+b, 1.0));
- a = Math.max(0.0, Math.min(+a, 1.0));
-
- // discard mipmaps, if any
- this.discardMipmaps();
-
- // clear the texture
- gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
- gl.viewport(0, 0, this._width, this._height);
- gl.clearColor(r, g, b, a);
- gl.clear(gl.COLOR_BUFFER_BIT);
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // done!
- return this;
- }
-
- /**
- * Inspect the pixels of the texture for debugging purposes
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTextureReader} [textureReader] optional texture reader
- * @returns {Uint8Array}
- */
- inspect(gpu, textureReader) {
- if (textureReader === undefined) {
- textureReader = new SpeedyTextureReader();
- textureReader.init(gpu);
- const pixels = textureReader.readPixelsSync(this);
- textureReader.release(gpu);
- return new Uint8Array(pixels); // copy the array
- } else {
- const pixels = textureReader.readPixelsSync(this);
- return new Uint8Array(pixels);
- }
- }
-
- /**
- * Inspect the pixels of the texture as unsigned 32-bit integers
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTextureReader} [textureReader] optional texture reader
- * @returns {Uint32Array}
- */
- inspect32(gpu, textureReader) {
- utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
- return new Uint32Array(this.inspect(gpu, textureReader).buffer);
- }
-
- /**
- * Create a FBO associated with an existing texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLTexture} texture
- * @returns {WebGLFramebuffer}
- */
- static _createFramebuffer(gl, texture) {
- const fbo = gl.createFramebuffer();
-
- // setup framebuffer
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
-     gl.COLOR_ATTACHMENT0,               // color buffer
-     gl.TEXTURE_2D,                      // tex target
-     texture,                            // texture
-     0);                                 // mipmap level
-
- // check for errors
- const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
- if (status != gl.FRAMEBUFFER_COMPLETE) {
- const error = (() => ['FRAMEBUFFER_UNSUPPORTED', 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS', 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'].filter(err => gl[err] === status)[0] || 'unknown error')();
- throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
- }
-
- // unbind & return
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- return fbo;
- }
-
- /**
- * Copy data from a framebuffer to a texture
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLFramebuffer} fbo we'll read the data from this
- * @param {WebGLTexture} texture destination texture
- * @param {GLint} x xpos (where to start copying)
- * @param {GLint} y ypos (where to start copying)
- * @param {GLsizei} width width of the texture
- * @param {GLsizei} height height of the texture
- * @param {GLint} [lod] mipmap level-of-detail
- * @returns {WebGLTexture} texture
- */
- static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0) {
- //gl.activeTexture(gl.TEXTURE0);
- gl.bindTexture(gl.TEXTURE_2D, texture);
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
- gl.copyTexSubImage2D(gl.TEXTURE_2D, // target
-     lod,                            // mipmap level
-     0,                              // xoffset
-     0,                              // yoffset
-     x,                              // xpos (where to start copying)
-     y,                              // ypos (where to start copying)
-     width,                          // width of the texture
-     height                          // height of the texture
- );
-
- /*
- gl.copyTexImage2D(
- gl.TEXTURE_2D, // target
- lod, // mipmap level
- gl.RGBA, // internal format
- x, // xpos (where to start copying)
- y, // ypos (where to start copying)
- width, // width of the texture
- height, // height of the texture
- 0 // border
- );
- */
-
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
- gl.bindTexture(gl.TEXTURE_2D, null);
- return texture;
- }
- }
- // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
- var shader_declaration = __nested_webpack_require_314174__(9420);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program.js
- * SpeedyProgram class
- */
-
-
-
-
-
-
-
- /** @const {Object<string,string>} Map uniform type to a gl function */
- const UNIFORM_SETTERS = Object.freeze({
- 'sampler2D': 'uniform1i',
- 'isampler2D': 'uniform1i',
- 'usampler2D': 'uniform1i',
- 'float': 'uniform1f',
- 'int': 'uniform1i',
- 'uint': 'uniform1ui',
- 'bool': 'uniform1i',
- 'vec2': 'uniform2f',
- 'vec3': 'uniform3f',
- 'vec4': 'uniform4f',
- 'ivec2': 'uniform2i',
- 'ivec3': 'uniform3i',
- 'ivec4': 'uniform4i',
- 'uvec2': 'uniform2ui',
- 'uvec3': 'uniform3ui',
- 'uvec4': 'uniform4ui',
- 'bvec2': 'uniform2i',
- 'bvec3': 'uniform3i',
- 'bvec4': 'uniform4i',
- 'mat2': 'uniformMatrix2fv',
- 'mat3': 'uniformMatrix3fv',
- 'mat4': 'uniformMatrix4fv'
- });
-
- /**
- * @typedef {object} SpeedyProgramOptions
- * @property {boolean} [renderToTexture] render results to a texture?
- * @property {boolean} [pingpong] alternate output texture between calls
- */
-
- /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
-
- /**
- * A SpeedyProgram is a Function that runs GLSL code
- */
- class SpeedyProgram extends Function {
- /**
- * Creates a new SpeedyProgram
- * @param {WebGL2RenderingContext} gl WebGL context
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} [options] user options
- */
- constructor(gl, shaderdecl, options = {}) {
- super('...args', 'return this._self._call(...args)');
-
- /** @type {SpeedyProgram} this function bound to this function! */
- this._self = this.bind(this);
- this._self._init(gl, shaderdecl, options);
- return this._self;
- }
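-
- /*
- * Minimal sketch (illustrative, made-up names) of the pattern used by the
- * constructor above: extend Function and bind, so that instances of the class
- * become directly callable and forward their calls to _call().
- *
- * class CallableBox extends Function {
- *     constructor(value) {
- *         super('...args', 'return this._self._call(...args)');
- *         this._self = this.bind(this); // the callable instance
- *         this._self._value = value;    // state lives on the bound function
- *         return this._self;
- *     }
- *
- *     _call() {
- *         return this._value; // e.g., new CallableBox(42)() === 42
- *     }
- * }
- */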
-
- /**
- * Initialize the SpeedyProgram
- * @param {WebGL2RenderingContext} gl WebGL context
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} options user options
- */
- _init(gl, shaderdecl, options) {
- // not a valid context?
- if (gl.isContextLost()) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
-
- // options object
- options = Object.assign({
- // default options
- renderToTexture: true,
- pingpong: false
- }, options);
-
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- /** @type {WebGLProgram} vertex shader + fragment shader */
- this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
-
- /** @type {ProgramGeometry} this is a quad */
- this._geometry = new ProgramGeometry(gl, {
- position: shaderdecl.locationOfAttributes.position,
- texCoord: shaderdecl.locationOfAttributes.texCoord
- });
-
- /** @type {string[]} names of the arguments of the SpeedyProgram */
- this._argnames = shaderdecl.arguments;
-
- /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
- this._argIsArray = new Array(this._argnames.length).fill(false);
-
- /** @type {UBOHelper} UBO helper (lazy instantiation) */
- this._ubo = null;
-
- /** @type {boolean} should we render to a texture? If false, we render to the canvas */
- this._renderToTexture = Boolean(options.renderToTexture);
-
- /** @type {number} width of the output */
- this._width = 1;
-
- /** @type {number} height of the output */
- this._height = 1;
-
- /** @type {[number,number]} cached object that stores the size of the output */
- this._size = [1, 1];
-
- /** @type {SpeedyDrawableTexture[]} output texture(s) */
- this._texture = new Array(options.pingpong ? 2 : 1).fill(null);
-
- /** @type {number} used for pingpong rendering */
- this._textureIndex = 0;
-
- /** @type {Map<string,UniformVariable>} uniform variables */
- this._uniform = new Map();
-
- /** @type {ShaderDeclaration} shader declaration */
- this._shaderdecl = shaderdecl;
-
- // autodetect uniforms
- gl.useProgram(this._program);
- for (const name of shaderdecl.uniforms) {
- const type = shaderdecl.uniformType(name);
- const location = gl.getUniformLocation(this._program, name);
- this._uniform.set(name, new UniformVariable(type, location));
- }
-
- // match arguments & uniforms
- for (let j = 0; j < this._argnames.length; j++) {
- const argname = this._argnames[j];
- if (!this._uniform.has(argname)) {
- this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
- if (!this._argIsArray[j]) throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
- }
- }
- }
-
- /**
- * Run the SpeedyProgram
- * @param {...SpeedyProgramUniformValue} args
- * @returns {SpeedyDrawableTexture}
- */
- _call(...args) {
- const gl = this._gl;
- const argnames = this._argnames;
- const texture = this._texture[this._textureIndex];
-
- // matching arguments?
- if (args.length != argnames.length) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
-
- // can't use the output texture as an input
- /*
- // slower method
- const flatArgs = Utils.flatten(args);
- for(let j = flatArgs.length - 1; j >= 0; j--) {
- if(flatArgs[j] === this._texture[this._textureIndex])
- throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
- }
- */
- for (let j = args.length - 1; j >= 0; j--) {
- if (args[j] === texture) throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
- // else if(Array.isArray(args[j])) ...
- // we don't support passing arrays of textures at the time of this writing
- }
-
- // context loss?
- if (gl.isContextLost()) return texture;
-
- // use program
- gl.useProgram(this._program);
-
- // bind the VAO
- gl.bindVertexArray(this._geometry.vao);
-
- // select the render target
- const fbo = this._renderToTexture ? texture.glFbo : null;
-
- // update texSize uniform (available in all fragment shaders)
- const texSize = this._uniform.get('texSize');
- this._size[0] = this._width;
- this._size[1] = this._height;
- texSize.setValue(gl, this._size);
-
- // set uniforms[i] to args[i]
- for (let i = 0, texNo = 0; i < args.length; i++) {
- const argname = argnames[i];
- if (!this._argIsArray[i]) {
- // uniform variable matches argument name
- const uniform = this._uniform.get(argname);
- texNo = uniform.setValue(gl, args[i], texNo);
- } else {
- // uniform array matches argument name
- const array = args[i];
- if (Array.isArray(array)) {
- if (this._uniform.has(indexedVariable(argname, array.length))) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
- for (let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++) texNo = uniform.setValue(gl, array[j], texNo);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
- }
- }
-
- // set Uniform Buffer Objects (if any)
- if (this._ubo !== null) this._ubo.update();
-
- // bind the FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
-
- // draw call
- gl.viewport(0, 0, this._width, this._height);
- gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
-
- // unbind the FBO
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
-
- // unbind the VAO
- gl.bindVertexArray(null);
-
- // we've just changed the texture! discard the pyramid, if any
- if (texture != null) texture.discardMipmaps();
-
- // ping-pong rendering
- this._pingpong();
-
- // done!
- return texture;
- }
-
- /**
- * Set the output texture(s) and their size
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @param {...SpeedyDrawableTexture|null} texture output texture(s)
- * @returns {SpeedyProgram} this
- */
- outputs(width, height, ...texture) {
- this._setOutputTexture(...texture);
- this._setOutputSize(width, height);
- return this;
- }
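-
- // Hedged usage sketch (names are made up): once its output is configured, a
- // SpeedyProgram is invoked like a regular function, e.g.
- //
- //     myProgram.outputs(width, height, outputTexture);
- //     const result = myProgram(inputTexture, someScalarArgument);
- //
- // The arguments are mapped, in order, to the uniforms declared in the shader
- // (see _call() above).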
-
- /**
- * Set the size of the output
- * @param {number} width new width, in pixels
- * @param {number} height new height, in pixels
- * @returns {SpeedyProgram} this
- */
- _setOutputSize(width, height) {
- utils/* Utils */.A.assert(width > 0 && height > 0);
-
- // update output size
- this._width = width | 0;
- this._height = height | 0;
-
- // resize the output texture(s)
- for (let i = 0; i < this._texture.length; i++) {
- if (this._texture[i] != null) this._texture[i].resize(this._width, this._height);
- }
-
- // done!
- return this;
- }
-
- /**
- * Use the provided texture(s) as output
- * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
- * @returns {SpeedyProgram} this
- */
- _setOutputTexture(...texture) {
- utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
-
- // update output texture(s)
- for (let i = 0; i < this._texture.length; i++) this._texture[i] = texture[i];
- this._textureIndex = 0;
-
- // done!
- return this;
- }
-
- /**
- * Clear the internal textures
- * @returns {SpeedyDrawableTexture}
- */
- clear() {
- const texture = this._texture[this._textureIndex];
-
- // clear internal textures
- for (let i = 0; i < this._texture.length; i++) this._texture[i].clear();
-
- // ping-pong rendering
- this._pingpong();
-
- // done!
- return texture;
- }
-
- /**
- * Set data using a Uniform Buffer Object
- * @param {string} blockName uniform block name
- * @param {ArrayBufferView} data
- * @returns {SpeedyProgram} this
- */
- setUBO(blockName, data) {
- if (this._ubo === null) this._ubo = new UBOHelper(this._gl, this._program);
- this._ubo.set(blockName, data);
- return this;
- }
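-
- // Hedged usage sketch (the block name and data are made up): assuming the
- // fragment shader declares a uniform block named "Params", one could write
- //
- //     program.setUBO('Params', new Float32Array([1.0, 0.5, 0.0, 0.0]));
- //
- // The data is stored in the UBOHelper and uploaded on the next run of the
- // program (see UBOHelper.prototype.update further below).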
-
- /**
- * Release the resources associated with this SpeedyProgram
- * @returns {null}
- */
- release() {
- const gl = this._gl;
-
- // Release UBOs (if any)
- if (this._ubo != null) this._ubo = this._ubo.release();
-
- // Unlink textures
- this._texture.fill(null);
-
- // Release geometry
- this._geometry = this._geometry.release();
-
- // Release program
- gl.deleteProgram(this._program);
- this._program = null;
-
- // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
- // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
- // will automatically be marked for deletion when the JS object is
- // destroyed (i.e., garbage collected)
-
- // done!
- return null;
- }
-
- /**
- * A constant #defined in the shader declaration
- * @param {string} name
- * @returns {number}
- */
- definedConstant(name) {
- return this._shaderdecl.definedConstant(name);
- }
-
- /**
- * Helper method for pingpong rendering: alternates
- * the texture index from 0 to 1 and vice-versa
- */
- _pingpong() {
- if (this._texture.length > 1) this._textureIndex = 1 - this._textureIndex;
- }
-
- /**
- * Compile and link GLSL shaders
- * @param {WebGL2RenderingContext} gl
- * @param {string} vertexShaderSource GLSL code of the vertex shader
- * @param {string} fragmentShaderSource GLSL code of the fragment shader
- * @returns {WebGLProgram}
- */
- static _compile(gl, vertexShaderSource, fragmentShaderSource) {
- const program = gl.createProgram();
- const vertexShader = gl.createShader(gl.VERTEX_SHADER);
- const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
-
- // compile vertex shader
- gl.shaderSource(vertexShader, vertexShaderSource);
- gl.compileShader(vertexShader);
- gl.attachShader(program, vertexShader);
-
- // compile fragment shader
- gl.shaderSource(fragmentShader, fragmentShaderSource);
- gl.compileShader(fragmentShader);
- gl.attachShader(program, fragmentShader);
-
- // link program
- gl.linkProgram(program);
- gl.validateProgram(program);
-
- // return on success
- if (gl.getProgramParameter(program, gl.LINK_STATUS)) return program;
-
- // display an error
- const errors = [gl.getShaderInfoLog(fragmentShader), gl.getShaderInfoLog(vertexShader), gl.getProgramInfoLog(program)];
- gl.deleteProgram(program);
- gl.deleteShader(fragmentShader);
- gl.deleteShader(vertexShader);
-
- // display error
- const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
- const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
- const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
- const formattedSource = source.split('\n').map((line, no) => col(1 + no) + line).join('\n');
- throw new utils_errors/* GLError */.wB(`\n\n---------- ERROR ----------\n\n` + errors.filter(err => err).join('\n') + `\n\n---------- SOURCE CODE ----------\n\n` + formattedSource + '\n');
- }
- }
-
- // ============================================================================
- // HELPERS
- // ============================================================================
-
- /**
- * Configure and store the VAO and the VBOs
- * @param {WebGL2RenderingContext} gl
- * @param {LocationOfAttributes} location
- * @returns {ProgramGeometry}
- *
- * @typedef {Object} LocationOfAttributes
- * @property {number} position
- * @property {number} texCoord
- *
- * @typedef {Object} BufferOfAttributes
- * @property {WebGLBuffer} position
- * @property {WebGLBuffer} texCoord
- */
- function ProgramGeometry(gl, location) {
- /** @type {WebGLVertexArrayObject} Vertex Array Object */
- this.vao = gl.createVertexArray();
-
- /** @type {BufferOfAttributes} Vertex Buffer Objects */
- this.vbo = Object.freeze({
- position: gl.createBuffer(),
- texCoord: gl.createBuffer()
- });
-
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- // bind the VAO
- gl.bindVertexArray(this.vao);
-
- // set the position attribute
- gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
- // clip coordinates (CCW)
- -1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
- gl.enableVertexAttribArray(location.position);
- gl.vertexAttribPointer(location.position, // attribute location
-     2,                                    // 2 components per vertex (x,y)
-     gl.FLOAT,                             // type
-     false,                                // don't normalize
-     0,                                    // default stride (tightly packed)
-     0);                                   // offset
-
- // set the texCoord attribute
- gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
- // texture coordinates (CCW)
- 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);
- gl.enableVertexAttribArray(location.texCoord);
- gl.vertexAttribPointer(location.texCoord, // attribute location
-     2,                                    // 2 components per vertex (x,y)
-     gl.FLOAT,                             // type
-     false,                                // don't normalize
-     0,                                    // default stride (tightly packed)
-     0);                                   // offset
-
- // unbind
- gl.bindBuffer(gl.ARRAY_BUFFER, null);
- gl.bindVertexArray(null);
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * Releases the internal resources
- * @returns {null}
- */
- ProgramGeometry.prototype.release = function () {
- const gl = this._gl;
- gl.deleteVertexArray(this.vao);
- gl.deleteBuffer(this.vbo.position);
- gl.deleteBuffer(this.vbo.texCoord);
- return null;
- };
-
- /**
- * Helper class for storing data in GLSL uniform variables
- * @param {string} type
- * @param {WebGLUniformLocation} location
- */
- function UniformVariable(type, location) {
- /** @type {string} GLSL data type */
- this.type = String(type);
- if (!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
-
- /** @type {WebGLUniformLocation} uniform location in a WebGL program */
- this.location = location;
-
- /** @type {string} setter function */
- this.setter = UNIFORM_SETTERS[this.type];
- const n = Number(this.setter.match(/^uniform(Matrix)?(\d)/)[2]) | 0;
-
- /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
- this.dim = this.type.startsWith('mat') ? 2 : this.type.indexOf('vec') >= 0 ? 1 : 0;
-
- /** @type {number} required number of scalars */
- this.length = this.dim == 2 ? n * n : n;
-
- /** @type {SpeedyProgramUniformValue|null} cached value */
- this._value = null;
- }
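-
- // Worked example (illustrative only): for type 'mat3' the setter is
- // 'uniformMatrix3fv', so n = 3, dim = 2 (matrix) and length = 3 * 3 = 9 scalars;
- // for 'vec2' the setter is 'uniform2f', so n = 2, dim = 1 (vector) and length = 2;
- // for 'float' the setter is 'uniform1f', so dim = 0 (scalar) and length = 1.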
-
- /**
- * Set the value of a uniform variable
- * @param {WebGL2RenderingContext} gl
- * @param {SpeedyProgramUniformValue} value use column-major format for matrices
- * @param {number} [texNo] current texture index
- * @returns {number} new texture index
- */
- UniformVariable.prototype.setValue = function (gl, value, texNo = -1) {
- const setValue = /** @type {Function} */gl[this.setter];
-
- // check uniform type
- if (typeof value === 'object' && this.type.endsWith('sampler2D')) {
- // set texture
- if (texNo >= gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)
-     throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS}`);
- else if (Array.isArray(value))
-     throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);
- else if (value == null)
-     throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);
- else if (texNo < 0)
-     throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`);
- const tex = value;
- gl.activeTexture(gl.TEXTURE0 + texNo);
- gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
- gl.uniform1i(this.location, texNo);
- texNo++;
- } else if (value === this._value && typeof value !== 'object') {
- // do not update the uniform if it hasn't changed
- // note that value may be an array whose entries may have been updated
- void 0;
- } else if (typeof value === 'number' || typeof value === 'boolean') {
- // set scalar value
- setValue.call(gl, this.location, value);
- } else if (Array.isArray(value)) {
- // set vector or matrix
- if (value.length === this.length) {
- if (this.dim == 2) setValue.call(gl, this.location, false, value); // matrix
- else setValue.call(gl, this.location, ...value); // vector
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
-
- // cache the value
- this._value = value;
-
- // done
- return texNo;
- };
-
- /**
- * @typedef {object} UBOStuff
- * @property {WebGLBuffer} buffer
- * @property {number} blockBindingIndex "global" binding index
- * @property {number} blockIndex UBO "location" in the program
- * @property {ArrayBufferView|null} data user-data
- */
-
- /**
- * A helper class for handling Uniform Buffer Objects (UBOs)
- * @param {WebGL2RenderingContext} gl
- * @param {WebGLProgram} program
- */
- function UBOHelper(gl, program) {
- /** @type {WebGL2RenderingContext} */
- this._gl = gl;
-
- /** @type {WebGLProgram} */
- this._program = program;
-
- /** @type {number} auto-increment counter */
- this._nextIndex = 0;
-
- /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
- this._ubo = Object.create(null);
- }
-
- /**
- * Set Uniform Buffer Object data
- * (the buffer will be uploaded when the program is executed)
- * @param {string} name uniform block name
- * @param {ArrayBufferView} data
- */
- UBOHelper.prototype.set = function (name, data) {
- const gl = this._gl;
-
- // create UBO entry
- if (this._ubo[name] === undefined) {
- this._ubo[name] = {
- buffer: gl.createBuffer(),
- blockBindingIndex: this._nextIndex++,
- blockIndex: -1,
- data: null
- };
- }
-
- // get UBO entry for the given block name
- const ubo = this._ubo[name];
-
- // read block index & assign binding point
- if (ubo.blockIndex < 0) {
- const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
- gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
- ubo.blockIndex = blockIndex;
- }
-
- // store the data - we'll upload it later
- ubo.data = data;
- };
-
- /**
- * Update UBO data
- * Called when we're using the appropriate WebGLProgram
- */
- UBOHelper.prototype.update = function () {
- const gl = this._gl;
- for (const name in this._ubo) {
- const ubo = this._ubo[name];
- gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
- gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
- gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
- gl.bindBuffer(gl.UNIFORM_BUFFER, null);
- }
- };
-
- /**
- * Release allocated buffers
- * @returns {null}
- */
- UBOHelper.prototype.release = function () {
- const gl = this._gl;
- for (const name in this._ubo) {
- const ubo = this._ubo[name];
- gl.deleteBuffer(ubo.buffer);
- ubo.data = null;
- }
- return null;
- };
-
- /**
- * Generates an indexed variable name, as in variable[index]
- * @param {string} variable
- * @param {number} index
- * @returns {string} variable[index]
- */
- function indexedVariable(variable, index) {
- //return `${variable}[${index}]`; // no caching
-
- // is this cache lookup really faster than string concatenation?
- // what about memory consumption?
- const cache = indexedVariable.cache;
- let nameList = cache.get(variable);
- if (nameList === undefined) cache.set(variable, nameList = []);
- if (nameList[index] === undefined) nameList[index] = `${variable}[${index}]`;
- return nameList[index];
- }
-
- /** @type {Map<string,string[]>} cached argument names */
- indexedVariable.cache = new Map(); // Object.create(null)
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program-group.js
- * An abstract group of programs that run on the GPU
- */
-
-
-
-
-
- /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
-
- /**
- * @typedef {object} SpeedyProgramHelpers
- * @property {function(): SpeedyProgramOptions} usesPingpongRendering
- * @property {function(): SpeedyProgramOptions} rendersToCanvas
- */
-
- /** @const {SpeedyProgramHelpers} Program settings generator */
- const PROGRAM_HELPERS = Object.freeze({
- /**
- * Pingpong Rendering: the output texture of a
- * program cannot be used as an input to itself.
- * This is a convenient helper in these situations
- * @returns {SpeedyProgramOptions}
- */
- usesPingpongRendering() {
- return {
- pingpong: true
- };
- },
- /**
- * Render to canvas
- * Use it when we're supposed to see the texture
- * @returns {SpeedyProgramOptions}
- */
- rendersToCanvas() {
- return {
- renderToTexture: false
- };
- }
- });
-
- /**
- * SpeedyProgramGroup
- * A semantically correlated group
- * of programs that run on the GPU
- * @abstract
- */
- class SpeedyProgramGroup {
- /**
- * Class constructor
- * @protected
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- /** @type {SpeedyGPU} GPU-accelerated routines */
- this._gpu = gpu;
-
- /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
- this._programs = [];
- }
-
- /**
- * Declare a program
- * @protected
- * @param {string} name Program name
- * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
- * @param {SpeedyProgramOptions} [options] Program settings
- * @returns {this}
- */
- declare(name, builder, options = {}) {
- // lazy instantiation of kernels
- Object.defineProperty(this, name, {
- get: (() => {
- // Why cast a symbol to symbol?
- // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
- const key = /** @type {symbol} */Symbol(name);
- return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
- })()
- });
- return this;
- }
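-
- /*
- * Small sketch (made-up names) of the lazy-instantiation pattern used by
- * declare() above: the getter builds the value on first access and caches it
- * under a private Symbol key, so later accesses pay no cost.
- *
- * const lazy = {};
- * Object.defineProperty(lazy, 'answer', {
- *     get: (() => {
- *         const key = Symbol('answer');
- *         return () => lazy[key] || (lazy[key] = computeExpensiveAnswer());
- *     })()
- * });
- */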
-
- /**
- * Neat helpers to be used when declaring programs
- * @returns {SpeedyProgramHelpers}
- */
- get program() {
- return PROGRAM_HELPERS;
- }
-
- /**
- * Releases all programs from this group
- * @returns {null}
- */
- release() {
- for (let i = 0; i < this._programs.length; i++) this._programs[i].release();
- return null;
- }
-
- /**
- * Spawn a SpeedyProgram
- * @param {ShaderDeclaration} shaderdecl Shader declaration
- * @param {SpeedyProgramOptions} [options] Program settings
- * @returns {SpeedyProgram}
- */
- _createProgram(shaderdecl, options = {}) {
- const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
- this._programs.push(program);
- return program;
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * utils.js
- * GPU utilities
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Copy image
- const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
-
- // Copy keypoints
- const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
- 'TYPE': 1
- }).withArguments('image');
-
- // Copy 2D vectors
- const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
- 'TYPE': 2
- }).withArguments('image');
-
- // Flip y-axis for output
- const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
-
- // Fill image with a constant
- const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
-
- // Fill zero or more color components of the input image with a constant value
- const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
-
- // Copy the src component of src to zero or more color components of a copy of dest
- const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
-
- // Scan the entire image and find the minimum & maximum pixel intensity
- const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
-
- // Compute the partial derivatives of an image
- const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
-
- /**
- * SpeedyProgramGroupUtils
- * Utility operations
- */
- class SpeedyProgramGroupUtils extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // render to the canvas
- .declare('renderToCanvas', flipY, Object.assign({}, this.program.rendersToCanvas()))
-
- // copy image
- .declare('copy', copy)
-
- // copy keypoints
- .declare('copyKeypoints', copyKeypoints)
-
- // copy 2D vectors
- .declare('copy2DVectors', copy2DVectors)
-
- // Fill image with a constant
- .declare('fill', fill)
-
- // Fill zero or more color components of the input image with a constant value
- .declare('fillComponents', fillComponents)
-
- // Copy the src component of src to zero or more color components of a copy of dest
- .declare('copyComponents', copyComponents)
-
- // find minimum & maximum pixel intensity
- .declare('scanMinMax2D', scanMinMax2D, Object.assign({}, this.program.usesPingpongRendering()))
-
- // Compute the partial derivatives of an image
- .declare('sobelDerivatives', sobelDerivatives);
- }
- }
- // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
- var convolution = __nested_webpack_require_314174__(1672);
- ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * filters.js
- * Image filtering on the GPU
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Convert to greyscale
- const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl').withArguments('image');
-
- // Convolution
- const filters_convolution = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl').withDefines({
- 'KERNEL_SIZE_SQUARED': ksize * ksize
- }).withArguments('image', 'kernel'), obj), {});
-
- // Separable convolution
- const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
- 'KERNEL_SIZE': ksize,
- 'AXIS': 0
- }).withArguments('image', 'kernel'), obj), {});
- const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
- 'KERNEL_SIZE': ksize,
- 'AXIS': 1
- }).withArguments('image', 'kernel'), obj), {});
- // Median filter
- const median = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl').withDefines({
- 'KERNEL_SIZE': ksize
- }).withArguments('image'), obj), {});
-
- // Normalize image
- const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
- 'GREYSCALE': 1
- }).withArguments('minmax2d', 'minValue', 'maxValue');
- const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
- 'GREYSCALE': 0
- }).withArguments('minmax2dRGB', 'minValue', 'maxValue');
-
- // Nightvision
- const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
- 'GREYSCALE': 0
- }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
- const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
- 'GREYSCALE': 1
- }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
-
- //
- // Utilities
- //
-
- // Handy conversion for Gaussian filters
- // (symmetric kernel, approx. zero after 3*sigma)
- const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
-
- // Generate a 1D Gaussian kernel
- const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
-
- // Generate a 1D Box filter
- const box = ksize => new Array(ksize).fill(1.0 / ksize);
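-
- // For instance (illustrative values): box(3) yields [1/3, 1/3, 1/3], and a
- // Gaussian kernel of size 9 is generated with sigma = ksize2sigma(9) =
- // max(1.0, 9 / 6.0) = 1.5, so that ~3*sigma roughly spans half the kernel.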
-
- /**
- * SpeedyProgramGroupFilters
- * Image filtering
- */
- class SpeedyProgramGroupFilters extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // convert to greyscale
- .declare('rgb2grey', rgb2grey)
-
- // median filters
- .declare('median3', median[3]) // 3x3 window
- .declare('median5', median[5]) // 5x5 window
- .declare('median7', median[7]) // 7x7 window
-
- // 2D convolution
- .declare('convolution3', filters_convolution[3]) // 3x3 kernel
- .declare('convolution5', filters_convolution[5]) // 5x5 kernel
- .declare('convolution7', filters_convolution[7]) // 7x7 kernel
-
- // 1D separable convolution
- .declare('convolution3x', convolutionX[3]) // 1x3 kernel
- .declare('convolution3y', convolutionY[3]) // 3x1 kernel
- .declare('convolution5x', convolutionX[5]) // 1x5 kernel
- .declare('convolution5y', convolutionY[5]) // 5x1 kernel
- .declare('convolution7x', convolutionX[7])
- .declare('convolution7y', convolutionY[7])
- .declare('convolution9x', convolutionX[9])
- .declare('convolution9y', convolutionY[9])
- .declare('convolution11x', convolutionX[11])
- .declare('convolution11y', convolutionY[11])
- .declare('convolution13x', convolutionX[13])
- .declare('convolution13y', convolutionY[13])
- .declare('convolution15x', convolutionX[15])
- .declare('convolution15y', convolutionY[15])
-
- // normalize image
- .declare('normalizeGreyscale', normalizeGreyscale)
- .declare('normalizeColored', normalizeColored)
-
- // nightvision
- .declare('nightvision', nightvision)
- .declare('nightvisionGreyscale', nightvisionGreyscale)
- .declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31)))
- .declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31)))
- .declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63)))
- .declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63)))
- .declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255)))
- .declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
-
- // gaussian: separable kernels
- // see also: http://dev.theomader.com/gaussian-kernel-calculator/
- .declare('gaussian3x', (0,convolution.convX)([0.25, 0.5, 0.25])) // sigma ~ 1.0
- .declare('gaussian3y', (0,convolution.convY)([0.25, 0.5, 0.25]))
- .declare('gaussian5x', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])) // sigma ~ 1.0
- .declare('gaussian5y', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
- .declare('gaussian7x', (0,convolution.convX)(gaussian(7)))
- .declare('gaussian7y', (0,convolution.convY)(gaussian(7)))
- .declare('gaussian9x', (0,convolution.convX)(gaussian(9)))
- .declare('gaussian9y', (0,convolution.convY)(gaussian(9)))
- .declare('gaussian11x', (0,convolution.convX)(gaussian(11)))
- .declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
-
- // box filter: separable kernels
- .declare('box3x', (0,convolution.convX)(box(3)))
- .declare('box3y', (0,convolution.convY)(box(3)))
- .declare('box5x', (0,convolution.convX)(box(5)))
- .declare('box5y', (0,convolution.convY)(box(5)))
- .declare('box7x', (0,convolution.convX)(box(7)))
- .declare('box7y', (0,convolution.convY)(box(7)))
- .declare('box9x', (0,convolution.convX)(box(9)))
- .declare('box9y', (0,convolution.convY)(box(9)))
- .declare('box11x', (0,convolution.convX)(box(11)))
- .declare('box11y', (0,convolution.convY)(box(11)));
- }
- }
- // EXTERNAL MODULE: ./src/core/speedy-namespace.js
- var speedy_namespace = __nested_webpack_require_314174__(6634);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-descriptordb.js
- * A database of binary descriptors in video memory
- */
-
-
-
-
-
-
- //
- // A database of binary descriptors is a texture that stores
- // a set of (descriptor: uint8_t[]) entries.
- //
-
- /** @type {number} we use RGBA8 textures to store the descriptors */
- const DESCRIPTORDB_BYTESPERPIXEL = 4;
-
- /** @type {number} texture size goes up to 16 MB */
- const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (OpenGL ES 3.0 requires MAX_TEXTURE_SIZE to be at least 2048)
-
- /**
- * Utility for generating a database of binary descriptors in video memory
- */
- class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create a database of binary descriptors
- * @param {SpeedyTexture} texture output texture
- * @param {Uint8Array[]} descriptors binary descriptors
- * @param {number} descriptorSize in bytes, a multiple of 4
- * @returns {SpeedyTexture} texture
- */
- static create(texture, descriptors, descriptorSize) {
- utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
- const numberOfDescriptors = descriptors.length;
- const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
-
- // find an appropriate texture size
- const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
- const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
-
- // setup texture parameters
- const stride = 1 << log2stride;
- const width = stride,
- height = stride; // we use powers-of-two
-
- // are we within storage capacity?
- const capacity = width * height / pixelsPerDescriptor;
- if (numberOfDescriptors > capacity) throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
-
- // create texture data
- const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
- for (let i = 0; i < numberOfDescriptors; i++) {
- const byteOffset = i * descriptorSize;
- const descriptor = descriptors[i];
-
- // validate input
- utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
- utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
-
- // write data
- data.set(descriptor, byteOffset);
- }
-
- // log data for further study
- const MEGABYTE = 1048576;
- const totalSize = numberOfDescriptors * descriptorSize;
- utils/* Utils */.A.log(`Creating a ${width}x${height} database of ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors ` + `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`);
-
- // upload to the texture
- texture.resize(width, height);
- texture.upload(data);
- return texture;
- }
- }
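-
- // Worked example (illustrative numbers): for 1000 descriptors of 32 bytes each,
- // pixelsPerDescriptor = 32 / 4 = 8, n = log2(8 * 1000) / 2 ≈ 6.48 and
- // log2stride = ceil(6.48) = 7, so the texture is 128x128 and can hold up to
- // 128 * 128 / 8 = 2048 descriptors.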
- ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-lsh.js
- * GPU-based LSH tables for fast matching of binary descriptors
- */
-
-
-
-
-
-
- /*
- * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
- * ------------------------------------------------
- *
- * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
- * Indices of keypoint descriptors are stored in several tables, each with many
- * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
- * size to match the keypoints.
- *
- * Buckets in video memory may get full. Wouldn't it be cool if we could use a
- * probabilistic approach to let us work within their storage capacity?
- *
- * Let there be n buckets in a table, each with storage capacity c (holding
- * up to c elements). Buckets are numbered from 0 to n-1.
- *
- * We pick uniformly a random bucket to store a new element in the table. Let
- * X be the chosen bucket. The probability that we'll store the new element in
- * any particular bucket k is:
- *
- * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
- *
- * On average, each new element stored in the table inserts 1/n of an element
- * in each bucket. If we add m new elements to the table, each bucket receives
- * m/n elements, on average(*).
- *
- * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
- * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
- * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
- *
- * Now let Yi be the number of elements inserted in bucket i in m additions to
- * the table. We model Yi as Poisson(m/n), since on average, m additions to
- * the table result in m/n new elements being inserted in bucket i. Buckets
- * are picked independently. Hence, for all i, the probability that we insert
- * q elements in bucket i in m additions to the table is:
- *
- * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
- *
- * Given that each bucket has storage capacity c, we require Yi <= c with a
- * high probability p (say, p = 0.99). This means that, in m additions, we
- * don't want to exceed the capacity c with high probability. So, let us find
- * a (large) value of m such that:
- *
- * P(Yi <= c) >= p
- *
- * Sounds good! We can find the largest matching m using binary search.
- *
- * I don't think we need to enforce a high probability that ALL buckets stay
- * within their capacity - n is large, we need to use the available space, and
- * we have multiple tables anyway.
- *
- * In practice, the assumption that buckets are picked uniformly doesn't hold:
- * keypoints that are nearby tend to have similar descriptors and buckets are
- * picked according to those descriptors. Still, this model works well enough
- * in practice and it is simple! That's what I like about it!
- *
- * ... now, how I actually do the matching is the theme of the next episode!
- */
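- /*
-  * Editor's sketch (not part of the library): the capacity search described above,
-  * written directly from the model. Given n buckets, bucket capacity c and a target
-  * probability p, find the largest m such that P(Poisson(m/n) <= c) >= p.
-  * The names poissonCDF and largestM are hypothetical; the production equivalents
-  * are cumulativePoisson() and findTableCapacity() further below in this module.
-  */
- const poissonCDF = (lambda, c) => {
- let sum = 1, term = 1; // q = 0 term
- for (let q = 1; q <= c; q++) sum += (term *= lambda / q); // accumulate lambda^q / q!
- return sum * Math.exp(-lambda);
- };
- const largestM = (n, c, p) => {
- let lo = 1, hi = n * c; // the answer lies in [lo, hi]
- while (lo < hi) {
- const mid = Math.ceil((lo + hi) / 2);
- if (poissonCDF(mid / n, c) >= p) lo = mid; else hi = mid - 1; // keep the largest feasible m
- }
- return lo;
- };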
-
- /** @type {number} Default number of tables in a LSH data structure */
- const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
-
- /** @type {number} Default number of bits of a hash */
- const LSH_DEFAULT_HASH_SIZE = 15;
-
- /** @type {number[]} Acceptable number of tables for a LSH data structure */
- const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
-
- /** @type {number[]} Acceptable values for hashSize, in bits */
- const LSH_ACCEPTABLE_HASH_SIZES = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20];
-
- /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
- const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32, 64];
-
- /**
- * @typedef {Object} LSHProfile LSH profile
- * @property {string} name name of the profile
- * @property {number} capacity maximum number of keypoints that can be stored in such a table
- * @property {number} hashSize number of bits in a keypoint descriptor hash (one of LSH_ACCEPTABLE_HASH_SIZES)
- * @property {number} tableCount number of tables, preferably a power of 2 (one of LSH_ACCEPTABLE_NUMBER_OF_TABLES)
- * @property {number} bucketCapacity maximum number of entries of a bucket of a table
- */
-
- /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
- const generateLSHProfiles = (t, h, p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [{
- name: 'x-small',
- bucketCapacity: 1,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 1, p)
- }, {
- name: 'small',
- bucketCapacity: 2,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 2, p)
- }, {
- name: 'small-plus',
- bucketCapacity: 3,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 3, p)
- }, {
- name: 'medium',
- bucketCapacity: 4,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 4, p)
- }, {
- name: 'medium-plus',
- bucketCapacity: 5,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 5, p)
- }, {
- name: 'large',
- bucketCapacity: 6,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 6, p)
- }, {
- name: 'x-large',
- bucketCapacity: 8,
- tableCount: t,
- hashSize: h,
- capacity: findTableCapacity(h, 8, p)
- }];
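- // Given the number of descriptors to store, the SpeedyLSH constructor below picks the
- // first (smallest) profile whose capacity is large enough, falling back to 'x-large'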
-
- //
- // LSH hash sequences: random bits in increasing order
- * We generate a few sequences (one for each table) supporting up to LSH_SEQUENCE_MAXLEN hash bits
- // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
- //
-
- /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
- /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
- /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
-
- /** @type {number} maximum number of elements of a sequence */
- const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
-
- /** @type {number} number of sequences in a BitSequences object */
- const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
-
- /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
- const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i + 1) * LSH_SEQUENCE_MAXLEN).sort()), seq);
-
- /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
- const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray((i + 1) * LSH_SEQUENCE_MAXLEN - p, (i + 1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)), seq);
-
- /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
- const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p, o) => (p[o] = f(o), p), {}))(h => ({
- // for 256-bit descriptors
- 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
- // for 512-bit descriptors
- 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN))))
- }));
-
- //
- // Misc
- //
-
- /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
- const LSH_BYTESPERPIXEL = 4;
-
- /** @type {function(number): number} next power of 2 */
- const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
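- // e.g., nextPot(3000) == 4096 and nextPot(1) == 1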
-
- /**
- * GPU-based LSH tables for fast matching of binary descriptors
- */
- class SpeedyLSH {
- /**
- * Constructor
- * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
- * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
- * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
- * @param {number} [tableCount] number of LSH tables, preferably a power of two
- * @param {number} [hashSize] number of bits of a hash of a descriptor
- * @param {number} [probability] probability of no discard events happening in the theoretical model
- */
- constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
- const descriptorCount = descriptors.length;
- const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
- const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
-
- // validate input
- utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
- utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
- utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
- utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
- utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
-
- /** @type {LSHProfile} LSH profile */
- this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
-
- /** @type {number} descriptor size, in bytes */
- this._descriptorSize = descriptorSize;
-
- /** @type {number} number of descriptors */
- this._descriptorCount = descriptorCount;
-
- /** @type {BitSequences} bit sequences */
- this._sequences = this._pickSequences(this._descriptorSize);
-
- /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
- this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
-
- /** @type {SpeedyTexture} a storage of descriptors */
- this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- return this._descriptorSize;
- }
-
- /**
- * Number of descriptors stored in this LSH data structure
- * @returns {number}
- */
- get descriptorCount() {
- return this._descriptorCount;
- }
-
- /**
- * LSH bit sequences
- * @returns {BitSequences}
- */
- get sequences() {
- return this._sequences;
- }
-
- /**
- * Number of bits that make a hash
- * @returns {number}
- */
- get hashSize() {
- return this._profile.hashSize;
- }
-
- /**
- * Maximum number of descriptors that can be stored in a bucket of a table
- * @returns {number}
- */
- get bucketCapacity() {
- return this._profile.bucketCapacity;
- }
-
- /**
- * How many buckets per table do we have?
- * @returns {number}
- */
- get bucketsPerTable() {
- return 1 << this._profile.hashSize;
- }
-
- /**
- * Number of LSH tables
- * @returns {number}
- */
- get tableCount() {
- return this._profile.tableCount;
- }
-
- /**
- * Size of one LSH table, in bytes
- * @returns {number}
- */
- get tableSize() {
- return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
- }
-
- /**
- * Size of all LSH tables combined, in bytes
- * @returns {number}
- */
- get totalSize() {
- // actually, the total memory in VRAM may be a bit larger than
- // this value, depending on the actual size of the texture
- return this.tableCount * this.tableSize;
- }
-
- /**
- * LSH tables texture
- * @returns {SpeedyDrawableTexture}
- */
- get tables() {
- return this._tables;
- }
-
- /**
- * A collection of descriptors
- * @returns {SpeedyDrawableTexture}
- */
- get descriptorDB() {
- return this._descriptorDB;
- }
-
- /**
- * Pick the appropriate LSH sequences for a particular descriptor size
- * @param {number} descriptorSize in bytes
- * @returns {BitSequences}
- */
- _pickSequences(descriptorSize) {
- utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
- utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
- return LSH_SEQUENCES[this.hashSize][descriptorSize];
- }
-
- /**
- * Create LSH tables
- * @param {SpeedyTexture} texture output texture
- * @param {BitSequences} sequences bit sequences
- * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
- * @param {number} descriptorSize in bytes
- * @returns {SpeedyTexture} texture
- */
- _createStaticTables(texture, sequences, descriptors, descriptorSize) {
- const END_OF_LIST = 0xFFFFFFFF;
- const profileName = this._profile.name;
- const tableCapacity = this._profile.capacity;
- const tableCount = this.tableCount;
- const bucketsPerTable = this.bucketsPerTable;
- const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
- const hashSize = this.hashSize;
- const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
- const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
- const textureHeight = Math.ceil(numberOfPixels / textureWidth);
- const numberOfDescriptors = descriptors.length;
-
- // validate input
- utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
- utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
- utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
-
- // log
- const MEGABYTE = 1048576;
- utils/* Utils */.A.log(`Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` + `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` + `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB)`);
-
- // warn the user if there are too many descriptors
- if (numberOfDescriptors > tableCapacity) {
- const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
- utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
- }
-
- // create empty LSH tables
- const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
- const bytes = new Uint8Array(buffer).fill(0xFF);
- const data = new DataView(buffer);
-
- // shuffle the descriptors...
- // it seems like a good idea to handle collisions of similar descriptors,
- // which may be located next to each other in the array
- const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
-
- // for each descriptor
- // do everything in little-endian format!
- const numberOfDiscardedDescriptorsPerTable = new Array(tableCount).fill(0);
- for (let i = 0; i < numberOfDescriptors; i++) {
- const descriptorIndex = permutation[i]; //i;
- const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
-
- // for each table
- for (let table = 0; table < tableCount; table++) {
- // compute hash & memory addresses
- const hash = hashes[table];
- const tableByteOffset = table * bucketsPerTable * bucketSize;
- const bucketByteOffset = tableByteOffset + hash * bucketSize;
-
- // find the end of the list
- let index = END_OF_LIST;
- for (let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
- const byteOffset = bucketByteOffset + entryByteOffset;
- index = data.getUint32(byteOffset, true);
-
- // add the keypoint
- if (index == END_OF_LIST) {
- data.setUint32(byteOffset, descriptorIndex, true);
- break;
- }
- }
-
- // note: if the bucket is full, we just discard the entry :\
- // we give this event a probabilistic treatment (see above),
- // so it happens with low probability
- if (index != END_OF_LIST) numberOfDiscardedDescriptorsPerTable[table]++;
- }
- }
-
- // log data for further study
- const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
- const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
- utils/* Utils */.A.log(`When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` + `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` + `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` + `Minimum: ${Math.min(...profile).toFixed(2)}%. ` + `Table capacity: ${tableCapacity}.`);
-
- // upload the LSH tables to the GPU
- texture.resize(textureWidth, textureHeight);
- texture.upload(bytes);
- return texture;
- }
-
- /**
- * Pick bits from a binary descriptor
- * @param {Uint8Array} descriptor a single descriptor
- * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
- * @returns {number[]} hash code for each table
- */
- _hashCodes(descriptor, sequences) {
- const tableCount = this.tableCount;
- const hashSize = this.hashSize;
- const bucketsPerTable = this.bucketsPerTable;
- const hashes = new Array(tableCount);
- //const descriptorSize = descriptor.length;
-
- // just to be sure...
- utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN && sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount);
-
- // for each table
- for (let table = 0; table < tableCount; table++) {
- const offset = LSH_SEQUENCE_MAXLEN * table;
-
- // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
- let hash = 0;
- for (let i = 0; i < hashSize; i++) {
- let bit = sequences[offset + i];
- let b = bit >>> 3;
- let m = 1 << (bit & 7);
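- // bit >>> 3 is the index of the byte holding this bit; 1 << (bit & 7) is its mask within that byte.
- // Only the first hashSize entries of each subsequence are read, so the 0xBADCAFE padding is never used.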
-
- //Utils.assert(b < descriptorSize);
- hash = hash << 1 | (descriptor[b] & m) != 0;
- }
-
- // validate & store
- utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
- hashes[table] = hash;
- }
-
- // done!
- return hashes;
- }
- }
-
- /**
- * Compute P(X <= k), where X ~ Poisson(lambda)
- * @param {number} lambda positive number
- * @param {number} k non-negative integer
- * @returns {number}
- */
- function cumulativePoisson(lambda, k) {
- const exp = Math.exp(-lambda);
- let sum = 1,
- fat = 1,
- pow = 1;
-
- // k should be small!!!
- for (let i = 1; i <= k; i++) sum += (pow *= lambda) / (fat *= i);
- return sum * exp;
- }
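- // e.g., cumulativePoisson(1, 2) = e^(-1) * (1 + 1 + 1/2) ≈ 0.9197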
-
- /**
- * Find the maximum number of keypoint descriptors that a table can hold
- * @param {number} hashSize positive integer
- * @param {number} bucketCapacity positive integer
- * @param {number} [probability] probability of no discard events happening in the theoretical model
- * @return {number} optimal table capacity
- */
- function findTableCapacity(hashSize, bucketCapacity, probability = 0.99) {
- const n = 1 << hashSize; // number of buckets
- const c = bucketCapacity;
- const p = probability;
- let l = 1,
- r = n * c; // watch for overflow!
- let m = 0,
- pm = 0;
-
- // binary search
- while (l < r) {
- m = Math.floor((l + r) / 2);
- pm = cumulativePoisson(m / n, c);
- if (pm > p) //if(1-pm < 1-p)
- l = m + 1;
- else
- r = m;
- }
- return m;
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * keypoints.js
- * Facade for various keypoint detection algorithms
- */
-
-
-
-
-
-
-
- // FAST corner detector
- const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl').withDefines({
- 'FAST_TYPE': 916
- }).withArguments('corners', 'pyramid', 'lod', 'threshold');
-
- // Harris corner detector
- const harris = [1, 3, 5, 7].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl').withDefines({
- 'WINDOW_SIZE': win
- }).withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian'), obj), {});
- const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl').withArguments('corners', 'iterationNumber');
- const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl').withArguments('corners', 'maxScore', 'quality');
-
- // Subpixel refinement
- const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 0
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 1
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 2
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
- const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
- 'METHOD': 3
- }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
-
- // Scale refinement
- const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
- 'METHOD': 0
- }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
- 'METHOD': 1
- }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
-
- // Pixel allocation
- const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
- const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
- const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl').withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // ORB descriptors
- const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl').withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
- const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl').withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Non-maximum suppression
- const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
- 'MULTISCALE': 0
- }).withArguments('image', 'lodStep');
- const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
- 'MULTISCALE': 1
- }).withArguments('image', 'lodStep');
- const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl').withArguments('corners');
- const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
- 'USE_LAPLACIAN': 1
- }).withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
- const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
- 'USE_LAPLACIAN': 0
- }).withArguments('corners', 'pyramid', 'lodStep');
- const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl').withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
-
- // Keypoint tracking & optical-flow
- const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl').withDefines({
- 'WINDOW_SIZE': win
- }).withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {});
- const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl').withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Brute-force matching
- const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 0
- });
- const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 1
- });
- const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
- const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 32,
- 'NUMBER_OF_KEYPOINTS_PER_PASS': 16
- }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
- const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 64,
- 'NUMBER_OF_KEYPOINTS_PER_PASS': 8
- }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
-
- // LSH-based KNN matching
- const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 0
- });
- const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
- 'ENCODE_FILTERS': 1
- });
- const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => (obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => (obj[hashSize] = [0, 1, 2].reduce((obj, level) => (obj[level] = (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl').withDefines({
- 'DESCRIPTOR_SIZE': descriptorSize,
- 'HASH_SIZE': hashSize,
- 'LEVEL': level,
- 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
- 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT
- }).withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {}), obj), {}), obj), {});
- const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
-
- // Keypoint sorting
- const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 1
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 2
- }).withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
- const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
- 'STAGE': 3
- }).withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
-
- // Keypoint mixing
- const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 1
- }).withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
- const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 2
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
- const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 3
- }).withArguments('array', 'blockSize');
- const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 5
- }).withArguments('array');
- const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
- 'STAGE': 4
- }).withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Keypoint encoding
- const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
- 'FS_OUTPUT_TYPE': 2,
- 'STAGE': 1
- }).withArguments('corners');
- const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl').withDefines({
- 'FS_OUTPUT_TYPE': 2,
- 'FS_USE_CUSTOM_PRECISION': 1,
- 'STAGE': 2
- }).withArguments('lookupTable', 'blockSize', 'width', 'height');
- const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
- 'STAGE': -1
- }).withArguments('lookupTable');
- const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl').withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
- const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl').withArguments('corners', 'imageSize');
- const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl').withDefines({
- 'MAX_ITERATIONS': 6
- }) // dependent texture reads :(
- .withArguments('offsetsImage', 'imageSize');
- const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl').withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl').withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl').withArguments();
- const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl').withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl').withDefines({
- // UBOs can hold at least 16KB of data;
- // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
- // according to the GL ES 3 reference.
- // Each keypoint uses 16 bytes (vec4)
- 'BUFFER_SIZE': 1024 //16384 / 16
- }).withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Geometric transformations
- const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl').withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
-
- // Keypoint filters
- const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl').withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl').withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
- const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 32
- }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
- const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
- 'DESCRIPTOR_SIZE': 64
- }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
-
- // Other utilities
- const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl').withDefines({
- 'PERMUTATION_MAXLEN': 2048
- }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
- const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl').withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
-
- /**
- * SpeedyProgramGroupKeypoints
- * Keypoint detection
- */
- class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- //
- // FAST corner detector
- //
- .declare('fast9_16', fast9_16, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Harris corner detector
- //
- .declare('harris1', harris[1], Object.assign({}, this.program.usesPingpongRendering())).declare('harris3', harris[3], Object.assign({}, this.program.usesPingpongRendering())).declare('harris5', harris[5], Object.assign({}, this.program.usesPingpongRendering())).declare('harris7', harris[7], Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreFindMax', harrisScoreFindMax, Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreCutoff', harrisScoreCutoff)
-
- //
- // Subpixel refinement
- //
- .declare('subpixelQuadratic1d', subpixelQuadratic1d).declare('subpixelTaylor2d', subpixelTaylor2d).declare('subpixelBicubic', subpixelBicubic).declare('subpixelBilinear', subpixelBilinear)
-
- //
- // Scale refinement
- //
- .declare('refineScaleLoG', refineScaleLoG).declare('refineScaleFAST916', refineScaleFAST916)
-
- //
- // Pixel allocation
- //
- .declare('allocateDescriptors', allocateDescriptors).declare('allocateExtra', allocateExtra).declare('transferToExtra', transferToExtra)
-
- //
- // ORB descriptors
- //
- .declare('orbDescriptor', orbDescriptor).declare('orbOrientation', orbOrientation)
-
- //
- // Non-maximum suppression
- //
- .declare('nonmax', nonMaxSuppression).declare('pyrnonmax', multiscaleNonMaxSuppression).declare('nonmaxSpace', nonmaxSpace).declare('nonmaxScale', nonmaxScale).declare('nonmaxScaleSimple', nonmaxScaleSimple).declare('laplacian', laplacian)
-
- //
- // LK optical-flow
- //
- .declare('lk21', lk[21], Object.assign({}, this.program.usesPingpongRendering())).declare('lk19', lk[19], Object.assign({}, this.program.usesPingpongRendering())).declare('lk17', lk[17], Object.assign({}, this.program.usesPingpongRendering())).declare('lk15', lk[15], Object.assign({}, this.program.usesPingpongRendering())).declare('lk13', lk[13], Object.assign({}, this.program.usesPingpongRendering())).declare('lk11', lk[11], Object.assign({}, this.program.usesPingpongRendering())).declare('lk9', lk[9], Object.assign({}, this.program.usesPingpongRendering())).declare('lk7', lk[7], Object.assign({}, this.program.usesPingpongRendering())).declare('lk5', lk[5], Object.assign({}, this.program.usesPingpongRendering())).declare('lk3', lk[3], Object.assign({}, this.program.usesPingpongRendering())).declare('transferFlow', transferFlow)
-
- //
- // Brute-force KNN matching
- //
- .declare('bfMatcherInitCandidates', bfMatcherInitCandidates).declare('bfMatcherInitFilters', bfMatcherInitFilters).declare('bfMatcherTransfer', bfMatcherTransfer, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher32', bfMatcher32, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher64', bfMatcher64, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // LSH-based KNN matching
- //
- .declare('lshKnnInitCandidates', lshKnnInitCandidates).declare('lshKnnInitFilters', lshKnnInitFilters).declare('lshKnnTransfer', lshKnnTransfer, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Keypoint sorting
- //
- .declare('sortCreatePermutation', sortCreatePermutation).declare('sortMergePermutation', sortMergePermutation, Object.assign({}, this.program.usesPingpongRendering())).declare('sortApplyPermutation', sortApplyPermutation)
-
- //
- // Keypoint mixing
- //
- .declare('mixKeypointsPreInit', mixKeypointsPreInit).declare('mixKeypointsInit', mixKeypointsInit).declare('mixKeypointsSort', mixKeypointsSort, Object.assign({}, this.program.usesPingpongRendering())).declare('mixKeypointsView', mixKeypointsView).declare('mixKeypointsApply', mixKeypointsApply)
-
- //
- // Keypoint encoders
- //
- .declare('encodeNullKeypoints', encodeNullKeypoints).declare('encodeKeypoints', encodeKeypoints).declare('initLookupTable', initLookupTable).declare('sortLookupTable', sortLookupTable, Object.assign({}, this.program.usesPingpongRendering())).declare('viewLookupTable', viewLookupTable).declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets).declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointPositions', encodeKeypointPositions, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointProperties', encodeKeypointProperties).declare('transferOrientation', transferOrientation).declare('uploadKeypoints', uploadKeypoints, Object.assign({}, this.program.usesPingpongRendering()))
-
- //
- // Geometric transformations
- //
- .declare('applyHomography', applyHomography)
-
- //
- // Keypoint filters
- //
- .declare('clipBorder', clipBorder).declare('distanceFilter', distanceFilter).declare('hammingDistanceFilter32', hammingDistanceFilter32).declare('hammingDistanceFilter64', hammingDistanceFilter64)
-
- //
- // Other utilities
- //
- .declare('shuffle', shuffle).declare('clip', clip);
-
- //
- // LSH-based KNN matching
- //
- for (const descriptorSize of Object.keys(lshKnn)) {
- for (const hashSize of Object.keys(lshKnn[descriptorSize])) {
- for (const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
- const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
- this.declare(name, lshKnn[descriptorSize][hashSize][level], Object.assign({}, this.program.usesPingpongRendering()));
- }
- }
- }
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pyramids.js
- * Image pyramids
- */
-
-
-
-
-
-
-
- //
- // Shaders
- //
-
- const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
- const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
-
- /**
- * SpeedyProgramGroupPyramids
- * Image pyramids
- */
- class SpeedyProgramGroupPyramids extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this
- // upsampling & downsampling
- .declare('upsample2', upsample2).declare('downsample2', downsample2)
-
- // separable kernels for gaussian smoothing
- // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
- // pick a = 0.4 for gaussian approximation (sigma = 1)
- .declare('smoothX', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('smoothY', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
- /*
- .declare('reduce', conv2D([
- 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
- 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
- 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
- 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
- 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
- ]))
- */
-
- // smoothing for 2x image
- // same rules as above with sum(k) = 2
- .declare('smoothX2', (0,convolution.convX)([0.1, 0.5, 0.8, 0.5, 0.1
- // NOTE: this would saturate the image, but we apply it
- // on a 2x upsampled version with lots of zero pixels
- ])).declare('smoothY2', (0,convolution.convY)([0.1, 0.5, 0.8, 0.5, 0.1], 1.0 / 2.0));
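- // Editor's note on the kernels above: with a = 0.4, the constraints a + 2c = 2b and
- // a + 2b + 2c = 1 give b + c = 0.3 and b - c = 0.2, hence b = 0.25 and c = 0.05,
- // i.e. [0.05, 0.25, 0.4, 0.25, 0.05]. The 2x kernel [0.1, 0.5, 0.8, 0.5, 0.1] keeps
- // the same proportions with sum(k) = 2, and smoothY2 is additionally scaled by 1/2.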
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transforms.js
- * Geometric transformations
- */
-
-
-
-
-
-
- //
- // Shaders
- //
-
- // Perspective warp
- const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl').withArguments('image', 'inverseHomography');
-
- // Resize image
- const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
- 'INTERPOLATION_METHOD': 0 // Nearest neighbors
- }).withArguments('image');
- const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
- 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
- }).withArguments('image');
-
- // Additive mix (TODO create a new program group?)
- const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl').withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
-
- /**
- * SpeedyProgramGroupTransforms
- * Geometric transformations
- */
- class SpeedyProgramGroupTransforms extends SpeedyProgramGroup {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu
- */
- constructor(gpu) {
- super(gpu);
- this.declare('warpPerspective', warpPerspective).declare('resizeNearest', resizeNearest).declare('resizeBilinear', resizeBilinear).declare('additiveMix', additiveMix);
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-program-center.js
- * An access point to all programs that run on the GPU
- */
-
-
-
-
-
-
-
-
-
- /**
- * An access point to all programs that run on the GPU
- * All program groups can be accessed via this class
- */
- class SpeedyProgramCenter {
- /**
- * Class constructor
- * @param {SpeedyGPU} gpu reference to SpeedyGPU
- */
- constructor(gpu) {
- // Note: we instantiate the program groups lazily
-
- /** @type {SpeedyGPU} reference to SpeedyGPU */
- this._gpu = gpu;
-
- /** @type {SpeedyProgramGroupFilters} image filters */
- this._filters = null;
-
- /** @type {SpeedyProgramGroupTransforms} geometric transformations */
- this._transforms = null;
-
- /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
- this._pyramids = null;
-
- /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
- this._keypoints = null;
-
- /** @type {SpeedyProgramGroupUtils} utility programs */
- this._utils = null;
- }
-
- /**
- * Image filters & convolutions
- * @returns {SpeedyProgramGroupFilters}
- */
- get filters() {
- return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
- }
-
- /**
- * Geometric transformations
- * @returns {SpeedyProgramGroupTransforms}
- */
- get transforms() {
- return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
- }
-
- /**
- * Image pyramids & scale-space
- * @returns {SpeedyProgramGroupPyramids}
- */
- get pyramids() {
- return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
- }
-
- /**
- * Keypoint detection & description
- * @returns {SpeedyProgramGroupKeypoints}
- */
- get keypoints() {
- return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
- }
-
- /**
- * Utility programs
- * @returns {SpeedyProgramGroupUtils}
- */
- get utils() {
- return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
- }
-
- /**
- * Release all programs from all groups. You'll
- * no longer be able to use any of them.
- * @returns {null}
- */
- release() {
- for (const key in this) {
- if (Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
- const group = this[key];
- if (group instanceof SpeedyProgramGroup) group.release();
- }
- }
- return null;
- }
- }
- ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-texture-pool.js
- * Texture pool
- */
-
-
-
-
-
-
- // Constants
- const DEFAULT_CAPACITY = 1024;
- const BUCKET = Symbol('Bucket');
-
- /*
-
- === Heuristics to figure out the capacity of a texture pool ===
-
- 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
-
- 2. Figure out the average texture size in your application (say, 640x360 pixels).
-
- 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
- uses 4 bytes (RGBA format).
-
- 4. Divide the maximum amount of VRAM by the average texture size in bytes
- (say, 72). That's the capacity of the pool.
-
- Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
-
- Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
-
- */
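- /*
-  * Editor's sketch (hypothetical helper, not part of the library): the heuristic above,
-  * in code. RGBA8 textures use 4 bytes per pixel.
-  */
- const estimatePoolCapacity = (vramBudgetInBytes, avgTextureWidth, avgTextureHeight) => {
- const bytesPerTexture = avgTextureWidth * avgTextureHeight * 4;
- return Math.max(1, Math.floor(vramBudgetInBytes / bytesPerTexture));
- };
- // e.g., estimatePoolCapacity(64 * 1024 * 1024, 640, 360) === 72, matching the example above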
-
- /**
- * @typedef {number} TextureBucketIndex index of a bucket in a pool
- */
-
- /**
- * A bucket
- */
- class TextureBucket {
- /**
- * Constructor
- * @param {SpeedyDrawableTexture} texture managed texture
- * @param {TextureBucketIndex} index index of this bucket
- * @param {TextureBucketIndex} next index of the next bucket
- */
- constructor(texture, index, next) {
- /** @type {SpeedyDrawableTexture} managed texture */
- this.texture = texture;
-
- /** @type {TextureBucketIndex} index of this bucket */
- this.index = index;
-
- /** @type {TextureBucketIndex} index of the next bucket */
- this.next = next;
-
- /** @type {boolean} whether the texture is available or not */
- this.free = true;
- }
- }
-
- /**
- * Texture pool
- */
- class SpeedyTexturePool {
- /**
- * Constructor
- * @param {SpeedyGPU} gpu
- * @param {number} [capacity] number of textures in the pool
- */
- constructor(gpu, capacity = DEFAULT_CAPACITY) {
- utils/* Utils */.A.assert(capacity > 0);
-
- /** @type {TextureBucket[]} buckets */
- this._bucket = Array.from({
- length: capacity
- }, (_, i) => new TextureBucket(null, i, i - 1));
-
- /** @type {TextureBucketIndex} index of an available bucket */
- this._head = capacity - 1;
-
- /** @type {SpeedyGPU} GPU instance */
- this._gpu = gpu;
- }
-
- /**
- * Get a texture from the pool
- * @returns {SpeedyDrawableTexture}
- */
- allocate() {
- if (this._head < 0) throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
- const bucket = this._bucket[this._head];
- bucket.free = false;
- this._head = bucket.next;
- if (bucket.texture == null)
- // lazy instantiation
- bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
- return bucket.texture;
- }
-
- /**
- * Put a texture back in the pool
- * @param {SpeedyDrawableTexture} texture
- * @returns {null}
- */
- free(texture) {
- const bucket = texture[BUCKET];
- utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
- bucket.next = this._head;
- bucket.free = true;
- this._head = bucket.index;
- return null;
- }
-
- /**
- * Release the texture pool
- * @returns {null}
- */
- release() {
- for (let i = 0; i < this._bucket.length; i++) {
- if (this._bucket[i].texture != null) this._bucket[i].texture = this._bucket[i].texture.release();
- }
- return null;
- }
-
- /**
- * Create a texture with a reference to a bucket
- * @param {WebGL2RenderingContext} gl
- * @param {TextureBucket} bucket
- * @returns {SpeedyDrawableTexture}
- */
- static _createManagedTexture(gl, bucket) {
- const texture = new SpeedyDrawableTexture(gl, 1, 1);
- return Object.defineProperty(texture, BUCKET, {
- configurable: false,
- enumerable: false,
- writable: false,
- value: bucket
- });
- }
- }
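- // Editor's usage sketch (hypothetical `pool` variable): textures borrowed with allocate()
- // must be returned with free(), otherwise the pool is eventually exhausted:
- //   const tmp = pool.allocate();
- //   /* ... draw into tmp ... */
- //   pool.free(tmp);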
- // EXTERNAL MODULE: ./src/utils/types.js
- var types = __nested_webpack_require_314174__(6049);
- ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-media-source.js
- * Wrappers around <img>, <video>, <canvas>, etc.
- */
-
-
-
-
-
-
- /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
-
- /** Internal token for protected constructors */
- const PRIVATE_TOKEN = Symbol();
-
- /**
- * An abstract media source: a wrapper around native
- * elements such as: HTMLImageElement, HTMLVideoElement,
- * and so on
- * @abstract
- */
- class SpeedyMediaSource {
- /**
- * @protected Constructor
- * @param {symbol} token
- */
- constructor(token) {
- // the constructor is not public
- if (token !== PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
-
- /** @type {SpeedyMediaSourceNativeElement} underlying media object */
- this._data = null;
- }
-
- /**
- * Load a media source
- * @param {SpeedyMediaSourceNativeElement} wrappedObject
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(wrappedObject) {
- if (wrappedObject instanceof HTMLImageElement) return SpeedyImageMediaSource.load(wrappedObject);
- else if (wrappedObject instanceof HTMLVideoElement) return SpeedyVideoMediaSource.load(wrappedObject);
- else if (wrappedObject instanceof HTMLCanvasElement) return SpeedyCanvasMediaSource.load(wrappedObject);
- else if (typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas) return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);
- else if (wrappedObject instanceof ImageBitmap) return SpeedyBitmapMediaSource.load(wrappedObject);
- else if (wrappedObject instanceof ImageData) return SpeedyDataMediaSource.load(wrappedObject);
- else throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
- }
-
- /**
- * The underlying wrapped object
- * @returns {SpeedyMediaSourceNativeElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * Is the underlying media loaded?
- * @returns {boolean}
- */
- isLoaded() {
- return this._data !== null;
- }
-
- /**
- * The type of the underlying media source
- * @abstract
- * @returns {MediaType}
- */
- get type() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Media width, in pixels
- * @abstract
- * @returns {number}
- */
- get width() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Media height, in pixels
- * @abstract
- * @returns {number}
- */
- get height() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Clone this media source
- * @abstract
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Release resources associated with this object
- * @returns {null}
- */
- release() {
- return this._data = null;
- }
-
- /**
- * Load the underlying media
- * @abstract
- * @param {SpeedyMediaSourceNativeElement} element
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(element) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Wait for an event to be triggered in an element
- * @param {Element} element
- * @param {string} eventName
- * @param {number} [timeout] in ms
- * @returns {SpeedyPromise<Element>}
- */
- static _waitUntil(element, eventName, timeout = 30000) {
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
- const timer = setTimeout(() => {
- clear();
- reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
- }, timeout);
- function clear() {
- clearTimeout(timer);
- element.removeEventListener('error', handleError, false);
- element.removeEventListener(eventName, handleSuccess, false);
- }
- function handleError() {
- const hasError = element.error !== null && typeof element.error === 'object';
- const error = hasError ? element.error : {
- code: -1,
- message: ''
- };
- const info = `${error.message} (error code ${error.code})`;
- clear();
- reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
- }
- function handleSuccess() {
- clear();
- resolve(element);
- }
- element.addEventListener('error', handleError, false);
- element.addEventListener(eventName, handleSuccess, false);
- });
- }
- }
-
- /**
- * Image media source:
- * a wrapper around HTMLImageElement
- */
- class SpeedyImageMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLImageElement} image element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLImageElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Image;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.naturalWidth : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.naturalHeight : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newNode = /** @type {HTMLImageElement} */this._data.cloneNode(true);
- return SpeedyImageMediaSource.load(newNode);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLImageElement} image
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(image) {
- if (this.isLoaded()) this.release();
- if (image.complete && image.naturalWidth !== 0) {
- // already loaded?
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = image;
- resolve(this);
- });
- } else {
- return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
- this._data = image;
- return this;
- });
- }
- }
-
- /**
- * Load the underlying media
- * @param {HTMLImageElement} image
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(image) {
- return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
- }
- }
-
- /**
- * Video media source:
- * a wrapper around HTMLVideoElement
- */
- class SpeedyVideoMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLVideoElement} video element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLVideoElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Video;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- // Warning: videoWidth & videoHeight may change at any time !!!
- // so you can't cache these dimensions
- return this._data ? this._data.videoWidth : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.videoHeight : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newNode = /** @type {HTMLVideoElement} */this._data.cloneNode(true);
- return SpeedyVideoMediaSource.load(newNode);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(video) {
- if (this.isLoaded()) this.release();
- utils/* Utils */.A.log('Loading a video...');
- video.load();
- return SpeedyVideoMediaSource._waitUntilPlayable(video).then(() => {
- return SpeedyVideoMediaSource._handleAutoplay(video).then(() => {
- this._data = video;
- return this;
- });
- });
- }
-
- /**
- * Load the underlying media
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(video) {
- return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
- }
-
- /**
- * Handle browser quirks concerning autoplay
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<void>} gets rejected if we can't autoplay
- */
- static _handleAutoplay(video) {
- // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
- // Chrome policy: https://developer.chrome.com/blog/autoplay/
- // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
-
- // videos marked as autoplay may not play if not visible on-screen
- // videos marked as autoplay should be muted
- if (video.autoplay /*&& video.muted*/) {
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- const promise = video.play();
-
- // handle older browsers
- if (promise === undefined) {
- resolve();
- return;
- }
-
- // wrap promise
- promise.then(resolve, reject);
- });
- }
-
- // nothing to do
- return speedy_promise/* SpeedyPromise */.i.resolve();
- }
-
- /**
- * Wait for the input video to be playable
- * @param {HTMLVideoElement} video
- * @returns {SpeedyPromise<HTMLVideoElement>} resolves to the input video when it can be played
- */
- static _waitUntilPlayable(video) {
- const TIMEOUT = 30000,
- INTERVAL = 500;
- if (video.readyState >= 3) return speedy_promise/* SpeedyPromise */.i.resolve(video);
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- let ms = 0,
- t = setInterval(() => {
- //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
- if (video.readyState >= 3) {
- clearInterval(t);
- resolve(video);
- } else if ((ms += INTERVAL) >= TIMEOUT) {
- clearInterval(t);
- reject(new utils_errors/* TimeoutError */.MU('The video took too long to load'));
- }
- }, INTERVAL);
- });
- }
- }
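-
- /*
-  * Usage sketch (illustrative only): browsers generally allow autoplay only for muted
-  * videos (or after a user gesture), so a video meant to be wrapped by
-  * SpeedyVideoMediaSource is typically set up like this before loading:
-  *
-  *   const video = document.createElement('video');
-  *   video.src = 'media.mp4'; // hypothetical file
-  *   video.muted = true;
-  *   video.autoplay = true;
-  *   video.playsInline = true;
-  *   SpeedyVideoMediaSource.load(video).then(source => console.log(source.width, source.height));
-  *
-  * If play() is blocked by the browser, the promise returned by load() is rejected
-  * via _handleAutoplay().
-  */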
-
- /**
- * Canvas media source:
- * a wrapper around HTMLCanvasElement
- */
- class SpeedyCanvasMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {HTMLCanvasElement} canvas element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {HTMLCanvasElement}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Canvas;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
- const newContext = newCanvas.getContext('2d');
- newContext.drawImage(this._data, 0, 0);
- return SpeedyCanvasMediaSource.load(newCanvas);
- }
-
- /**
- * Load the underlying media
- * @param {HTMLCanvasElement} canvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(canvas) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = canvas;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {HTMLCanvasElement} canvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(canvas) {
- return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
- }
- }
-
- /**
- * OffscreenCanvas media source:
- * a wrapper around OffscreenCanvas
- */
- class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {OffscreenCanvas} offscreen canvas element */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {OffscreenCanvas}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.OffscreenCanvas;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const newCanvas = new OffscreenCanvas(this.width, this.height);
- const newContext = newCanvas.getContext('2d');
- newContext.drawImage(this._data, 0, 0);
- return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
- }
-
- /**
- * Load the underlying media
- * @param {OffscreenCanvas} offscreenCanvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(offscreenCanvas) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = offscreenCanvas;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {OffscreenCanvas} offscreenCanvas
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(offscreenCanvas) {
- return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
- }
- }
-
- /**
- * Bitmap media source:
- * a wrapper around ImageBitmap
- */
- class SpeedyBitmapMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {ImageBitmap} image bitmap */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {ImageBitmap}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Bitmap;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- createImageBitmap(this._data).then(newBitmap => {
- const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
- newSource._load(newBitmap).then(resolve, reject);
- }, reject);
- });
- }
-
- /**
- * Release resources associated with this object
- * @returns {null}
- */
- release() {
- if (this._data != null) this._data.close();
- return super.release();
- }
-
- /**
- * Load the underlying media
- * @param {ImageBitmap} bitmap
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(bitmap) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = bitmap;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {ImageBitmap} bitmap
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(bitmap) {
- return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
- }
- }
-
- /**
- * Data media source:
- * a wrapper around ImageData
- */
- class SpeedyDataMediaSource extends SpeedyMediaSource {
- /**
- * @private Constructor
- * @param {symbol} token
- */
- constructor(token) {
- super(token);
-
- /** @type {ImageData} image data */
- this._data = null;
- }
-
- /**
- * The underlying wrapped object
- * @returns {ImageData}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The type of the underlying media source
- * @returns {MediaType}
- */
- get type() {
- return types/* MediaType */.zu.Data;
- }
-
- /**
- * Media width, in pixels
- * @returns {number}
- */
- get width() {
- return this._data ? this._data.width : 0;
- }
-
- /**
- * Media height, in pixels
- * @returns {number}
- */
- get height() {
- return this._data ? this._data.height : 0;
- }
-
- /**
- * Clone this media source
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- clone() {
- if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
- const imageDataCopy = new ImageData(new Uint8ClampedArray(this._data.data), this._data.width, this._data.height);
- return SpeedyDataMediaSource.load(imageDataCopy);
- }
-
- /**
- * Load the underlying media
- * @param {ImageData} imageData
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- _load(imageData) {
- if (this.isLoaded()) this.release();
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- this._data = imageData;
- resolve(this);
- });
- }
-
- /**
- * Load the underlying media
- * @param {ImageData} imageData
- * @returns {SpeedyPromise<SpeedyMediaSource>}
- */
- static load(imageData) {
- return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
- }
- }
- // EXTERNAL MODULE: ./src/utils/observable.js
- var observable = __nested_webpack_require_314174__(3211);
- ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-gpu.js
- * GPU-accelerated routines for Computer Vision
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * GPU-accelerated routines for Computer Vision
- */
- class SpeedyGPU extends observable/* Observable */.c {
- /**
- * Constructor
- */
- constructor() {
- super();
-
- /** @type {SpeedyGL} cached reference */
- this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
-
- /** @type {SpeedyProgramCenter} GPU-based programs */
- this._programs = new SpeedyProgramCenter(this);
-
- /** @type {SpeedyTexturePool} texture pool */
- this._texturePool = new SpeedyTexturePool(this);
-
- // recreate the state if necessary
- this._speedyGL.subscribe(this._reset, this);
- }
-
- /**
- * Access point to all GPU programs
- * @returns {SpeedyProgramCenter}
- */
- get programs() {
- return this._programs;
- }
-
- /**
- * The WebGL Rendering Context
- * Be careful not to cache this, as the WebGL Rendering Context may be lost!
- * @returns {WebGL2RenderingContext}
- */
- get gl() {
- return this._speedyGL.gl;
- }
-
- /**
- * Internal canvas
- * @returns {HTMLCanvasElement}
- */
- get canvas() {
- return this._speedyGL.canvas;
- }
-
- /**
- * Texture pool
- * @returns {SpeedyTexturePool}
- */
- get texturePool() {
- return this._texturePool;
- }
-
- /**
- * Renders a texture to the canvas
- * @param {SpeedyTexture} texture
- * @returns {HTMLCanvasElement} returned for convenience
- */
- renderToCanvas(texture) {
- const width = texture.width;
- const height = texture.height;
- const canvas = this.canvas;
-
- // do we need to resize the canvas?
- if (width > canvas.width || height > canvas.height) {
- utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
- canvas.width = width;
- canvas.height = height;
- }
-
- // render
- this.programs.utils.renderToCanvas.outputs(width, height, null);
- this.programs.utils.renderToCanvas(texture);
-
- // done!
- return canvas;
- }
-
- /**
- * Upload an image to the GPU
- * @param {SpeedyMediaSource} source
- * @param {SpeedyTexture} outputTexture
- * @returns {SpeedyTexture} outputTexture
- */
- upload(source, outputTexture) {
- return outputTexture.upload(source.data, source.width, source.height);
- }
-
- /**
- * Releases resources
- * @returns {null}
- */
- release() {
- utils/* Utils */.A.assert(!this.isReleased());
-
- // release internal components
- this._programs = this._programs.release();
- this._texturePool = this._texturePool.release();
-
- // unsubscribe
- this._speedyGL.unsubscribe(this._reset);
- return null;
- }
-
- /**
- * Has this SpeedyGPU been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._programs == null;
- }
-
- /**
- * Lose & restore the WebGL context (useful for testing purposes)
- * @return {SpeedyPromise<void>} resolves as soon as the context is restored
- */
- loseAndRestoreWebGLContext() {
- return this._speedyGL.loseAndRestoreContext().then(() => void 0);
- }
-
- /**
- * Reset the internal state
- * (called on context reset)
- */
- _reset() {
- if (this.isReleased()) return;
- this._programs = new SpeedyProgramCenter(this);
- this._texturePool = new SpeedyTexturePool(this);
- this._notify();
- }
- }
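-
- /*
-  * Internal usage sketch (illustrative only, not part of the public API; the texture
-  * pool call below is hypothetical):
-  *
-  *   const gpu = new SpeedyGPU();
-  *   const texture = gpu.texturePool.allocate();              // hypothetical pool call
-  *   gpu.upload(source, texture);                              // source: a loaded SpeedyMediaSource
-  *   document.body.appendChild(gpu.renderToCanvas(texture));
-  *
-  * Note that gpu.gl must not be cached, since the WebGL context may be lost and recreated.
-  */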
- ;// CONCATENATED MODULE: ./src/core/speedy-size.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-size.js
- * Size of a rectangle
- */
-
- /**
- * Size of a rectangle
- */
- class SpeedySize {
- /**
- * Constructor
- * @param {number} width non-negative number
- * @param {number} height non-negative number
- */
- constructor(width, height) {
- /** @type {number} width */
- this._width = Math.max(0, +width);
-
- /** @type {number} height */
- this._height = Math.max(0, +height);
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * Width
- * @returns {number}
- */
- get width() {
- return this._width;
- }
-
- /**
- * Width
- * @param {number} value
- */
- set width(value) {
- this._width = Math.max(0, +value);
- }
-
- /**
- * Height
- * @returns {number}
- */
- get height() {
- return this._height;
- }
-
- /**
- * Height
- * @param {number} value
- */
- set height(value) {
- this._height = Math.max(0, +value);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedySize(${this.width}, ${this.height})`;
- }
-
- /**
- * Is this size equal to anotherSize?
- * @param {SpeedySize} anotherSize
- * @returns {boolean}
- */
- equals(anotherSize) {
- return this.width === anotherSize.width && this.height === anotherSize.height;
- }
-
- /**
- * The area of the rectangle
- * @returns {number}
- */
- area() {
- return this.width * this.height;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/speedy-media.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-media.js
- * SpeedyMedia implementation
- */
-
-
-
-
-
-
-
-
-
-
- /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
-
- /**
- * @typedef {object} SpeedyMediaOptions
- * @property {ImageFormat} [format] default is RGBA
- */
-
- /** A helper used to keep the constructor of SpeedyMedia private */
- const speedy_media_PRIVATE_TOKEN = Symbol();
-
- /**
- * SpeedyMedia encapsulates a media element
- * (e.g., image, video, canvas)
- */
- class SpeedyMedia {
- /**
- * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
- * @param {symbol} token
- * @param {SpeedyMediaSource} source
- * @param {SpeedyMediaOptions} [options] options object
- */
- constructor(token, source, options = {}) {
- // private constructor
- if (token !== speedy_media_PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
-
- /** @type {SpeedyMediaSource} media source */
- this._source = source;
-
- /** @type {ImageFormat} format */
- this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
-
- /** @type {SpeedyMediaOptions} options */
- this._options = Object.freeze(Object.assign(Object.assign({}, options), {}, {
- format: this._format
- }));
-
- // validate
- if (!source.isLoaded())
- throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);
- else if (this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY)
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
- }
-
- /**
- * Load a media source
- * Will wait until the HTML media source is loaded
- * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
- * @param {SpeedyMediaOptions} [options] options object
- * @param {boolean} [log] show log message?
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static load(mediaSource, options = {}, log = true) {
- return SpeedyMediaSource.load(mediaSource).then(source => {
- utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
-
- // FIXME user could pass an invalid format in options if ImageFormat is made public
- const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
-
- // show log message
- if (log) utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
-
- // done!
- return media;
- });
- }
-
- /**
- * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
- * @returns {SpeedyMediaSourceNativeElement} the media element
- */
- get source() {
- return this._source ? this._source.data : null;
- }
-
- /**
- * The type of the media attached to this SpeedyMedia object
- * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
- */
- get type() {
- if (this.isReleased()) return 'unknown';
- switch (this._source.type) {
- case types/* MediaType */.zu.Image:
- return 'image';
- case types/* MediaType */.zu.Video:
- return 'video';
- case types/* MediaType */.zu.Canvas:
- return 'canvas';
- case types/* MediaType */.zu.OffscreenCanvas:
- return 'offscreen-canvas';
- case types/* MediaType */.zu.Bitmap:
- return 'bitmap';
- case types/* MediaType */.zu.Data:
- return 'data';
- default:
- // this shouldn't happen
- return 'unknown';
- }
- }
-
- /**
- * Gets the width of the media
- * @returns {number} media width
- */
- get width() {
- return this._source ? this._source.width : 0;
- }
-
- /**
- * Gets the height of the media
- * @returns {number} media height
- */
- get height() {
- return this._source ? this._source.height : 0;
- }
-
- /**
- * The size of this media, in pixels
- * @returns {SpeedySize}
- */
- get size() {
- return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
- }
-
- /**
- * Returns a read-only object featuring advanced options
- * related to this SpeedyMedia object
- * @returns {SpeedyMediaOptions}
- */
- get options() {
- return this._options;
- }
-
- /**
- * Releases resources associated with this media
- * @returns {null}
- */
- release() {
- if (!this.isReleased()) {
- utils/* Utils */.A.log('Releasing SpeedyMedia object...');
- this._source = this._source.release();
- }
- return null;
- }
-
- /**
- * Has this media been released?
- * @returns {boolean}
- */
- isReleased() {
- return this._source == null;
- }
-
- /**
- * Clones the SpeedyMedia object
- * @returns {SpeedyPromise<SpeedyMedia>} a clone object
- */
- clone() {
- // has the media been released?
- if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
-
- // clone the object
- const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
-
- // done!
- return speedy_promise/* SpeedyPromise */.i.resolve(clone);
- }
-
- /**
- * Converts the media to an ImageBitmap
- * @returns {SpeedyPromise<ImageBitmap>}
- */
- toBitmap() {
- if (this.isReleased())
- throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');
- else if (!this._source.isLoaded())
- throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');
- else if (this._source.type == types/* MediaType */.zu.Bitmap)
- return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);
- else
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
- }
- }
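-
- /*
-  * Usage sketch (assumption: the public entry point exposes this class via Speedy.load()):
-  *
-  *   const media = await Speedy.load(document.querySelector('video'));
-  *   console.log(media.type, media.width, media.height);   // e.g. "video", 640, 480
-  *   const bitmap = await media.toBitmap();                 // ImageBitmap copy of the current frame
-  *   media.release();                                       // free the media when done
-  */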
- ;// CONCATENATED MODULE: ./src/core/speedy-platform.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-platform.js
- * Utilities to query information about the graphics driver
- */
-
-
-
-
- /**
- * Utilities to query information about the graphics driver. This information
- * may or may not be available, depending on the privacy settings of the web
- * browser. In addition, it may be more or less accurate in different browsers.
- */
- class SpeedyPlatform extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Renderer string of the graphics driver
- * @returns {string}
- */
- static get renderer() {
- return speedy_gl/* SpeedyGL */.c.instance.renderer;
- }
-
- /**
- * Vendor string of the graphics driver
- * @returns {string}
- */
- static get vendor() {
- return speedy_gl/* SpeedyGL */.c.instance.vendor;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-vector.js
- * Vectors
- */
-
- /**
- * 2D vector of floating-point numbers
- */
- class SpeedyVector2 {
- /**
- * Create a 2D vector
- * @param {number} x
- * @param {number} y
- */
- constructor(x, y) {
- /** @type {number} x coordinate */
- this._x = +x;
-
- /** @type {number} y coordinate */
- this._y = +y;
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * x-coordinate
- * @returns {number}
- */
- get x() {
- return this._x;
- }
-
- /**
- * x-coordinate
- * @param {number} value
- */
- set x(value) {
- this._x = +value;
- }
-
- /**
- * y-coordinate
- * @returns {number}
- */
- get y() {
- return this._y;
- }
-
- /**
- * y-coordinate
- * @param {number} value
- */
- set y(value) {
- this._y = +value;
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
- }
-
- /**
- * Is this vector equal to v?
- * @param {SpeedyVector2} v
- * @returns {boolean}
- */
- equals(v) {
- return this.x === v.x && this.y === v.y;
- }
-
- /**
- * Dot product between this vector and another vector
- * @param {SpeedyVector2} v another vector
- * @returns {number}
- */
- dot(v) {
- return this.x * v.x + this.y * v.y;
- }
-
- /**
- * The distance between this vector and another vector
- * @param {SpeedyVector2} v another vector
- * @returns {number}
- */
- distanceTo(v) {
- const dx = this.x - v.x;
- const dy = this.y - v.y;
- return Math.sqrt(dx * dx + dy * dy);
- }
-
- /**
- * Euclidean norm
- * @returns {number}
- */
- length() {
- return Math.sqrt(this.x * this.x + this.y * this.y);
- }
-
- /**
- * Returns a normalized version of this vector
- * @returns {SpeedyVector2}
- */
- normalized() {
- const len = this.length();
- if (len > 0.0)
- return new SpeedyVector2(this.x / len, this.y / len);
- else
- return new SpeedyVector2(0.0, 0.0);
- }
-
- /**
- * Returns a copy of this vector translated by offset
- * @param {SpeedyVector2} offset
- * @returns {SpeedyVector2}
- */
- plus(offset) {
- return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
- }
-
- /**
- * Returns a copy of this vector translated by -offset
- * @param {SpeedyVector2} offset
- * @returns {SpeedyVector2}
- */
- minus(offset) {
- return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
- }
-
- /**
- * Returns a copy of this vector scaled by a scalar
- * @param {number} scalar
- * @returns {SpeedyVector2}
- */
- times(scalar) {
- return new SpeedyVector2(this.x * scalar, this.y * scalar);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/speedy-point.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-point.js
- * Points in space
- */
-
-
-
- /**
- * 2D point
- */
- class SpeedyPoint2 {
- /**
- * Create a 2D point
- * @param {number} x
- * @param {number} y
- */
- constructor(x, y) {
- /** @type {number} x coordinate */
- this._x = +x;
-
- /** @type {number} y coordinate */
- this._y = +y;
- }
-
- //
- // ===== METHODS =====
- //
-
- /**
- * x-coordinate
- * @returns {number}
- */
- get x() {
- return this._x;
- }
-
- /**
- * x-coordinate
- * @param {number} value
- */
- set x(value) {
- this._x = +value;
- }
-
- /**
- * y-coordinate
- * @returns {number}
- */
- get y() {
- return this._y;
- }
-
- /**
- * y-coordinate
- * @param {number} value
- */
- set y(value) {
- this._y = +value;
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
- }
-
- /**
- * Add a vector to this point
- * @param {SpeedyVector2} v
- * @returns {SpeedyPoint2}
- */
- plus(v) {
- return new SpeedyPoint2(this.x + v.x, this.y + v.y);
- }
-
- /**
- * Subtracts a point p from this point
- * @param {SpeedyPoint2} p
- * @returns {SpeedyVector2}
- */
- minus(p) {
- return new SpeedyVector2(this.x - p.x, this.y - p.y);
- }
-
- /**
- * Is this point equal to p?
- * @param {SpeedyPoint2} p
- * @returns {boolean}
- */
- equals(p) {
- return this.x === p.x && this.y === p.y;
- }
- }
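-
- /*
-  * Usage sketch (assumption: the public entry point exposes these as Speedy.Point2 / Speedy.Vector2):
-  *
-  *   const p = Speedy.Point2(1, 2), q = Speedy.Point2(4, 6);
-  *   const v = q.minus(p);               // SpeedyVector2(3, 4): point - point = vector
-  *   console.log(v.length());            // 5
-  *   console.log(p.plus(v).equals(q));   // true: point + vector = point
-  */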
- // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
- var speedy_matrix_expr = __nested_webpack_require_314174__(6306);
- // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
- var speedy_matrix_wasm = __nested_webpack_require_314174__(6465);
- // EXTERNAL MODULE: ./src/core/speedy-matrix.js
- var speedy_matrix = __nested_webpack_require_314174__(4188);
- ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-matrix-factory.js
- * A factory of matrices
- */
-
-
-
-
-
-
-
-
- /**
- * Matrix routines
- */
- class SpeedyMatrixFactory extends Function {
- /**
- * Constructor
- */
- constructor() {
- // This factory can be invoked as a function
- super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
- return this.bind(this);
- }
-
- /**
- * @private
- *
- * Create a new matrix with the specified size and entries
- * @param {number} rows
- * @param {number} [columns]
- * @param {number[]} [entries] in column-major format
- * @returns {SpeedyMatrix}
- */
- _create(rows, columns = rows, entries = []) {
- return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
- }
-
- /**
- * @private
- *
- * Evaluate an expression synchronously and store the result in a new matrix
- * @param {SpeedyMatrixExpr} expr matrix expression
- * @returns {SpeedyMatrix}
- */
- _from(expr) {
- return speedy_matrix.SpeedyMatrix.From(expr);
- }
-
- /**
- * Create a new matrix filled with zeros with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Zeros(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
- }
-
- /**
- * Create a new matrix filled with ones with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Ones(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
- }
-
- /**
- * Create an identity matrix with the specified size
- * @param {number} rows
- * @param {number} [columns]
- * @returns {SpeedyMatrix}
- */
- Eye(rows, columns = rows) {
- return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
- }
-
- /**
- * Returns a promise that resolves immediately if the WebAssembly routines
- * are ready to be used, or as soon as they do become ready
- * @returns {SpeedyPromise<void>}
- */
- ready() {
- return speedy_matrix.SpeedyMatrix.ready();
- }
-
- /**
- * QR decomposition
- * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
- * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
- * @param {SpeedyMatrix} mat is m x n, input
- * @param {object} [options]
- * @param {'reduced'|'full'} [options.mode]
- * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
- */
- qr(Q, R, mat, {
- mode = 'reduced'
- } = {}) {
- const A = mat,
- m = mat.rows,
- n = mat.columns;
-
- // validate shapes & mode
- if (mode == 'reduced') {
- if (Q.rows != m || Q.columns != n || R.rows != n || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
- } else if (mode == 'full') {
- if (Q.rows != m || Q.columns != m || R.rows != m || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
- } else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
- const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
- const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
-
- // run the WASM routine
- if (mode == 'reduced') wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);else wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
-
- // done!
- return [Q, R];
- });
- }
-
- /**
- * Solve a possibly overdetermined system of linear
- * equations Ax = b for x using ordinary least squares
- * @param {SpeedyMatrix} solution n x 1, output
- * @param {SpeedyMatrix} A m x n, m >= n, input
- * @param {SpeedyMatrix} b m x 1, input
- * @param {object} [options]
- * @param {'qr'} [options.method] method of resolution
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
- */
- ols(solution, A, b, {
- method = 'qr'
- } = {}) {
- const m = A.rows,
- n = A.columns;
- const x = solution;
-
- // validate shapes
- if (m < n || n == 0)
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);
- else if (b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1)
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
- const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
- const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
-
- // run the WASM routine
- switch (method) {
- case 'qr':
- wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
- }
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
-
- // done!
- return solution;
- });
- }
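-
- /*
-  * Usage sketch for ols() (assumption: this factory is exposed publicly as Speedy.Matrix):
-  * fit y = a*x + b to the points (0,1), (1,3), (2,5) in the least-squares sense:
-  *
-  *   const A = Speedy.Matrix(3, 2, [0, 1, 2,  1, 1, 1]);  // columns: x-values, ones
-  *   const b = Speedy.Matrix(3, 1, [1, 3, 5]);
-  *   const x = Speedy.Matrix.Zeros(2, 1);                  // will hold [a, b]
-  *   Speedy.Matrix.ols(x, A, b).then(x => console.log(x.toString()));  // [2, 1]
-  */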
-
- /**
- * Solve a system of linear equations Ax = b for x
- * @param {SpeedyMatrix} solution m x 1, output
- * @param {SpeedyMatrix} A m x m, input
- * @param {SpeedyMatrix} b m x 1, input
- * @param {object} [options]
- * @param {'qr'} [options.method] method of resolution
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
- */
- solve(solution, A, b, {
- method = 'qr'
- } = {}) {
- const m = A.rows,
- n = A.columns;
- const x = solution;
-
- // validate shapes
- if (m != n)
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over or underdetermined system of equations`);
- else if (b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1)
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // select method
- switch (method) {
- case 'qr':
- return this.ols(x, A, b, {
- method
- });
-
- /*case 'lu':
- break;*/
-
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
- }
- });
- }
-
- /**
- * Compute a perspective transformation using 4 correspondences of points
- * @param {SpeedyMatrix} homography 3x3 output - homography matrix
- * @param {SpeedyMatrix} src 2x4 input points - source coordinates
- * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
- */
- perspective(homography, src, dest) {
- // validate shapes
- if (src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4)
- throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);
- else if (homography.rows != 3 || homography.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
-
- // done!
- return homography;
- });
- }
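-
- /*
-  * Usage sketch for perspective() (assumption: this factory is exposed publicly as Speedy.Matrix):
-  * map the unit square to a 100x100 square:
-  *
-  *   const src = Speedy.Matrix(2, 4, [0,0, 1,0, 1,1, 0,1]);           // 4 points, column-major
-  *   const dst = Speedy.Matrix(2, 4, [0,0, 100,0, 100,100, 0,100]);
-  *   const H = Speedy.Matrix.Zeros(3, 3);
-  *   Speedy.Matrix.perspective(H, src, dst).then(H => console.log(H.toString()));
-  */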
-
- /**
- * Compute a perspective transformation using n >= 4 correspondences of points
- * @param {SpeedyMatrix} homography 3x3 output - homography matrix
- * @param {SpeedyMatrix} src 2 x n input points - source coordinates
- * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
- * @param {object} [options]
- * @param {'default'|'pransac'} [options.method] method of computation
- * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
- * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
- * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
- * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
- */
- findHomography(homography, src, dest, {
- method = 'default',
- mask = null,
- reprojectionError = 3,
- numberOfHypotheses = 512,
- bundleSize = 128
- } = {}) {
- // validate shapes
- if (src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns)
- throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);
- else if (homography.rows != 3 || homography.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);
- else if (mask != null && (mask.rows != 1 || mask.columns != src.columns))
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
- const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- switch (method) {
- case 'pransac':
- utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
- wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
- break;
- case 'default':
- case 'dlt':
- // obsolete
- wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
- }
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
-
- // deallocate matrices
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
-
- // done!
- return homography;
- });
- }
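-
- /*
-  * Usage sketch for findHomography() with PRANSAC (assumption: this factory is exposed
-  * publicly as Speedy.Matrix; srcPts/dstPts are hypothetical 2 x n matrices of matched
-  * point coordinates):
-  *
-  *   const H = Speedy.Matrix.Zeros(3, 3);
-  *   const mask = Speedy.Matrix.Zeros(1, srcPts.columns);   // 1 = inlier, 0 = outlier
-  *   Speedy.Matrix.findHomography(H, srcPts, dstPts, {
-  *       method: 'pransac',
-  *       mask: mask,
-  *       reprojectionError: 3                               // in pixels
-  *   }).then(H => console.log(H.toString()));
-  */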
-
- /**
- * Apply a perspective transformation to a set of 2D points
- * @param {SpeedyMatrix} dest 2 x n output matrix
- * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
- * @param {SpeedyMatrix} transform 3x3 homography matrix
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
- */
- applyPerspectiveTransform(dest, src, transform) {
- // validate shapes
- if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- else if (transform.rows != 3 || transform.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
-
- // run the WASM routine
- wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return dest;
- });
- }
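-
- /*
-  * Usage sketch for applyPerspectiveTransform() (assumption: this factory is exposed
-  * publicly as Speedy.Matrix; H is a 3x3 homography computed elsewhere):
-  *
-  *   const pts = Speedy.Matrix(2, 2, [0,0, 1,1]);   // two points, column-major
-  *   const out = Speedy.Matrix.Zeros(2, 2);
-  *   Speedy.Matrix.applyPerspectiveTransform(out, pts, H).then(out => console.log(out.toString()));
-  */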
-
- /**
- * Compute an affine transform using 3 correspondences of points
- * @param {SpeedyMatrix} transform 2x3 output - affine transform
- * @param {SpeedyMatrix} src 2x3 input points - source coordinates
- * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to the affine transform
- */
- affine(transform, src, dest) {
- // validate shapes
- if (src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);
- else if (transform.rows != 2 || transform.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return transform;
- });
- }
-
- /**
- * Compute an affine transformation using n >= 3 correspondences of points
- * @param {SpeedyMatrix} transform 2x3 output - affine transform
- * @param {SpeedyMatrix} src 2 x n input points - source coordinates
- * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
- * @param {object} [options]
- * @param {'default'|'pransac'} [options.method] method of computation
- * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
- * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
- * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
- * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
- */
- findAffineTransform(transform, src, dest, {
- method = 'default',
- mask = null,
- reprojectionError = 3,
- numberOfHypotheses = 512,
- bundleSize = 128
- } = {}) {
- // validate shapes
- if (src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns)
- throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);
- else if (transform.rows != 2 || transform.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);
- else if (mask != null && (mask.rows != 1 || mask.columns != src.columns))
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
- const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
-
- // run the WASM routine
- switch (method) {
- case 'pransac':
- utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
- wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
- break;
- case 'default':
- wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
- break;
- default:
- throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
- }
-
- // copy output matrices from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
-
- // deallocate matrices
- if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return transform;
- });
- }
-
- /**
- * Apply an affine transformation to a set of 2D points
- * @param {SpeedyMatrix} dest 2 x n output matrix
- * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
- * @param {SpeedyMatrix} transform 2x3 affine transform
- * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
- */
- applyAffineTransform(dest, src, transform) {
- // validate shapes
- if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
- throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
- else if (transform.rows != 2 || transform.columns != 3)
- throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
- return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
- wasm,
- memory
- }) => {
- // allocate matrices
- const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
- const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
- const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
-
- // copy input matrices to WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
-
- // run the WASM routine
- wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
-
- // copy output matrix from WASM memory
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
-
- // deallocate matrices
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
- speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
-
- // done!
- return dest;
- });
- }
- }
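-
- /*
-  * Usage sketch for the affine routines (assumption: this factory is exposed publicly as
-  * Speedy.Matrix): recover a translation by (10, 20) from 3 correspondences:
-  *
-  *   const src = Speedy.Matrix(2, 3, [0,0, 1,0, 0,1]);
-  *   const dst = Speedy.Matrix(2, 3, [10,20, 11,20, 10,21]);
-  *   const T = Speedy.Matrix.Zeros(2, 3);
-  *   Speedy.Matrix.affine(T, src, dst).then(T => console.log(T.toString()));
-  *
-  *   // findAffineTransform() / applyAffineTransform() follow the same pattern as
-  *   // findHomography() / applyPerspectiveTransform(), but with 2x3 transforms.
-  */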
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-message.js
- * A message that is shared between nodes of a pipeline
- */
-
-
-
-
-
-
-
-
- /**
- * Types of messages
- * @enum {Symbol}
- */
- const SpeedyPipelineMessageType = Object.freeze({
- Nothing: Symbol('Nothing'),
- Image: Symbol('Image'),
- Keypoints: Symbol('Keypoints'),
- Vector2: Symbol('Vector2'),
- LSHTables: Symbol('LSHTables'),
- KeypointMatches: Symbol('KeypointMatches')
- });
-
- /**
- * Diagnostic data
- * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
- */
-
- /**
- * A message that is shared between nodes of a pipeline
- * @abstract
- */
- class SpeedyPipelineMessage {
- /**
- * Constructor
- * @param {SpeedyPipelineMessageType} type message type
- */
- constructor(type) {
- /** @type {SpeedyPipelineMessageType} message type */
- this._type = type;
- }
-
- /**
- * Message type
- * @returns {SpeedyPipelineMessageType}
- */
- get type() {
- return this._type;
- }
-
- /**
- * Checks if the type of this message is equal to parameter type
- * @param {SpeedyPipelineMessageType} type
- * @returns {boolean}
- */
- hasType(type) {
- return this._type === type;
- }
-
- /**
- * Is this an empty message?
- * @returns {boolean}
- */
- isEmpty() {
- return this.hasType(SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this.type);
- return `message of type ${type}`;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Set parameters
- * @abstract
- * @param {...any} args
- * @returns {SpeedyPipelineMessage} this message
- */
- set(...args) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Create a message of the specified type
- * @param {SpeedyPipelineMessageType} type
- * @returns {SpeedyPipelineMessage}
- */
- static create(type) {
- return createMessage(type);
- }
- }
-
- /**
- * An empty message carrying nothing
- */
- class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Set parameters
- * @returns {SpeedyPipelineMessage} this message
- */
- set() {
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name
- };
- }
- }
-
- /**
- * A message transporting an image
- */
- class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Image);
-
- /** @type {SpeedyDrawableTexture} the image we carry */
- this._image = null;
-
- /** @type {ImageFormat} image format */
- this._format = types/* ImageFormat */.f5.RGBA;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} image the image we carry
- * @param {ImageFormat} [format] image format
- * @returns {SpeedyPipelineMessage} this message
- */
- set(image, format = types/* ImageFormat */.f5.RGBA) {
- // set parameters
- this._image = image;
- this._format = format;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- const formatName = Object.keys(types/* ImageFormat */.f5).find(format => types/* ImageFormat */.f5[format] === this.format);
- return {
- type: this.constructor.name,
- format: String(formatName),
- imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
- image: this.image ? '<image data>' /* possibly MBs of data */ : '',
- hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
- };
- }
-
- /**
- * The image we carry
- * @returns {SpeedyDrawableTexture}
- */
- get image() {
- return this._image;
- }
-
- /**
- * Image format
- * @returns {ImageFormat}
- */
- get format() {
- return this._format;
- }
- }
-
- /**
- * A message transporting keypoints
- */
- class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Keypoints);
-
- /** @type {SpeedyDrawableTexture} encoded keypoints */
- this._encodedKeypoints = null;
-
- /** @type {number} descriptor size in bytes */
- this._descriptorSize = 0;
-
- /** @type {number} extra size in bytes */
- this._extraSize = 0;
-
- /** @type {number} encoder length */
- this._encoderLength = 1;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderLength positive integer
- * @returns {SpeedyPipelineMessage} this message
- */
- set(encodedKeypoints, descriptorSize, extraSize, encoderLength) {
- // set parameters
- this._encodedKeypoints = encodedKeypoints;
- this._descriptorSize = descriptorSize | 0;
- this._extraSize = extraSize | 0;
- this._encoderLength = encoderLength | 0;
-
- // validate
- utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
- utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
- utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- descriptorSize: this.descriptorSize,
- extraSize: this.extraSize,
- encoderLength: this.encoderLength,
- encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
- encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * Encoded keypoints
- * @returns {SpeedyDrawableTexture}
- */
- get encodedKeypoints() {
- return this._encodedKeypoints;
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- return this._descriptorSize;
- }
-
- /**
- * Extra size, in bytes
- * @returns {number}
- */
- get extraSize() {
- return this._extraSize;
- }
-
- /**
- * Encoder length
- * @returns {number}
- */
- get encoderLength() {
- return this._encoderLength;
- }
- }
-
- /**
- * A message transporting a set of 2D vectors
- */
- class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.Vector2);
-
- /** @type {SpeedyDrawableTexture} the set of vectors */
- this._vectors = null;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} vectors the set of vectors
- * @returns {SpeedyPipelineMessage} this message
- */
- set(vectors) {
- // set parameters
- this._vectors = vectors;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
- vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * The set of vectors
- * @returns {SpeedyDrawableTexture}
- */
- get vectors() {
- return this._vectors;
- }
- }
-
- /**
- * A message transporting LSH tables
- */
- class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.LSHTables);
-
- /** @type {SpeedyLSH} LSH data structure */
- this._lsh = null;
- }
-
- /**
- * Set parameters
- * @param {SpeedyLSH} lsh
- * @returns {SpeedyPipelineMessage} this message
- */
- set(lsh) {
- // set parameters
- this._lsh = lsh;
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- lsh: '<LSH tables>'
- };
- }
-
- /**
- * LSH data structure
- * @returns {SpeedyLSH}
- */
- get lsh() {
- return this._lsh;
- }
- }
-
- /**
- * A message transporting a set of keypoint matches
- */
- class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage {
- /**
- * Constructor
- */
- constructor() {
- super(SpeedyPipelineMessageType.KeypointMatches);
-
- /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
- this._encodedMatches = null;
-
- /** @type {number} number of matches per keypoint */
- this._matchesPerKeypoint = 1;
- }
-
- /**
- * Set parameters
- * @param {SpeedyDrawableTexture} encodedMatches
- * @param {number} matchesPerKeypoint
- * @returns {SpeedyPipelineMessage} this message
- */
- set(encodedMatches, matchesPerKeypoint) {
- // set parameters
- this._encodedMatches = encodedMatches;
- this._matchesPerKeypoint = matchesPerKeypoint | 0;
-
- // validate
- utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
-
- // done!
- return this;
- }
-
- /**
- * Inspect this message for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelineMessageDiagnosticData}
- */
- inspect(gpu) {
- return {
- type: this.constructor.name,
- matchesPerKeypoint: this.matchesPerKeypoint,
- encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
- encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
- };
- }
-
- /**
- * The matches
- * @returns {SpeedyDrawableTexture}
- */
- get encodedMatches() {
- return this._encodedMatches;
- }
-
- /**
- * Number of matches per keypoint
- * @returns {number}
- */
- get matchesPerKeypoint() {
- return this._matchesPerKeypoint;
- }
- }
-
- //
- // Utilities
- //
-
- /** Map message type to message class */
- const MESSAGE_CLASS = Object.freeze({
- [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
- [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
- [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
- [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
- [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
- [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches
- });
-
- /**
- * Create a message of the specified type
- * @param {SpeedyPipelineMessageType} type
- * @returns {SpeedyPipelineMessage}
- */
- function createMessage(type) {
- //return Reflect.construct(MESSAGE_CLASS[type], []);
- return new MESSAGE_CLASS[
- // error TS2538: Type 'Symbol' cannot be used as an index type.
- // heck, what the hack...
- /** @type {any} */
- type]();
- }
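-
- // Usage sketch (illustrative only, never invoked by the bundle): messages are created
- // by type and filled via set(). The texture and format arguments below are assumed to
- // come from the GPU layer; their names are placeholders.
- function example_createMessage(texture, format) {
- const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image); // a SpeedyPipelineMessageWithImage
- msg.set(texture, format); // store a SpeedyDrawableTexture and its ImageFormat
- return msg.hasType(SpeedyPipelineMessageType.Image) && !msg.isEmpty(); // true
- }
-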
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-portspec.js
- * Specification (requirements) of a port of a node of a pipeline
- */
-
-
-
-
- /**
- * A message constraint is a message validation predicate
- * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
- */
-
- /**
- * A validation predicate that validates all messages
- * @type {SpeedyPipelineMessageConstraint}
- */
- const always = message => true;
-
- /**
- * Specification (requirements) of a port of a node of a pipeline
- */
- class SpeedyPipelinePortSpec {
- /**
- * Constructor
- * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
- * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
- */
- constructor(expectedMessageType, messageConstraint = always) {
- /** @type {SpeedyPipelineMessageType} expected message type */
- this._expectedMessageType = expectedMessageType;
-
- /** @type {SpeedyPipelineMessageConstraint} message validation function */
- this._isValidMessage = typeof messageConstraint === 'function' ? messageConstraint : always;
-
- // expect a valid type
- utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
- }
-
- /**
- * Checks if two specs have the same expected type
- * @param {SpeedyPipelinePortSpec} spec
- * @returns {boolean}
- */
- isCompatibleWith(spec) {
- return this._expectedMessageType == spec._expectedMessageType;
- }
-
- /**
- * Is the given message accepted by a port that abides by this specification?
- * @param {SpeedyPipelineMessage} message
- * @returns {boolean}
- */
- accepts(message) {
- return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
- }
-
- /**
- * Convert to string
- * @returns {string}
- */
- toString() {
- const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this._expectedMessageType);
- return `Port expects ${type} satisfying ${this._isValidMessage}`;
- }
-
- /**
- * Expected message type
- * @returns {SpeedyPipelineMessageType}
- */
- get expectedMessageType() {
- return this._expectedMessageType;
- }
- }
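-
- // Usage sketch (illustrative only, never invoked by the bundle): a port spec pairs an
- // expected message type with an optional validation predicate.
- function example_portSpec() {
- const spec = new SpeedyPipelinePortSpec(
- SpeedyPipelineMessageType.Keypoints,
- message => message.descriptorSize > 0 // only accept keypoint messages that carry descriptors
- );
- return spec; // spec.accepts(msg) is true only for keypoint messages satisfying the predicate
- }
-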
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-port.js
- * Port of a node of a pipeline
- */
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_INPUT_PORT_NAME = 'in';
- const DEFAULT_OUTPUT_PORT_NAME = 'out';
- const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
- const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
-
- /**
- * Diagnostic data
- * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
- */
-
- /**
- * Port of a node of a pipeline
- * @abstract
- */
- class SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- /** @type {string} the name of this port */
- this._name = String(name);
-
- /** @type {SpeedyPipelinePortSpec} the specification of this port */
- this._spec = spec;
-
- /** @type {SpeedyPipelineNode} the node to which this port belongs */
- this._node = node;
-
- /** @type {SpeedyPipelineMessage} the message located in this port */
- this._message = EMPTY_MESSAGE;
-
- // check if we've got an acceptable port name
- utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
- }
-
- /**
- * The name of this port
- * @returns {string}
- */
- get name() {
- return this._name;
- }
-
- /**
- * The node to which this port belongs
- * @returns {SpeedyPipelineNode}
- */
- get node() {
- return this._node;
- }
-
- /**
- * Connect this port to another
- * @abstract
- * @param {SpeedyPipelinePort} port
- */
- connectTo(port) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this an input port?
- * @abstract
- * @returns {boolean}
- */
- isInputPort() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this an output port?
- * @returns {boolean}
- */
- isOutputPort() {
- return !this.isInputPort();
- }
-
- /**
- * Clear the message stored in this port
- */
- clearMessage() {
- this._message = EMPTY_MESSAGE;
- }
-
- /**
- * Is there a valid message located in this port?
- * @returns {boolean}
- */
- hasMessage() {
- return !this._message.isEmpty();
- }
-
- /**
- * Read the message that is in this port
- * @returns {SpeedyPipelineMessage}
- */
- read() {
- if (this._message.isEmpty()) throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
- return this._message;
- }
-
- /**
- * Write a message to this port
- * @param {SpeedyPipelineMessage} message
- */
- write(message) {
- throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
- }
-
- /**
- * Inspect this port for debugging purposes
- * @param {SpeedyGPU} gpu
- * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
- */
- inspect(gpu) {
- return this._message.inspect(gpu);
- }
-
- /**
- * Default port name
- * @abstract
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
- }
-
- /**
- * Output port
- */
- class SpeedyPipelineOutputPort extends SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- super(name, spec, node);
-
- /** @type {SpeedyPipelineMessage} cached message */
- this._cachedMessage = null;
- }
-
- /**
- * Connect this port to another
- * @param {SpeedyPipelineInputPort} port
- */
- connectTo(port) {
- if (!port.isInputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
- port.connectTo(this);
- }
-
- /**
- * Is this an input port?
- * @returns {boolean}
- */
- isInputPort() {
- return false;
- }
-
- /**
- * Write a message to this port
- * @param {SpeedyPipelineMessage} message
- */
- write(message) {
- if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
- this._message = message;
- }
-
- /**
- * Write a message to this port using a cached message object
- * @param {...any} args to be passed to SpeedyPipelineMessage.set()
- */
- swrite(...args) {
- if (this._cachedMessage == null) this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
- this.write(this._cachedMessage.set(...args));
- }
-
- /**
- * Default port name
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- return DEFAULT_OUTPUT_PORT_NAME;
- }
- }
-
- /**
- * Input port
- */
- class SpeedyPipelineInputPort extends SpeedyPipelinePort {
- /**
- * Constructor
- * @param {string} name the name of this port
- * @param {SpeedyPipelinePortSpec} spec port specification
- * @param {SpeedyPipelineNode} node the node to which this port belongs
- */
- constructor(name, spec, node) {
- super(name, spec, node);
-
- /** @type {SpeedyPipelineOutputPort|null} incoming link */
- this._incomingLink = null;
- }
-
- /**
- * Incoming link
- * @returns {SpeedyPipelineOutputPort|null}
- */
- get incomingLink() {
- return this._incomingLink;
- }
-
- /**
- * Connect this port to another
- * @param {SpeedyPipelineOutputPort} port
- */
- connectTo(port) {
- if (!port.isOutputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);else if (!this._spec.isCompatibleWith(port._spec)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
- this._incomingLink = port;
- }
-
- /**
- * Unlink this port
- */
- disconnect() {
- this._incomingLink = null;
- }
-
- /**
- * Is this an input port?
- * @returns {boolean}
- */
- isInputPort() {
- return true;
- }
-
- /**
- * Receive a message using the incoming link
- * @param {string} [nodeName]
- * @returns {SpeedyPipelineMessage}
- */
- pullMessage(nodeName = '') {
- const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
- if (this._incomingLink == null) throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
- const message = this._incomingLink.read();
- if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
- return this._message = message;
- }
-
- /**
- * Default port name
- * @returns {string}
- */
- static get DEFAULT_NAME() {
- return DEFAULT_INPUT_PORT_NAME;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-portbuilder.js
- * Builder of a port of a node of a pipeline
- */
-
-
-
-
-
-
-
- /**
- * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
- */
-
- /**
- * Builder of a port of a node of a pipeline
- */
- class SpeedyPipelinePortBuilder {
- /**
- * Constructor
- * @param {typeof SpeedyPipelinePort} portClass input or output?
- * @param {string} portName
- */
- constructor(portClass, portName) {
- /** @type {typeof SpeedyPipelinePort} input or output? */
- this._class = portClass;
-
- /** @type {string} port name */
- this._name = String(portName);
-
- /** @type {SpeedyPipelineMessageType} accepted message type */
- this._type = SpeedyPipelineMessageType.Nothing;
-
- /** @type {SpeedyPipelineMessageConstraint} message validation function */
- this._messageConstraint = undefined;
- }
-
- /**
- * Declare that the new port expects a certain type of message
- * @param {SpeedyPipelineMessageType} type expected type
- * @returns {SpeedyPipelinePortBuilder} this builder
- */
- expects(type) {
- utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
- utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
- this._type = type;
- return this;
- }
-
- /**
- * Declare that the new port expects messages satisfying a constraint
- * @param {SpeedyPipelineMessageConstraint} constraint
- * @returns {SpeedyPipelinePortBuilder} this builder
- */
- satisfying(constraint) {
- utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
- utils/* Utils */.A.assert(this._messageConstraint === undefined);
- utils/* Utils */.A.assert(typeof constraint === 'function');
- this._messageConstraint = constraint;
- return this;
- }
-
- /**
- * Build a port
- * @param {SpeedyPipelineNode} node the node to which the new port will belong
- * @returns {SpeedyPipelinePort}
- */
- build(node) {
- const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
- return Reflect.construct(this._class, [this._name, spec, node]);
- }
- }
-
- /**
- * Creates a builder for an input port
- * @param {string} [portName]
- * @returns {SpeedyPipelinePortBuilder}
- */
- function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
- return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
- }
-
- /**
- * Creates a builder for an output port
- * @param {string} [portName]
- * @returns {SpeedyPipelinePortBuilder}
- */
- function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
- return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
- }
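-
- // Usage sketch (illustrative only, never invoked by the bundle): port builders are how
- // nodes declare their ports, as the node classes below do in their constructors.
- function example_portBuilders() {
- return [
- InputPort('in0').expects(SpeedyPipelineMessageType.Image), // input port named "in0"
- InputPort('in1').expects(SpeedyPipelineMessageType.Image)
- .satisfying(message => message.image != null), // optional extra constraint
- OutputPort().expects(SpeedyPipelineMessageType.Image) // output port with the default name "out"
- ];
- }
-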
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline-node.js
- * Node of a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
- /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
-
- /** Generate a random name for a node */
- const generateRandomName = () => Math.random().toString(16).slice(2);
-
- /** Create an empty input port dictionary */
- const createInputPortDictionary = () => ( /** @type {InputPortDictionary} */Object.create(null));
-
- /** Create an empty output port dictionary */
- const createOutputPortDictionary = () => ( /** @type {OutputPortDictionary} */Object.create(null));
-
- /**
- * Map an array of input ports to an InputPortDictionary whose keys are their names
- * @param {SpeedyPipelineInputPort[]} ports
- * @returns {InputPortDictionary}
- */
- function InputPortDictionary(ports) {
- return ports.reduce((dict, port) => (dict[port.name] = port, dict), createInputPortDictionary());
- }
-
- /**
- * Map an array of output ports to an OutputPortDictionary whose keys are their names
- * @param {SpeedyPipelineOutputPort[]} ports
- * @returns {OutputPortDictionary}
- */
- function OutputPortDictionary(ports) {
- return ports.reduce((dict, port) => (dict[port.name] = port, dict), createOutputPortDictionary());
- }
-
- /** A flag used for debugging purposes */
- let _texView = false;
-
- /**
- * Node of a pipeline
- * @abstract
- */
- class SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = generateRandomName(), texCount = 0, portBuilders = []) {
- /** @type {string} the name of this node */
- this._name = String(name);
-
- /** @type {SpeedyDrawableTexture[]} work texture(s) */
- this._tex = new Array(texCount).fill(null);
-
- // build the ports
- const ports = portBuilders.map(builder => builder.build(this));
- const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ports.filter(port => port.isInputPort());
- const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ports.filter(port => port.isOutputPort());
-
- /** @type {InputPortDictionary} input ports */
- this._inputPorts = InputPortDictionary(inputPorts);
-
- /** @type {OutputPortDictionary} output ports */
- this._outputPorts = OutputPortDictionary(outputPorts);
-
- // validate
- if (this._name.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);else if (portBuilders.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
- }
-
- /**
- * The name of this node
- * @returns {string}
- */
- get name() {
- return this._name;
- }
-
- /**
- * Name and type of this node
- * @returns {string}
- */
- get fullName() {
- return `${this.constructor.name}[${this.name}]`;
- }
-
- /**
- * Find input port by name
- * @param {string} [portName]
- * @returns {SpeedyPipelineInputPort}
- */
- input(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
- if (portName in this._inputPorts) return this._inputPorts[portName];
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
- }
-
- /**
- * Find output port by name
- * @param {string} [portName]
- * @returns {SpeedyPipelineOutputPort}
- */
- output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
- if (portName in this._outputPorts) return this._outputPorts[portName];
- throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
- }
-
- /**
- * Get data from the input ports and execute
- * the task that this node is supposed to!
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- execute(gpu) {
- let portName;
-
- // clear output ports
- for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
-
- // let the input ports receive what is due
- for (portName in this._inputPorts) this._inputPorts[portName].pullMessage(this.fullName);
-
- // run the task
- const runTask = this._run(gpu);
- if (typeof runTask === 'undefined') return void this._finishExecution(gpu);else return runTask.then(() => this._finishExecution(gpu));
- }
-
- /**
- * Finish the execution of this node;
- * to be called after execute()
- * @param {SpeedyGPU} gpu
- */
- _finishExecution(gpu) {
- // ensure that no output ports are empty
- for (const portName in this._outputPorts) {
- utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
- }
-
- // log diagnostics for the node / pipeline
- if (settings/* Settings */.w.logging === 'diagnostic') {
- utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
-
- // Inspecting the data has performance implications.
- // It is for diagnostic purposes only, not meant to be done in production!
-
- for (const portName in this._inputPorts) utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
- for (const portName in this._outputPorts) utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
- }
- }
-
- /**
- * Run the specific task of this node
- * @abstract
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- gpu.subscribe(this._allocateWorkTextures, this, gpu);
- this._allocateWorkTextures(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._deallocateWorkTextures(gpu);
- gpu.unsubscribe(this._allocateWorkTextures, this);
- }
-
- /**
- * Clear all ports
- */
- clearPorts() {
- let portName;
- for (portName in this._inputPorts) this._inputPorts[portName].clearMessage();
- for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
- }
-
- /**
- * Find all nodes that feed input to this node
- * @returns {SpeedyPipelineNode[]}
- */
- inputNodes() {
- const nodes = [];
- for (const portName in this._inputPorts) {
- const port = this._inputPorts[portName];
- if (port.incomingLink != null) nodes.push(port.incomingLink.node);
- }
- return nodes;
- }
-
- /**
- * Is this a source of the pipeline?
- * @returns {boolean}
- */
- isSource() {
- return false;
- }
-
- /**
- * Is this a sink of the pipeline?
- * @returns {boolean}
- */
- isSink() {
- return false;
-
- // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
- //return Object.keys(this._outputPorts).length == 0;
- }
-
- /**
- * Allocate work texture(s)
- * @param {SpeedyGPU} gpu
- */
- _allocateWorkTextures(gpu) {
- for (let j = 0; j < this._tex.length; j++) this._tex[j] = gpu.texturePool.allocate();
- }
-
- /**
- * Deallocate work texture(s)
- * @param {SpeedyGPU} gpu
- */
- _deallocateWorkTextures(gpu) {
- for (let j = this._tex.length - 1; j >= 0; j--) this._tex[j] = gpu.texturePool.free(this._tex[j]);
- }
-
- /**
- * Visually inspect a texture for debugging purposes
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} texture
- */
- _visualize(gpu, texture) {
- const canvas = gpu.renderToCanvas(texture);
- if (!_texView) {
- document.body.appendChild(canvas);
- _texView = true;
- }
- }
- }
-
- /**
- * Source node (a node with no input ports)
- * @abstract
- */
- class SpeedyPipelineSourceNode extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
- utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
- }
-
- /**
- * Is this a source of the pipeline?
- * @returns {boolean}
- */
- isSource() {
- return true;
- }
- }
-
- /**
- * Sink node (a node with no output ports)
- * @abstract
- */
- class SpeedyPipelineSinkNode extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] the name of this node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
- utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
- }
-
- /**
- * Export data from this node to the user
- * @abstract
- * @returns {SpeedyPromise<any>}
- */
- export() {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Is this a sink of the pipeline?
- * @returns {boolean}
- */
- isSink() {
- return true;
- }
- }
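-
- // Usage sketch (illustrative only, never invoked by the bundle): a minimal pass-through
- // node following the same pattern as the built-in nodes defined later in this bundle
- // (declare ports in the constructor, read inputs and write outputs in _run).
- class ExamplePassthroughNode extends SpeedyPipelineNode {
- constructor(name = undefined) {
- super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
- }
- _run(gpu) {
- this.output().write(this.input().read()); // forward the incoming image message untouched
- }
- }
-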
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint-match.js
- * A match between two keypoint descriptors
- */
-
-
-
- // Constants
- const MATCH_NOT_FOUND = -1;
-
- /**
- * A match between two keypoint descriptors
- */
- class SpeedyKeypointMatch {
- /**
- * Constructor
- * @param {number} index index of the stored keypoint, a non-negative integer
- * @param {number} distance a measure of the quality of the match, a non-negative number
- */
- constructor(index, distance) {
- const isValid = distance < globals.MATCH_MAX_DISTANCE;
-
- /** @type {number} index of the stored keypoint */
- this._index = isValid ? index | 0 : MATCH_NOT_FOUND;
-
- /** @type {number} a measure of the quality of the match */
- this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
-
- // done!
- return Object.freeze(this);
- }
-
- /**
- * The index of the stored keypoint
- * @returns {number}
- */
- get index() {
- return this._index;
- }
-
- /**
- * A measure of the quality of the match (lower values indicate better matches)
- * @returns {number}
- */
- get distance() {
- return this._distance;
- }
-
- /**
- * A string representation of the keypoint match
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypointMatch(${this.index},${this.distance})`;
- }
- }
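-
- // Usage sketch (illustrative only, never invoked by the bundle): a match whose distance
- // reaches MATCH_MAX_DISTANCE (a constant defined in the globals module) is flagged as
- // "not found": its index becomes -1 and its distance becomes +Infinity.
- function example_keypointMatch() {
- const good = new SpeedyKeypointMatch(7, 32); // stored keypoint #7 at distance 32 (assumed < MATCH_MAX_DISTANCE)
- const none = new SpeedyKeypointMatch(7, Number.POSITIVE_INFINITY); // distance too large
- return [good.index, none.index]; // [7, -1] under the assumption above
- }
-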
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint.js
- * Keypoint class
- */
-
-
-
-
-
-
- /**
- * Represents a keypoint
- */
- class SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null) {
- /** @type {SpeedyPoint2} keypoint position */
- this._position = new SpeedyPoint2(+x, +y);
-
- /** @type {number} level of detail */
- this._lod = +lod;
-
- /** @type {number} rotation in radians */
- this._rotation = +rotation;
-
- /** @type {number} a cornerness measure */
- this._score = +score;
-
- /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
- this._descriptor = descriptor;
- }
-
- /**
- * Converts this keypoint to a descriptive string
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypoint(${this.x},${this.y})`;
- }
-
- /**
- * The position of this keypoint
- * @returns {SpeedyPoint2}
- */
- get position() {
- return this._position;
- }
-
- /**
- * The x-position of this keypoint
- * @returns {number}
- */
- get x() {
- return this._position.x;
- }
-
- /**
- * The x-position of this keypoint
- * @param {number} value
- */
- set x(value) {
- this._position.x = +value;
- }
-
- /**
- * The y-position of this keypoint
- * @returns {number}
- */
- get y() {
- return this._position.y;
- }
-
- /**
- * The y-position of this keypoint
- * @param {number} value
- */
- set y(value) {
- this._position.y = +value;
- }
-
- /**
- * The pyramid level-of-detail from which this keypoint was extracted
- * @returns {number}
- */
- get lod() {
- return this._lod;
- }
-
- /**
- * Scale: 2^lod
- * @returns {number}
- */
- get scale() {
- return Math.pow(2, this._lod);
- }
-
- /**
- * The orientation of the keypoint, in radians
- * @returns {number} Angle in radians
- */
- get rotation() {
- return this._rotation;
- }
-
- /**
- * Score: a cornerness measure
- * @returns {number} Score
- */
- get score() {
- return this._score;
- }
-
- /**
- * Keypoint descriptor
- * @return {SpeedyKeypointDescriptor|null}
- */
- get descriptor() {
- return this._descriptor;
- }
- }
-
- /**
- * Represents a tracked keypoint
- */
- class SpeedyTrackedKeypoint extends SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- * @param {SpeedyVector2} [flow] flow vector
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0, 0)) {
- super(x, y, lod, rotation, score, descriptor);
-
- /** @type {SpeedyVector2} flow vector */
- this._flow = flow;
- }
-
- /**
- * Flow vector
- * @returns {SpeedyVector2}
- */
- get flow() {
- return this._flow;
- }
- }
-
- /**
- * Represents a matched keypoint
- */
- class SpeedyMatchedKeypoint extends SpeedyKeypoint {
- /**
- * Constructor
- * @param {number} x X position
- * @param {number} y Y position
- * @param {number} [lod] Level-of-detail
- * @param {number} [rotation] Rotation in radians
- * @param {number} [score] Cornerness measure
- * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
- * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
- */
- constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = []) {
- super(x, y, lod, rotation, score, descriptor);
-
- /** @type {SpeedyKeypointMatch[]} keypoint matches */
- this._matches = matches;
- }
-
- /**
- * Keypoint matches
- * @returns {SpeedyKeypointMatch[]}
- */
- get matches() {
- return this._matches;
- }
- }
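-
- // Usage sketch (illustrative only, never invoked by the bundle): keypoints store a
- // position, a pyramid level-of-detail (lod) and a derived scale = 2^lod.
- function example_keypoint() {
- const keypoint = new SpeedyKeypoint(30, 40, 1.0, 0.0, 0.5); // at (30,40), lod = 1
- return [keypoint.x, keypoint.y, keypoint.scale]; // [30, 40, 2]
- }
-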
- ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pipeline.js
- * A pipeline is a network of nodes in which data flows to a sink
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A dictionary indexed by the names of the sink nodes
- * @typedef {Object<string,any>} SpeedyPipelineOutput
- */
-
- /** @type {SpeedyGPU} shared GPU programs & textures */
- let gpu = null;
-
- /** @type {number} gpu reference count */
- let referenceCount = 0;
-
- /**
- * A pipeline is a network of nodes in which data flows to a sink
- */
- class SpeedyPipeline {
- /**
- * Constructor
- */
- constructor() {
- /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
- this._nodes = [];
-
- /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
- this._sequence = [];
-
- /** @type {boolean} are we running the pipeline at this moment? */
- this._busy = false;
- }
-
- /**
- * Find a node by its name
- * @template {SpeedyPipelineNode} T
- * @param {string} name
- * @returns {T|null}
- */
- node(name) {
- for (let i = 0, n = this._nodes.length; i < n; i++) {
- if (this._nodes[i].name === name) return this._nodes[i];
- }
- return null;
- }
-
- /**
- * Initialize the pipeline
- * @param {...SpeedyPipelineNode} nodes
- * @returns {SpeedyPipeline} this pipeline
- */
- init(...nodes) {
- // validate
- if (this._nodes.length > 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);else if (nodes.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
-
- // create a GPU instance and increase the reference count
- if (0 == referenceCount++) {
- utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
- gpu = new SpeedyGPU();
- }
-
- // add nodes to the network
- for (let i = 0; i < nodes.length; i++) {
- const node = nodes[i];
- if (!this._nodes.includes(node)) this._nodes.push(node);
- }
-
- // generate the sequence of nodes
- this._sequence = SpeedyPipeline._tsort(this._nodes);
- SpeedyPipeline._validateSequence(this._sequence);
-
- // initialize nodes
- for (let i = 0; i < this._sequence.length; i++) this._sequence[i].init(gpu);
-
- // done!
- return this;
- }
-
- /**
- * Release the resources associated with this pipeline
- * @returns {null}
- */
- release() {
- if (this._nodes.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
-
- // release nodes
- for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].release(gpu);
- this._sequence.length = 0;
- this._nodes.length = 0;
-
- // decrease reference count and release GPU if necessary
- if (0 == --referenceCount) gpu = gpu.release();
-
- // done!
- return null;
- }
-
- /**
- * Run the pipeline
- * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
- */
- run() {
- utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
-
- // is the pipeline busy?
- if (this._busy) {
- // if so, we need to wait 'til it finishes
- return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
- setTimeout(() => this.run().then(resolve, reject), 0);
- });
- } else {
- // the pipeline is now busy and won't accept concurrent tasks
- // (we allocate textures using a single pool)
- this._busy = true;
- }
-
- // find the sinks
- const sinks = /** @type {SpeedyPipelineSinkNode[]} */this._sequence.filter(node => node.isSink());
-
- // create output template
- const template = SpeedyPipeline._createOutputTemplate(sinks);
-
- // diagnostic log
- if (settings/* Settings */.w.logging === 'diagnostic') utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
-
- // run the pipeline
- return SpeedyPipeline._runSequence(this._sequence).then(() =>
- // export results
- speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
- // aggregate results by the names of the sinks
- results.reduce((obj, val, idx) => (obj[sinks[idx].name] = val, obj), template))).finally(() => {
- // clear all ports
- for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].clearPorts();
-
- // the pipeline is no longer busy
- this._busy = false;
-
- // diagnostic log
- if (settings/* Settings */.w.logging === 'diagnostic') {
- utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
- Object.keys(template).forEach(entry => {
- utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
- });
- }
- }).turbocharge();
- }
-
- /**
- * @internal
- *
- * GPU instance
- * @returns {SpeedyGPU}
- */
- get _gpu() {
- return gpu;
- }
-
- /**
- * Execute the tasks of a sequence of nodes
- * @param {SpeedyPipelineNode[]} sequence sequence of nodes
- * @param {number} [i] in [0,n)
- * @param {number} [n] number of nodes
- * @returns {SpeedyPromise<void>}
- */
- static _runSequence(sequence, i = 0, n = sequence.length) {
- for (; i < n; i++) {
- const runTask = sequence[i].execute(gpu);
-
- // this call greatly improves performance when downloading pixel data using PBOs
- gpu.gl.flush();
- if (typeof runTask !== 'undefined') return runTask.then(() => SpeedyPipeline._runSequence(sequence, i + 1, n));
- }
- return speedy_promise/* SpeedyPromise */.i.resolve();
- }
-
- /**
- * Topological sorting
- * @param {SpeedyPipelineNode[]} nodes
- * @returns {SpeedyPipelineNode[]}
- */
- static _tsort(nodes) {
- /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
-
- const outlinks = SpeedyPipeline._outlinks(nodes);
- const stack = nodes.map(node => ( /** @type {StackNode} */[node, false]));
- const trash = new Set();
- const sorted = new Array(nodes.length);
- let j = sorted.length;
- while (stack.length > 0) {
- const [node, done] = stack.pop();
- if (!done) {
- if (!trash.has(node)) {
- const outnodes = outlinks.get(node);
- trash.add(node);
- stack.push([node, true]);
- stack.push(...outnodes.map(node => ( /** @type {StackNode} */[node, false])));
- if (outnodes.some(node => trash.has(node) && !sorted.includes(node))) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
- }
- } else sorted[--j] = node;
- }
- return sorted;
- }
-
- /**
- * Figure out the outgoing links of all nodes
- * @param {SpeedyPipelineNode[]} nodes
- * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
- */
- static _outlinks(nodes) {
- const outlinks = new Map();
- for (let k = 0; k < nodes.length; k++) outlinks.set(nodes[k], []);
- for (let i = 0; i < nodes.length; i++) {
- const to = nodes[i];
- const inputs = to.inputNodes();
- for (let j = 0; j < inputs.length; j++) {
- const from = inputs[j];
- const links = outlinks.get(from);
- if (!links) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
- if (!links.includes(to)) links.push(to);
- }
- }
- return outlinks;
- }
-
- /**
- * Generate the output template by aggregating the names of the sinks
- * @param {SpeedyPipelineNode[]} [sinks]
- * @returns {SpeedyPipelineOutput}
- */
- static _createOutputTemplate(sinks = []) {
- const template = Object.create(null);
- for (let i = sinks.length - 1; i >= 0; i--) template[sinks[i].name] = null;
- return template;
- }
-
- /**
- * Validate a sequence of nodes
- * @param {SpeedyPipelineNode[]} sequence
- */
- static _validateSequence(sequence) {
- if (sequence.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);else if (!sequence[0].isSource()) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);else if (!sequence.find(node => node.isSink())) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
- }
- }
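-
- // Usage sketch (illustrative only, never invoked by the bundle): given a source node and
- // a sink node (such as the image source / image sink nodes defined below), a pipeline is
- // wired by connecting output ports to input ports, initialized with all of its nodes and
- // then run; the results are keyed by the names of the sinks.
- function example_runPipeline(source, sink) {
- source.output().connectTo(sink.input()); // connect "out" of the source to "in" of the sink
- const pipeline = new SpeedyPipeline().init(source, sink);
- return pipeline.run().then(results => {
- const media = results[sink.name]; // e.g. results.image for a sink named "image"
- pipeline.release(); // free GPU resources when the pipeline is no longer needed
- return media;
- });
- }
-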
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * source.js
- * Gets an image into a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
-
- /**
- * Gets an image into a pipeline
- */
- class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, UPLOAD_BUFFER_SIZE, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMedia|null} source media */
- this._media = null;
-
- /** @type {number} texture index */
- this._textureIndex = 0;
- }
-
- /**
- * Source media
- * @returns {SpeedyMedia|null}
- */
- get media() {
- return this._media;
- }
-
- /**
- * Source media
- * @param {SpeedyMedia|null} media
- */
- set media(media) {
- if (media !== null && !(media instanceof SpeedyMedia)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
- this._media = media;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._media == null) throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
-
- // use round-robin to mitigate WebGL's implicit synchronization
- // and maybe minimize texture upload times
- this._textureIndex = (this._textureIndex + 1) % this._tex.length;
-
- // upload texture
- const outputTexture = this._tex[this._textureIndex];
- gpu.upload(this._media._source, outputTexture);
- this.output().swrite(outputTexture, this._media._format);
- }
- }
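-
- // Usage sketch (illustrative only, never invoked by the bundle): an image source node
- // must be given a SpeedyMedia before the pipeline runs. `media` below is assumed to be
- // a SpeedyMedia obtained elsewhere (e.g. via Speedy.load()).
- function example_imageSource(media) {
- const source = new SpeedyPipelineNodeImageSource('my-source');
- source.media = media; // throws unless media is null or a SpeedyMedia
- return source;
- }
-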
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets an image out of a pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
-
- /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
- const DEFAULT_MEDIA_TYPE = "bitmap";
-
- /**
- * Gets an image out of a pipeline
- */
- class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'image') {
- super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
- this._mediaType = DEFAULT_MEDIA_TYPE;
-
- /** @type {ImageBitmap} output bitmap */
- this._bitmap = null;
-
- /** @type {ImageData} output pixel data */
- this._data = null;
-
- /** @type {ImageFormat} output format */
- this._format = types/* ImageFormat */.f5.RGBA;
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader(1);
- }
-
- /**
- * The media type that is exported from this node
- * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
- */
- get mediaType() {
- return this._mediaType;
- }
-
- /**
- * The media type that is exported from this node
- * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
- */
- set mediaType(value) {
- if (value != 'bitmap' && value != 'data') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
- this._mediaType = value;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- export() {
- const bitmapOrData = this._mediaType != 'data' ? this._bitmap : this._data;
- utils/* Utils */.A.assert(bitmapOrData != null);
- return SpeedyMedia.load(bitmapOrData, {
- format: this._format
- }, false);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- if (this._mediaType != 'data') {
- /* Create an ImageBitmap (default) */
- return new speedy_promise/* SpeedyPromise */.i(resolve => {
- const canvas = gpu.renderToCanvas(image);
- createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
- this._bitmap = bitmap;
- this._format = format;
- this._data = null;
- resolve();
- });
- });
- } else {
- /* Create an ImageData */
- return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
- const dataArray = new Uint8ClampedArray(pixels.buffer);
- this._data = new ImageData(dataArray, image.width, image.height);
- this._format = format;
- this._bitmap = null;
- });
- }
- }
- }
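-
- // Usage sketch (illustrative only, never invoked by the bundle): the image sink exports
- // an ImageBitmap by default; setting mediaType to "data" makes it export raw pixel data
- // (ImageData) instead.
- function example_imageSink() {
- const sink = new SpeedyPipelineNodeImageSink(); // default name: "image"
- sink.mediaType = 'data'; // accepted values: "bitmap" (default) or "data"
- return sink;
- }
-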
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * multiplexer.js
- * Image multiplexer
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {string[]} the names of the input ports indexed by their number */
- const INPUT_PORT = ['in0', 'in1'];
-
- /**
- * Image multiplexer
- */
- class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [...INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image)), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} which port should be linked to the output? */
- this._port = 0;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @returns {number}
- */
- get port() {
- return this._port;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @param {number} port
- */
- set port(port) {
- if (port < 0 || port >= INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
- this._port = port | 0;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const message = this.input(INPUT_PORT[this._port]).read();
- this.output().write(message);
- }
- }
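- /*
-  * Usage sketch (assuming Speedy.Image.Multiplexer() and the connectTo() linking API
-  * of the public interface): the multiplexer forwards exactly one of its inputs,
-  * 'in0' or 'in1', selected by the numeric port property.
-  *
-  *   const mux = Speedy.Image.Multiplexer();      // assumed factory name
-  *   nodeA.output().connectTo(mux.input('in0'));  // connectTo() assumed from the public API
-  *   nodeB.output().connectTo(mux.input('in1'));
-  *   mux.port = 1;                                // forward 'in1' to the output
-  */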
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * buffer.js
- * Image Buffer
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Image Buffer: a node with memory.
- * At time t, it outputs the image received at time t-1
- */
- class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} current page: 0 or 1 */
- this._pageIndex = 0;
-
- /** @type {boolean} first run? */
- this._initialized = false;
-
- /** @type {ImageFormat} previous image format */
- this._previousFormat = types/* ImageFormat */.f5.RGBA;
-
- /** @type {boolean} frozen buffer? */
- this._frozen = false;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @returns {boolean}
- */
- get frozen() {
- return this._frozen;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @param {boolean} value
- */
- set frozen(value) {
- this._frozen = Boolean(value);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const previousFormat = this._previousFormat;
- const page = this._tex;
- const previousInputTexture = page[1 - this._pageIndex];
- const outputTexture = page[this._pageIndex];
-
- // can't store pyramids
- if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
-
- // bufferize
- if (!this._frozen || !this._initialized) {
- // store input
- this._previousFormat = format;
- previousInputTexture.resize(image.width, image.height);
- image.copyTo(previousInputTexture);
-
- // page flipping
- this._pageIndex = 1 - this._pageIndex;
- }
-
- // first run?
- if (!this._initialized) {
- this._initialized = true;
- this.output().swrite(previousInputTexture, format);
- return;
- }
-
- // done!
- this.output().swrite(outputTexture, previousFormat);
- }
- }
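- /*
-  * Usage sketch (assuming Speedy.Image.Buffer() from the public API): because the
-  * buffer outputs the frame received in the previous run, pairing it with the image
-  * mixer defined later in this bundle gives a simple two-frame blend. Freezing the
-  * buffer stops the page flip, so the held frame ages with each run.
-  *
-  *   const buffer = Speedy.Image.Buffer();   // assumed factory name
-  *   buffer.frozen = true;                   // keep discarding the input; the stored frame ages
-  */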
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * pyramid.js
- * Generate pyramid
- */
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; //14; // supposing image size <= 8K = 2^13 (downto 1)
- const MAX_TEXTURES = 2 * MAX_LEVELS; //MAX_LEVELS;
-
- /**
- * Generate pyramid
- */
- class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, MAX_TEXTURES + 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const outputTexture = this._tex[0];
- const pyramids = gpu.programs.pyramids;
- let width = image.width,
- height = image.height;
-
- // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
- const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
-
- // get work textures
- const mip = new Array(MAX_TEXTURES + 1);
- for (let i = MAX_TEXTURES; i >= 1; i--) mip[i - 1] = this._tex[i];
-
- // get a copy of the input image
- mip[0].resize(width, height);
- image.copyTo(mip[0]);
-
- // generate gaussian pyramid
- const numLevels = Math.min(mipLevels, MAX_LEVELS);
- for (let level = 1; level < numLevels; level++) {
- // use max(1, floor(size / 2^lod)), in accordance with
- // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
- const halfWidth = Math.max(1, width >>> 1);
- const halfHeight = Math.max(1, height >>> 1);
-
- // reduce operation
- const tmp = level - 1 + MAX_LEVELS;
- pyramids.smoothX.outputs(width, height, mip[tmp])(mip[level - 1]);
- pyramids.smoothY.outputs(width, height, mip[level - 1])(mip[tmp]);
- pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level])(mip[level - 1]);
- /*
- (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
- (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
- */
-
- // flush
- gpu.gl.flush();
-
- // next level
- width = halfWidth;
- height = halfHeight;
-
- /*
- // debug: view pyramid
- const view = mip[level-1];
- const canvas = gpu.renderToCanvas(view);
- if(!window._ww) document.body.appendChild(canvas);
- window._ww = 1;
- */
- }
-
- // copy to output & set mipmap
- outputTexture.resize(image.width, image.height);
- outputTexture.clear();
- image.copyTo(outputTexture);
- outputTexture.generateMipmaps(mip.slice(0, numLevels));
-
- // done!
- this.output().swrite(outputTexture, format);
- }
- }
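- /*
-  * Worked example of the level count above: for a 640x480 input,
-  * mipLevels = 1 + floor(log2(max(640, 480))) = 1 + floor(log2(640)) = 10,
-  * and numLevels = min(10, MAX_LEVELS). Each level is max(1, floor(size / 2^lod)),
-  * i.e. 640x480, 320x240, 160x120, ..., down to 1 pixel on the longest side.
-  */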
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * mixer.js
- * Image Mixer
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Image Mixer
- */
- class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in0').expects(SpeedyPipelineMessageType.Image), InputPort('in1').expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} alpha coefficient (applied to image0) */
- this._alpha = 0.5;
-
- /** @type {number} beta coefficient (applied to image1) */
- this._beta = 0.5;
-
- /** @type {number} gamma coefficient (brightness control) */
- this._gamma = 0.0;
- }
-
- /**
- * Alpha coefficient (applied to image0)
- * @returns {number}
- */
- get alpha() {
- return this._alpha;
- }
-
- /**
- * Alpha coefficient (applied to image0)
- * @param {number} value
- */
- set alpha(value) {
- this._alpha = +value;
- }
-
- /**
- * Beta coefficient (applied to image1)
- * @returns {number}
- */
- get beta() {
- return this._beta;
- }
-
- /**
- * Beta coefficient (applied to image1)
- * @param {number} value
- */
- set beta(value) {
- this._beta = +value;
- }
-
- /**
- * Gamma coefficient (brightness control)
- * @returns {number}
- */
- get gamma() {
- return this._gamma;
- }
-
- /**
- * Gamma coefficient (brightness control)
- * @param {number} value
- */
- set gamma(value) {
- this._gamma = +value;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const in0 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in0').read();
- const in1 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in1').read();
- const image0 = in0.image,
- image1 = in1.image;
- const format0 = in0.format,
- format1 = in1.format;
- const width = Math.max(image0.width, image1.width);
- const height = Math.max(image0.height, image1.height);
- const alpha = this._alpha,
- beta = this._beta,
- gamma = this._gamma;
- const outputTexture = this._tex[0];
- if (format0 != format1) throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
- gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
- gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
- this.output().swrite(outputTexture, format0);
- }
- }
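- /*
-  * As suggested by the coefficient names and JSDoc above, the additiveMix program
-  * computes, per pixel, output = alpha * image0 + beta * image1 + gamma (the shader
-  * itself lives elsewhere in this bundle). Example: an equal-weight blend of two
-  * frames uses alpha = beta = 0.5 and gamma = 0, which are the constructor defaults.
-  */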
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * portal.js
- * Image Portals
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A sink of an Image Portal
- * This is not a pipeline sink - it doesn't export any data!
- */
- class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {ImageFormat} stored image format */
- this._format = types/* ImageFormat */.f5.RGBA;
-
- /** @type {boolean} is this node initialized? */
- this._initialized = false;
- }
-
- /**
- * Stored image
- * @returns {SpeedyTexture}
- */
- get image() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._tex[0];
- }
-
- /**
- * Stored image format
- * @returns {ImageFormat}
- */
- get format() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._format;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._tex[0].resize(1, 1).clear(); // initial texture
- this._format = types/* ImageFormat */.f5.RGBA;
- this._initialized = true;
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const tex = this._tex[0];
-
- // can't store pyramids
- if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
-
- // copy input
- this._format = format;
- tex.resize(image.width, image.height);
- image.copyTo(tex);
- }
- }
-
- /**
- * A source of an Image Portal
- */
- class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyPipelineNodeImagePortalSink|null} the portal sink that serves as the data source of this node */
- this._source = null;
- }
-
- /**
- * Data source
- * @returns {SpeedyPipelineNodeImagePortalSink|null}
- */
- get source() {
- return this._source;
- }
-
- /**
- * Data source
- * @param {SpeedyPipelineNodeImagePortalSink|null} node
- */
- set source(node) {
- if (node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
- this._source = node;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
- this.output().swrite(this._source.image, this._source.format);
- }
- }
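- /*
-  * Usage sketch (assuming the Speedy.Image.Portal factories declared below): a portal
-  * lets an image stored by the sink be re-emitted by a source node without a direct
-  * link between them, e.g. to feed a result back into a later run.
-  *
-  *   const portalSink = Speedy.Image.Portal.Sink();      // assumed factory names
-  *   const portalSource = Speedy.Image.Portal.Source();
-  *   portalSource.source = portalSink;                   // link them via the source property
-  */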
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * image-factory.js
- * Image-related nodes
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Portal nodes
- */
- class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create an image portal source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePortalSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeImagePortalSource(name);
- }
-
- /**
- * Create an image portal sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePortalSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeImagePortalSink(name);
- }
- }
-
- /**
- * Image nodes
- */
- class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create an image source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeImageSource(name);
- }
-
- /**
- * Create an image sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeImageSink(name);
- }
-
- /**
- * Create an image multiplexer
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageMultiplexer}
- */
- static Multiplexer(name = undefined) {
- return new SpeedyPipelineNodeImageMultiplexer(name);
- }
-
- /**
- * Create an image buffer
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageBuffer}
- */
- static Buffer(name = undefined) {
- return new SpeedyPipelineNodeImageBuffer(name);
- }
-
- /**
- * Image Pyramid
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImagePyramid}
- */
- static Pyramid(name = undefined) {
- return new SpeedyPipelineNodeImagePyramid(name);
- }
-
- /**
- * Image Mixer (blending)
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeImageMixer}
- */
- static Mixer(name = undefined) {
- return new SpeedyPipelineNodeImageMixer(name);
- }
-
- /**
- * Image Portals
- * @returns {typeof SpeedyPipelineImagePortalFactory}
- */
- static get Portal() {
- return SpeedyPipelineImagePortalFactory;
- }
- }
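- /*
-  * Pipeline sketch (assuming these factories are exposed as Speedy.Image, and that
-  * Speedy.Pipeline(), connectTo(), init() and run() behave as in the public API;
-  * the media property of the source node is also assumed):
-  *
-  *   const pipeline = Speedy.Pipeline();
-  *   const source = Speedy.Image.Source();
-  *   const sink = Speedy.Image.Sink('output');
-  *   source.media = media;                       // a SpeedyMedia loaded elsewhere
-  *   source.output().connectTo(sink.input());
-  *   pipeline.init(source, sink);
-  *   const { output } = await pipeline.run();    // output is a SpeedyMedia
-  */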
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * greyscale.js
- * Convert an image to greyscale
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Convert an image to greyscale
- */
- class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const filters = gpu.programs.filters;
- filters.rgb2grey.outputs(width, height, outputTexture);
- filters.rgb2grey(image);
- this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * gaussian-blur.js
- * Gaussian Blur
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
- * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
- * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
- * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
- */
- const DEFAULT_KERNEL = Object.freeze({
- 3: [0.27901008925473514, 0.44197982149052983, 0.27901008925473514],
- // 1D convolution (sigma = 1)
- 5: [0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021],
- // 1D convolution (separable kernel)
- 7: [0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274],
- 9: [0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988],
- 11: [0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346],
- 13: [0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363],
- 15: [0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383]
- //3: [ 0.25, 0.5, 0.25 ],
- //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
- });
-
- /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
- const DEFAULT_SIGMA = new SpeedyVector2(0, 0);
-
- /** convolution programs (x-axis) */
- const CONVOLUTION_X = Object.freeze({
- 3: 'convolution3x',
- 5: 'convolution5x',
- 7: 'convolution7x',
- 9: 'convolution9x',
- 11: 'convolution11x',
- 13: 'convolution13x',
- 15: 'convolution15x'
- });
-
- /** convolution programs (y-axis) */
- const CONVOLUTION_Y = Object.freeze({
- 3: 'convolution3y',
- 5: 'convolution5y',
- 7: 'convolution7y',
- 9: 'convolution9y',
- 11: 'convolution11y',
- 13: 'convolution13y',
- 15: 'convolution15y'
- });
-
- /**
- * @typedef {object} SeparableConvolutionKernel
- * @property {number[]} x
- * @property {number[]} y
- */
-
- /**
- * Gaussian Blur
- */
- class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel */
- this._kernelSize = new SpeedySize(5, 5);
-
- /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
- this._sigma = DEFAULT_SIGMA;
-
- /** @type {SeparableConvolutionKernel} convolution kernel */
- this._kernel = {
- x: DEFAULT_KERNEL[this._kernelSize.width],
- y: DEFAULT_KERNEL[this._kernelSize.height]
- };
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const kw = kernelSize.width,
- kh = kernelSize.height;
- if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
- this._kernelSize = kernelSize;
- this._updateKernel();
- }
-
- /**
- * Sigma of the Gaussian kernel
- * @returns {SpeedyVector2}
- */
- get sigma() {
- return this._sigma;
- }
-
- /**
- * Sigma of the Gaussian kernel
- * @param {SpeedyVector2} sigma
- */
- set sigma(sigma) {
- utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
- utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
- this._sigma = sigma;
- this._updateKernel();
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const kernX = this._kernel.x;
- const kernY = this._kernel.y;
- const convX = CONVOLUTION_X[this._kernelSize.width];
- const convY = CONVOLUTION_Y[this._kernelSize.height];
- const tex = this._tex[0];
- const outputTexture = this._tex[1];
- gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
- gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Update the internal kernel to match
- * sigma and kernelSize
- */
- _updateKernel() {
- if (this._sigma.x == DEFAULT_SIGMA.x) this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];else this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
- if (this._sigma.y == DEFAULT_SIGMA.y) this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];else this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
- }
- }
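- /*
-  * Usage sketch (assuming Speedy.Filter.GaussianBlur(), Speedy.Size() and
-  * Speedy.Vector2() from the public API): with sigma left at the zero vector, the
-  * node uses the precomputed kernels above, i.e. sigma = max(1, (ksize - 1) / 4);
-  * for a 9x9 kernel that is max(1, 8 / 4) = 2.
-  *
-  *   const blur = Speedy.Filter.GaussianBlur();       // assumed factory name
-  *   blur.kernelSize = Speedy.Size(9, 9);             // 9x9 separable kernel
-  *   blur.sigma = Speedy.Vector2(1.5, 1.5);           // or set sigma explicitly
-  */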
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * simple-blur.js
- * Simple Blur (Box Filter)
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** 1D convolution filters */
- const BOX_FILTER = Object.freeze({
- 3: new Array(3).fill(1 / 3),
- 5: new Array(5).fill(1 / 5),
- 7: new Array(7).fill(1 / 7),
- 9: new Array(9).fill(1 / 9),
- 11: new Array(11).fill(1 / 11),
- 13: new Array(13).fill(1 / 13),
- 15: new Array(15).fill(1 / 15)
- });
-
- /** convolution programs (x-axis) */
- const simple_blur_CONVOLUTION_X = Object.freeze({
- 3: 'convolution3x',
- 5: 'convolution5x',
- 7: 'convolution7x',
- 9: 'convolution9x',
- 11: 'convolution11x',
- 13: 'convolution13x',
- 15: 'convolution15x'
- });
-
- /** convolution programs (y-axis) */
- const simple_blur_CONVOLUTION_Y = Object.freeze({
- 3: 'convolution3y',
- 5: 'convolution5y',
- 7: 'convolution7y',
- 9: 'convolution9y',
- 11: 'convolution11y',
- 13: 'convolution13y',
- 15: 'convolution15y'
- });
-
- /**
- * @typedef {object} SeparableConvolutionKernel
- * @property {number[]} x
- * @property {number[]} y
- */
-
- /**
- * Simple Blur (Box Filter)
- */
- class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel */
- this._kernelSize = new SpeedySize(5, 5);
-
- /** @type {SeparableConvolutionKernel} convolution kernel */
- this._kernel = {
- x: BOX_FILTER[this._kernelSize.width],
- y: BOX_FILTER[this._kernelSize.height]
- };
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const kw = kernelSize.width,
- kh = kernelSize.height;
- if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
- this._kernelSize = kernelSize;
- this._kernel.x = BOX_FILTER[this._kernelSize.width];
- this._kernel.y = BOX_FILTER[this._kernelSize.height];
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const kernX = this._kernel.x;
- const kernY = this._kernel.y;
- const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
- const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
- const tex = this._tex[0];
- const outputTexture = this._tex[1];
- gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
- gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
- this.output().swrite(outputTexture, format);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * median-blur.js
- * Median Blur
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Median programs
- const MEDIAN = {
- 3: 'median3',
- 5: 'median5',
- 7: 'median7'
- };
-
- /**
- * Median Blur
- */
- class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the kernel (assumed to be square) */
- this._kernelSize = new SpeedySize(5, 5);
- }
-
- /**
- * Size of the kernel
- * @returns {SpeedySize}
- */
- get kernelSize() {
- return this._kernelSize;
- }
-
- /**
- * Size of the kernel
- * @param {SpeedySize} kernelSize
- */
- set kernelSize(kernelSize) {
- utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
- const ksize = kernelSize.width;
- if (!(ksize == 3 || ksize == 5 || ksize == 7)) throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);else if (kernelSize.width != kernelSize.height) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
- this._kernelSize = kernelSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const ksize = this._kernelSize.width;
- const med = MEDIAN[ksize];
- const outputTexture = this._tex[0];
- gpu.programs.filters[med].outputs(width, height, outputTexture)(image);
- this.output().swrite(outputTexture, format);
- }
- }
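- /*
-  * Usage sketch (assuming Speedy.Filter.MedianBlur() and Speedy.Size() from the
-  * public API): the input port only accepts greyscale images, and the kernel must
-  * be a square of size 3, 5 or 7, as enforced by the setter above.
-  *
-  *   const median = Speedy.Filter.MedianBlur();       // assumed factory name
-  *   median.kernelSize = Speedy.Size(7, 7);
-  */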
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * convolution.js
- * Image convolution
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- // 2D convolution programs
- const CONVOLUTION = {
- 3: 'convolution3',
- 5: 'convolution5',
- 7: 'convolution7'
- };
-
- /**
- * Image convolution
- */
- class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMatrix} convolution kernel (square matrix) */
- this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
- }
-
- /**
- * Convolution kernel
- * @returns {SpeedyMatrix}
- */
- get kernel() {
- return this._kernel;
- }
-
- /**
- * Convolution kernel
- * @param {SpeedyMatrix} kernel
- */
- set kernel(kernel) {
- if (kernel.rows != kernel.columns) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);else if (!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7)) throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
- this._kernel = kernel;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const ksize = this._kernel.rows;
- const conv = CONVOLUTION[ksize];
- const kernel = this._kernel.read();
- gpu.programs.filters[conv].outputs(width, height, outputTexture)(image, kernel);
- this.output().swrite(outputTexture, format);
- }
- }
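- /*
-  * Usage sketch (assuming Speedy.Filter.Convolution() and Speedy.Matrix() from the
-  * public API): the kernel must be a square 3x3, 5x5 or 7x7 SpeedyMatrix. A symmetric
-  * sharpening kernel, for which the storage order does not matter:
-  *
-  *   const conv = Speedy.Filter.Convolution();        // assumed factory name
-  *   conv.kernel = Speedy.Matrix(3, 3, [
-  *       0, -1,  0,
-  *      -1,  5, -1,
-  *       0, -1,  0
-  *   ]);
-  */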
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * nightvision.js
- * Nightvision filter
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
- */
-
- /**
- * Nightvision filter: "see in the dark"
- */
- class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.RGBA || msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} a value typically in [0,1]: larger number => higher contrast */
- this._gain = 0.5;
-
- /** @type {number} a value typically in [0,1]: controls brightness */
- this._offset = 0.5;
-
- /** @type {number} gain decay, a value in [0,1] */
- this._decay = 0.0;
-
- /** @type {NightvisionQualityLevel} quality level */
- this._quality = 'medium';
- }
-
- /**
- * Gain, a value typically in [0,1]: larger number => higher contrast
- * @returns {number}
- */
- get gain() {
- return this._gain;
- }
-
- /**
- * Gain, a value typically in [0,1]: larger number => higher contrast
- * @param {number} gain
- */
- set gain(gain) {
- this._gain = +gain;
- }
-
- /**
- * Offset, a value typically in [0,1] that controls the brightness
- * @returns {number}
- */
- get offset() {
- return this._offset;
- }
-
- /**
- * Offset, a value typically in [0,1] that controls the brightness
- * @param {number} offset
- */
- set offset(offset) {
- this._offset = +offset;
- }
-
- /**
- * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
- * @returns {number}
- */
- get decay() {
- return this._decay;
- }
-
- /**
- * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
- * @param {number} decay
- */
- set decay(decay) {
- this._decay = Math.max(0.0, Math.min(+decay, 1.0));
- }
-
- /**
- * Quality level of the filter
- * @returns {NightvisionQualityLevel}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Quality level of the filter
- * @param {NightvisionQualityLevel} quality
- */
- set quality(quality) {
- if (quality === 'high' || quality === 'medium' || quality === 'low') this._quality = quality;else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const gain = this._gain;
- const offset = this._offset;
- const decay = this._decay;
- const quality = this._quality;
- const filters = gpu.programs.filters;
- const tmp = this._tex[0];
- const illuminationMap = this._tex[1];
- const outputTexture = this._tex[2];
-
- // compute illumination map
- if (quality == 'medium') {
- filters.illuminationMapX.outputs(width, height, tmp);
- filters.illuminationMapY.outputs(width, height, illuminationMap);
- filters.illuminationMapX(image);
- filters.illuminationMapY(tmp);
- } else if (quality == 'high') {
- filters.illuminationMapHiX.outputs(width, height, tmp);
- filters.illuminationMapHiY.outputs(width, height, illuminationMap);
- filters.illuminationMapHiX(image);
- filters.illuminationMapHiY(tmp);
- } else if (quality == 'low') {
- filters.illuminationMapLoX.outputs(width, height, tmp);
- filters.illuminationMapLoY.outputs(width, height, illuminationMap);
- filters.illuminationMapLoX(image);
- filters.illuminationMapLoY(tmp);
- }
-
- // run nightvision
- if (format === types/* ImageFormat */.f5.GREY) {
- filters.nightvisionGreyscale.outputs(width, height, outputTexture);
- filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
- } else if (format === types/* ImageFormat */.f5.RGBA) {
- filters.nightvision.outputs(width, height, outputTexture);
- filters.nightvision(image, illuminationMap, gain, offset, decay);
- }
-
- // done!
- this.output().swrite(outputTexture, format);
- }
- }
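- /*
-  * Usage sketch (assuming Speedy.Filter.Nightvision() from the public API): the filter
-  * accepts RGBA or greyscale input; gain raises contrast, offset raises brightness,
-  * decay fades the gain away from the image center, and quality selects which
-  * illumination-map programs are used in _run().
-  *
-  *   const nightvision = Speedy.Filter.Nightvision(); // assumed factory name
-  *   nightvision.gain = 0.4;
-  *   nightvision.offset = 0.6;
-  *   nightvision.decay = 0.1;
-  *   nightvision.quality = 'high';                    // 'high' | 'medium' | 'low'
-  */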
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * normalize.js
- * Normalize image to a range
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Normalize image to a range
- */
- class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {number} a value in [0,255] */
- this._minValue = 0;
-
- /** @type {number} a value in [0,255] */
- this._maxValue = 255;
- }
-
- /**
- * Minimum intensity in the output image, a value in [0,255]
- * @returns {number}
- */
- get minValue() {
- return this._minValue;
- }
-
- /**
- * Minimum intensity in the output image, a value in [0,255]
- * @param {number} minValue
- */
- set minValue(minValue) {
- this._minValue = Math.max(0, Math.min(+minValue, 255));
- }
-
- /**
- * Maximum intensity in the output image, a value in [0,255]
- * @returns {number}
- */
- get maxValue() {
- return this._maxValue;
- }
-
- /**
- * Maximum intensity in the output image, a value in [0,255]
- * @param {number} maxValue
- */
- set maxValue(maxValue) {
- this._maxValue = Math.max(0, Math.min(+maxValue, 255));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[3];
- let minValue = this._minValue;
- let maxValue = this._maxValue;
- if (minValue > maxValue) minValue = maxValue = (minValue + maxValue) / 2;
- const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
- gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
- gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Scan a single component in all pixels of the image and find the min & max intensities
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTexture} image input image
- * @param {PixelComponent} pixelComponent a single PixelComponent flag
- * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
- */
- _scanMinMax(gpu, image, pixelComponent) {
- const tex = this._tex;
- const program = gpu.programs.utils;
- const width = image.width,
- height = image.height;
- const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
- utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
- program.copyComponents.outputs(width, height, tex[2]);
- program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
- let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
- for (let i = 0; i < numIterations; i++) texture = program.scanMinMax2D(texture, i);
- return texture;
- }
- }
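- /*
-  * Usage sketch (assuming Speedy.Filter.Normalize() from the public API, and that
-  * normalizeGreyscale performs a linear stretch of the scanned min/max): the darkest
-  * pixel maps to minValue and the brightest to maxValue; the defaults [0, 255] give
-  * a full contrast stretch of a greyscale image.
-  *
-  *   const normalize = Speedy.Filter.Normalize();     // assumed factory name
-  *   normalize.minValue = 16;
-  *   normalize.maxValue = 240;                        // compress the output range slightly
-  */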
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * filter-factory.js
- * Image filters
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Image filters
- */
- class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Convert image to greyscale
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeGreyscale}
- */
- static Greyscale(name = undefined) {
- return new SpeedyPipelineNodeGreyscale(name);
- }
-
- /**
- * Gaussian Blur
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeGaussianBlur}
- */
- static GaussianBlur(name = undefined) {
- return new SpeedyPipelineNodeGaussianBlur(name);
- }
-
- /**
- * Simple Blur (Box Filter)
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeSimpleBlur}
- */
- static SimpleBlur(name = undefined) {
- return new SpeedyPipelineNodeSimpleBlur(name);
- }
-
- /**
- * Median Blur
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeMedianBlur}
- */
- static MedianBlur(name = undefined) {
- return new SpeedyPipelineNodeMedianBlur(name);
- }
-
- /**
- * Image Convolution
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeConvolution}
- */
- static Convolution(name = undefined) {
- return new SpeedyPipelineNodeConvolution(name);
- }
-
- /**
- * Nightvision
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeNightvision}
- */
- static Nightvision(name = undefined) {
- return new SpeedyPipelineNodeNightvision(name);
- }
-
- /**
- * Normalize image
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeNormalize}
- */
- static Normalize(name = undefined) {
- return new SpeedyPipelineNodeNormalize(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * perspective-warp.js
- * Warp an image using a perspective transformation
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Used when an invalid matrix is provided
- const SINGULAR_MATRIX = [0, 0, 0, 0, 0, 0, 0, 0, 1];
-
- /**
- * Warp an image using a perspective transformation
- */
- class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedyMatrix} perspective transformation */
- this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
- }
-
- /**
- * Perspective transform, a 3x3 homography matrix
- * @returns {SpeedyMatrix}
- */
- get transform() {
- return this._transform;
- }
-
- /**
- * Perspective transform, a 3x3 homography matrix
- * @param {SpeedyMatrix} transform
- */
- set transform(transform) {
- if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
- this._transform = transform;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const homography = this._transform.read();
- const inverseHomography = this._inverse3(homography);
- const isValidHomography = !Number.isNaN(inverseHomography[0]);
- gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
- gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
- this.output().swrite(outputTexture, format);
- }
-
- /**
- * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
- * @param {number[]} mat 3x3 matrix in column-major format
- * @param {number} [eps] epsilon
- * @returns {number[]} 3x3 inverse matrix in column-major format
- */
- _inverse3(mat, eps = 1e-6) {
- // read the entries of the matrix
- const a11 = mat[0];
- const a21 = mat[1];
- const a31 = mat[2];
- const a12 = mat[3];
- const a22 = mat[4];
- const a32 = mat[5];
- const a13 = mat[6];
- const a23 = mat[7];
- const a33 = mat[8];
-
- // compute cofactors
- const b1 = a33 * a22 - a32 * a23; // b11
- const b2 = a33 * a12 - a32 * a13; // b21
- const b3 = a23 * a12 - a22 * a13; // b31
-
- // compute the determinant
- const det = a11 * b1 - a21 * b2 + a31 * b3;
-
- // set up the inverse
- if (!(Math.abs(det) < eps)) {
- const d = 1.0 / det;
- mat[0] = b1 * d;
- mat[1] = -(a33 * a21 - a31 * a23) * d;
- mat[2] = (a32 * a21 - a31 * a22) * d;
- mat[3] = -b2 * d;
- mat[4] = (a33 * a11 - a31 * a13) * d;
- mat[5] = -(a32 * a11 - a31 * a12) * d;
- mat[6] = b3 * d;
- mat[7] = -(a23 * a11 - a21 * a13) * d;
- mat[8] = (a22 * a11 - a21 * a12) * d;
- } else mat.fill(Number.NaN, 0, 9);
-
- // done!
- return mat;
- }
- }
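- /*
-  * Note on _inverse3(): passing the inverted homography to warpPerspective suggests
-  * the shader samples the input via inverse mapping (output pixel -> input coordinates).
-  * The inverse is the adjugate divided by det = a11*b1 - a21*b2 + a31*b3, using the
-  * cofactors b1, b2, b3 computed above; a near-zero determinant fills the matrix with
-  * NaN, and the node then falls back to SINGULAR_MATRIX, which collapses every point
-  * to (0, 0).
-  */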
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * resize.js
- * Resize image
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
-
- /**
- * Resize image
- */
- class SpeedyPipelineNodeResize extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
-
- /** @type {SpeedySize} size of the output image, in pixels */
- this._size = new SpeedySize(0, 0);
-
- /** @type {SpeedyVector2} size of the output relative to the size of the input */
- this._scale = new SpeedyVector2(1, 1);
-
- /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
- this._method = 'bilinear';
- }
-
- /**
- * Size of the output image, in pixels (use 0 to use scale)
- * @returns {SpeedySize}
- */
- get size() {
- return this._size;
- }
-
- /**
- * Size of the output image, in pixels (use 0 to use scale)
- * @param {SpeedySize} size
- */
- set size(size) {
- this._size = size;
- }
-
- /**
- * Size of the output image relative to the size of the input image
- * @returns {SpeedyVector2}
- */
- get scale() {
- return this._scale;
- }
-
- /**
- * Size of the output image relative to the size of the input image
- * @param {SpeedyVector2} scale
- */
- set scale(scale) {
- this._scale = scale;
- }
-
- /**
- * Interpolation method
- * @returns {SpeedyPipelineNodeResizeMethod}
- */
- get method() {
- return this._method;
- }
-
- /**
- * Interpolation method
- * @param {SpeedyPipelineNodeResizeMethod} method
- */
- set method(method) {
- if (method !== 'nearest' && method !== 'bilinear') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid interpolation method: "${method}"`);
- this._method = method;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const outputTexture = this._tex[0];
- const method = this._method;
- const newWidth = this._size.width || Math.max(1, this._scale.x * width);
- const newHeight = this._size.height || Math.max(1, this._scale.y * height);
- if (method == 'bilinear') {
- gpu.programs.transforms.resizeBilinear.outputs(newWidth, newHeight, outputTexture)(image);
- } else if (method == 'nearest') {
- gpu.programs.transforms.resizeNearest.outputs(newWidth, newHeight, outputTexture)(image);
- }
- this.output().swrite(outputTexture, format);
- }
- }
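- /*
-  * Usage sketch (assuming Speedy.Transform.Resize(), Speedy.Size() and Speedy.Vector2()
-  * from the public API): a size component of 0 falls back to the corresponding scale
-  * factor, as in the _run() method above.
-  *
-  *   const resize = Speedy.Transform.Resize();        // assumed factory name
-  *   resize.size = Speedy.Size(0, 0);                 // 0 means: use scale instead
-  *   resize.scale = Speedy.Vector2(0.5, 0.5);         // halve both dimensions
-  *   resize.method = 'nearest';                       // or 'bilinear' (default)
-  */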
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transform-factory.js
- * Image transforms
- */
-
-
-
-
-
- /**
- * Image transforms
- */
- class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Resize image
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeResize}
- */
- static Resize(name = undefined) {
- return new SpeedyPipelineNodeResize(name);
- }
-
- /**
- * Warp an image using a perspective transformation
- * @param {string} [name]
- * @returns {SpeedyPipelineNodePerspectiveWarp}
- */
- static PerspectiveWarp(name = undefined) {
- return new SpeedyPipelineNodePerspectiveWarp(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * detector.js
- * Abstract keypoint detectors
- */
-
-
-
-
-
-
-
-
-
-
- // Constants
- const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
- const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
- const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
- const NUMBER_OF_RGBA16_TEXTURES = 2;
-
- // legacy constants
- const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
- const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
- const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
-
- /**
- * Abstract keypoint detector
- * @abstract
- */
- class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = 0, portBuilders = undefined) {
- super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
-
- /** @type {number} encoder capacity */
- this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
-
- /** @type {GLint} auxiliary storage */
- this._oldWrapS = 0;
-
- /** @type {SpeedyDrawableTexture[]} textures with 8 bytes per pixel */
- this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
- }
-
- /**
- * Initialize this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- // initialize
- super.init(gpu);
-
- // encodeKeypointSkipOffsets() relies on this
- this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
-
- // allocate RGBA16 textures
- this._allocateTex16(gpu);
- gpu.subscribe(this._allocateTex16, this, gpu);
- }
-
- /**
- * Release this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- // deallocate RGBA16 textures
- gpu.unsubscribe(this._allocateTex16, this);
- this._deallocateTex16(gpu);
-
- // we need to restore the texture parameter because textures come from a pool!
- this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
-
- // release
- super.release(gpu);
- }
-
- /**
- * Set a parameter of the special texture
- * @param {GLenum} pname
- * @param {GLint} param new value
- * @returns {GLint} old value of param
- */
- _setupSpecialTexture(pname, param) {
- if (NUMBER_OF_INTERNAL_TEXTURES == 0) return;
-
- // legacy code
- const texture = this._tex[this._tex.length - 1];
- const gl = texture.gl;
- gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
- const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
- gl.texParameteri(gl.TEXTURE_2D, pname, param);
- gl.bindTexture(gl.TEXTURE_2D, null);
- return oldval;
- }
-
- /**
- * We can encode up to this many keypoints. If you find a
- * tight bound for this, download times will be faster.
- * @returns {number}
- */
- get capacity() {
- return this._capacity;
- }
-
- /**
- * We can encode up to this many keypoints. If you find a
- * tight bound for this, download times will be faster.
- * @param {number} capacity
- */
- set capacity(capacity) {
- this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
- }
-
- /**
- * Create a tiny texture with encoded keypoints out of
- * an encoded corners texture
- * @param {SpeedyGPU} gpu
- * @param {SpeedyTexture} corners input
- * @param {SpeedyDrawableTexture} encodedKeypoints output
- * @param {number} [descriptorSize] in bytes
- * @param {number} [extraSize] in bytes
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const encoderCapacity = this._capacity;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
- const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
- const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
- //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
- const maxSize = Math.max(width, height);
- const keypoints = gpu.programs.keypoints;
-
- // prepare programs
- keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
- keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
- keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
-
- // compute lookup table
- let lookupTable = keypoints.initLookupTable(corners);
- for (let b = 1; b < maxSize; b *= 2) lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
-
- /*
- // debug: view texture
- const lookupView = (keypoints.viewLookupTable.outputs(
- width, height, this._tex[0]
- ))(lookupTable);
- const canvas = gpu.renderToCanvas(lookupView);
- if(!this._ww) document.body.appendChild(canvas);
- this._ww = 1;
- */
-
- // encode keypoints
- return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
- }
-
- /**
- * Legacy implementation of _encodeKeypoints(), kept for reference.
- * It relies on the internal textures that are currently disabled (NUMBER_OF_INTERNAL_TEXTURES == 0).
- */
- _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const capacity = this._capacity;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const width = corners.width,
- height = corners.height;
- const imageSize = [width, height];
- const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
- const keypoints = gpu.programs.keypoints;
- const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
-
- // prepare programs
- keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
- keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
- keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
- keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
-
- // copy the input corners to a special texture
- // that is needed by encodeKeypointSkipOffsets()
- corners = gpu.programs.utils.copy.outputs(width, height, specialTexture)(corners);
-
- // encode skip offsets
- let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
- for (let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) {
- // to boost performance
- // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
- // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
- offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
- }
-
- /*
- // debug: view corners
- let cornerview = offsets;
- const canvas = gpu.renderToCanvas(cornerview);
- if(!window._ww) document.body.appendChild(canvas);
- window._ww = 1;
- */
-
- // encode keypoint positions
- let encodedKps = tex[3].clear();
- for (let j = 0; j < ENCODER_PASSES; j++) encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
-
- // encode keypoint properties
- return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Create a tiny texture with zero encoded keypoints
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} encodedKeypoints output texture
- * @param {number} [descriptorSize] in bytes
- * @param {number} [extraSize] in bytes
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
- const capacity = 0;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const keypoints = gpu.programs.keypoints;
- keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
- return keypoints.encodeNullKeypoints();
- }
-
- /**
- * Allocate RGBA16 textures
- * @param {SpeedyGPU} gpu
- */
- _allocateTex16(gpu) {
- const gl = gpu.gl;
-
- // RGBA16UI is color renderable according to the OpenGL ES 3 spec
- for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
- }
-
- /**
- * Deallocate RGBA16 textures
- * @param {SpeedyGPU} gpu
- */
- _deallocateTex16(gpu) {
- for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = this._tex16[i].release();
- }
-
- /**
- * Compute the length of the keypoint encoder, given its capacity
- * @param {number} encoderCapacity how many keypoints can we fit?
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @returns {number} encoder length (the side of the square encoder texture)
- */
- static encoderLength(encoderCapacity, descriptorSize, extraSize) {
- const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
- const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
- return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
- }
-
- /**
- * The maximum number of keypoints we can store using
- * a particular configuration of a keypoint encoder
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderLength
- * @returns {number} maximum number of keypoints
- */
- static encoderCapacity(descriptorSize, extraSize, encoderLength) {
- const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
- const numberOfPixels = encoderLength * encoderLength;
- return Math.floor(numberOfPixels / pixelsPerKeypoint);
- }
- }
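-
- /*
- // worked example of the two static helpers above, with illustrative numbers.
- // It assumes globals.MIN_KEYPOINT_SIZE == 8 (the 8-byte header decoded by the keypoint sinks)
- // and that globals.MIN_ENCODER_LENGTH is small enough not to matter here.
- //
- // encoderLength(800, 32, 0):
- //   pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10
- //   numberOfPixels    = 800 * 10 = 8000
- //   encoderLength     = ceil(sqrt(8000)) = 90     --> a 90x90 encoder texture
- //
- // encoderCapacity(32, 0, 90):
- //   floor(90 * 90 / 10) = 810                     --> at least 800, as expected
- */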
-
- /**
- * Abstract scale-space keypoint detector
- * @abstract
- */
- class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
- super(name, texCount, portBuilders);
-
- /** @type {number} number of pyramid levels */
- this._levels = 1;
-
- /** @type {number} scale factor between two pyramid levels */
- this._scaleFactor = DEFAULT_SCALE_FACTOR;
- }
-
- /**
- * Number of pyramid levels
- * @returns {number}
- */
- get levels() {
- return this._levels;
- }
-
- /**
- * Number of pyramid levels
- * @param {number} levels
- */
- set levels(levels) {
- this._levels = Math.max(1, levels | 0);
- }
-
- /**
- * Scale factor between two pyramid levels
- * @returns {number}
- */
- get scaleFactor() {
- return this._scaleFactor;
- }
-
- /**
- * Scale factor between two pyramid levels
- * @param {number} scaleFactor should be greater than 1
- */
- set scaleFactor(scaleFactor) {
- this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
- }
- }
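-
- /*
- // note on the setters above (a sketch; the exact level-of-detail sampling is up to the concrete detector):
- //   detector.levels = 0;       // stored as 1 (at least one pyramid level)
- //   detector.scaleFactor = 3;  // stored as 2.0 (clamped to the range [1, 2])
- // with the default scaleFactor of sqrt(2), consecutive pyramid levels are half an octave apart (log2(sqrt(2)) = 0.5)
- */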
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * source.js
- * Gets keypoints into the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
- const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
- const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
-
- /**
- * Gets keypoints into the pipeline
- */
- class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
- this._keypoints = [];
-
- /** @type {Float32Array} upload buffer (UBO) */
- this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
-
- /** @type {number} maximum number of keypoints */
- this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
- }
-
- /**
- * Keypoints to be uploaded
- * @returns {SpeedyKeypoint[]}
- */
- get keypoints() {
- return this._keypoints;
- }
-
- /**
- * Keypoints to be uploaded
- * @param {SpeedyKeypoint[]} keypoints
- */
- set keypoints(keypoints) {
- if (!Array.isArray(keypoints)) throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
- this._keypoints = keypoints;
- }
-
- /**
- * The maximum number of keypoints we'll accept.
- * This should be a tight bound for better performance.
- * @returns {number}
- */
- get capacity() {
- return this._capacity;
- }
-
- /**
- * The maximum number of keypoints we'll accept.
- * This should be a tight bound for better performance.
- * @param {number} capacity
- */
- set capacity(capacity) {
- this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- // Orientation, descriptors and extra bytes will be lost
- const descriptorSize = 0,
- extraSize = 0;
- const keypoints = this._keypoints;
- const maxKeypoints = this._capacity;
- const numKeypoints = Math.min(keypoints.length, maxKeypoints);
- const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
- const buffer = this._buffer;
- const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
-
- uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
- let startIndex = 0,
- encodedKeypoints = uploadKeypoints.clear();
- for (let i = 0; i < numPasses; i++) {
- const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
- const endIndex = startIndex + n;
- uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
- encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
- startIndex = endIndex;
- }
- this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Create an upload buffer
- * @param {number} bufferSize number of keypoints
- * @returns {Float32Array}
- */
- static _createUploadBuffer(bufferSize) {
- const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
- utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
- return new Float32Array(internalBuffer);
- }
-
- /**
- * Fill upload buffer with keypoint data
- * @param {Float32Array} buffer
- * @param {SpeedyKeypoint[]} keypoints
- * @param {number} start index, inclusive
- * @param {number} end index, exclusive
- * @returns {Float32Array} buffer
- */
- static _fillUploadBuffer(buffer, keypoints, start, end) {
- const n = end - start;
- for (let i = 0; i < n; i++) {
- const keypoint = keypoints[start + i];
- const hasPos = keypoint.position !== undefined;
- const j = i * 4;
-
- // Format data as follows:
- // vec4(xpos, ypos, lod, score)
- buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
- buffer[j + 1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
- buffer[j + 2] = +keypoint.lod || 0;
- buffer[j + 3] = +keypoint.score || 0;
- }
-
- // done!
- return buffer;
- }
- }
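-
- /*
- // worked example: how keypoints are packed into the upload buffer (UBO) by the helpers above.
- // Plain objects are enough here, since _fillUploadBuffer() only reads x/y (or position.x/y), lod and score.
- const buf = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
- const kps = [{ x: 10, y: 20, lod: 0, score: 1 }, { x: 5.5, y: 7.25, lod: 1, score: 0.5 }];
- SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buf, kps, 0, kps.length);
- // buf now begins with two vec4s: [10, 20, 0, 1] and [5.5, 7.25, 1, 0.5].
- // At most this._capacity keypoints are uploaded; the shader consumes up to BUFFER_SIZE keypoints per pass.
- */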
- ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * speedy-keypoint-descriptor.js
- * Keypoint descriptor
- */
-
- /**
- * Represents a keypoint descriptor
- */
- class SpeedyKeypointDescriptor {
- /**
- * Constructor
- * @param {Uint8Array} data descriptor bytes
- */
- constructor(data) {
- this._data = data;
- return Object.freeze(this);
- }
-
- /**
- * Descriptor data
- * @returns {Uint8Array}
- */
- get data() {
- return this._data;
- }
-
- /**
- * The size of the descriptor, in bytes
- * @returns {number}
- */
- get size() {
- return this._data.byteLength;
- }
-
- /**
- * A string representation of the keypoint descriptor
- * @returns {string}
- */
- toString() {
- return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets keypoints out of the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** next power of 2 */
- const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
-
- /** empty array of bytes */
- const ZERO_BYTES = new Uint8Array([]);
-
- /**
- * Gets keypoints out of the pipeline
- * @template {SpeedyKeypoint} T
- * @abstract
- */
- class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount]
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
- */
- constructor(name = 'keypoints', texCount = 0, portBuilders = []) {
- super(name, texCount + 2, portBuilders);
-
- /** @type {Array<T|null>} keypoints (output) */
- this._keypoints = [];
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader();
-
- /** @type {number} page flipping index */
- this._page = 0;
-
- /** @type {boolean} accelerate GPU-CPU transfers */
- this._turbo = false;
-
- /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
- this._includeDiscarded = false;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @returns {boolean}
- */
- get turbo() {
- return this._turbo;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @param {boolean} value
- */
- set turbo(value) {
- this._turbo = Boolean(value);
- }
-
- /**
- * Should discarded keypoints be exported as null or dropped altogether?
- * @returns {boolean}
- */
- get includeDiscarded() {
- return this._includeDiscarded;
- }
-
- /**
- * Should discarded keypoints be exported as null or dropped altogether?
- * @param {boolean} value
- */
- set includeDiscarded(value) {
- this._includeDiscarded = Boolean(value);
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<Array<T|null>>}
- */
- export() {
- return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Download and decode keypoints from the GPU
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} encodedKeypoints
- * @param {number} descriptorSize
- * @param {number} extraSize
- * @param {number} encoderLength
- * @returns {SpeedyPromise<void>}
- */
- _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength) {
- const useBufferedDownloads = this._turbo;
-
- /*
- I have found experimentally that, in Firefox, readPixelsAsync()
- performs MUCH better if the width of the target texture is a power
- of two. I have no idea why this is the case, nor if it's related to
- some interaction with the GL drivers, somehow. This seems to make no
- difference on Chrome, however. In any case, let's convert the input
- texture to POT.
- */
- const encoderWidth = sink_nextPot(encoderLength);
- //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
- const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
- //const encoderWidth=encoderLength,encoderHeight=encoderLength;
-
- // copy the set of keypoints to an internal texture
- const copiedTexture = this._tex[this._tex.length - 1 - this._page];
- gpu.programs.utils.copyKeypoints.outputs(encoderWidth, encoderHeight, copiedTexture)(encodedKeypoints);
-
- // flip page
- this._page = 1 - this._page;
-
- // download the internal texture
- return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
- // decode the keypoints and store them in this._keypoints
- this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
- });
- }
-
- /**
- * Decode a sequence of keypoints, given a flattened image of encoded pixels
- * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
- * @param {number} descriptorSize in bytes
- * @param {number} extraSize in bytes
- * @param {number} encoderWidth
- * @param {number} encoderHeight
- * @returns {Array<T|null>} keypoints
- */
- _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight) {
- const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
- const m = globals.LOG2_PYRAMID_MAX_SCALE,
- h = globals.PYRAMID_MAX_LEVELS;
- const piOver255 = Math.PI / 255.0;
- const keypoints = /** @type {Array<T|null>} */[];
- const includeDiscarded = this._includeDiscarded;
- let descriptorBytes = ZERO_BYTES,
- extraBytes = ZERO_BYTES;
- let x, y, z, w, lod, rotation, score;
- let keypoint;
-
- // validate
- if (descriptorSize % 4 != 0 || extraSize % 4 != 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
-
- // how many bytes should we read?
- const e2 = encoderWidth * encoderHeight * 4;
- const size = pixels.byteLength;
- if (size != e2) utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
-
- // copy the data (we use shared buffers when receiving pixels[])
- if (descriptorSize + extraSize > 0) pixels = new Uint8Array(pixels);
-
- // for each encoded keypoint
- for (let i = 0; i < size; i += bytesPerKeypoint) {
- // extract encoded header
- x = pixels[i + 1] << 8 | pixels[i];
- y = pixels[i + 3] << 8 | pixels[i + 2];
- z = pixels[i + 5] << 8 | pixels[i + 4];
- w = pixels[i + 7] << 8 | pixels[i + 6];
-
- // the keypoint is "null": we have reached the end of the list
- if (x == 0xFFFF && y == 0xFFFF) break;
-
- // the header is zero: discard the keypoint
- if (x + y + z + w == 0) {
- if (includeDiscarded) keypoints.push(null);
- continue;
- }
-
- // extract extra & descriptor bytes
- if (extraSize > 0) {
- extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
- if (extraBytes.byteLength < extraSize) {
- utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
- continue; // something is off here; discard
- }
- }
- if (descriptorSize > 0) {
- descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
- if (descriptorBytes.byteLength < descriptorSize) {
- utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
- continue; // something is off here; discard
- }
- }
-
- // decode position: convert from fixed-point
- x /= globals.FIX_RESOLUTION;
- y /= globals.FIX_RESOLUTION;
-
- // decode level-of-detail
- lod = pixels[i + 4] < 255 ? -m + (m + h) * pixels[i + 4] / 255.0 : 0.0;
-
- // decode orientation
- rotation = (2 * pixels[i + 5] - 255) * piOver255;
-
- // decode score
- score = utils/* Utils */.A.decodeFloat16(w);
-
- // create keypoint
- keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
-
- // register keypoint
- keypoints.push(keypoint);
- }
-
- // done!
- return keypoints;
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {T}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- throw new utils_errors/* AbstractMethodError */.aQ();
- }
-
- /**
- * Allocate extra space
- * @param {SpeedyGPU} gpu
- * @param {SpeedyDrawableTexture} output output texture
- * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
- * @param {number} inputDescriptorSize in bytes, must be positive
- * @param {number} inputExtraSize must be 0
- * @param {number} outputDescriptorSize must be inputDescriptorSize
- * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
- * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
- */
- _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize) {
- utils/* Utils */.A.assert(inputExtraSize === 0);
- utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
- const inputEncoderLength = inputEncodedKeypoints.width;
- const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
- const outputEncoderCapacity = inputEncoderCapacity;
- const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
- return gpu.programs.keypoints.allocateExtra.outputs(outputEncoderLength, outputEncoderLength, output)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
- }
- }
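-
- /*
- // layout of one encoded keypoint, as decoded by _decode() above (8-byte header, then extra, then descriptor):
- //   bytes 0-1: x position, little-endian fixed-point (divided by globals.FIX_RESOLUTION)
- //   bytes 2-3: y position, same encoding
- //   byte  4  : level-of-detail (255 is a sentinel meaning lod = 0)
- //   byte  5  : rotation, mapped linearly from [0, 255] to [-PI, PI]
- //   bytes 6-7: score, little-endian half-float
- // x == y == 0xFFFF marks the end of the list; an all-zero header marks a discarded keypoint.
- //
- // illustrative decode of the header [0x82, 0x02, 0x40, 0x01, 0xFF, 0x80, 0x00, 0x3C],
- // assuming (hypothetically) that FIX_RESOLUTION == 4:
- //   x = 0x0282 / 4 = 160.5,  y = 0x0140 / 4 = 80
- //   lod = 0 (byte 4 is 255), rotation = (2*128 - 255) * PI / 255, roughly 0.0123 rad
- //   score = decodeFloat16(0x3C00) = 1.0
- */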
-
- /**
- * Gets standard keypoints out of the pipeline
- * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
- */
- class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // create keypoint
- return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
- }
- }
-
- /**
- * Gets tracked keypoints out of the pipeline
- * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
- */
- class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const {
- vectors
- } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input('flow').read();
-
- // allocate extra space
- const newDescriptorSize = descriptorSize;
- const newExtraSize = 4; // 1 pixel per flow vector per keypoint
- const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
-
- // attach flow vectors
- const newEncoderLength = encodedKeypointsWithExtraSpace.width;
- const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
-
- // done!
- return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyTrackedKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
- const extraSize = extraBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // read flow vector
- const fx = utils/* Utils */.A.decodeFloat16(extraBytes[1] << 8 | extraBytes[0]);
- const fy = utils/* Utils */.A.decodeFloat16(extraBytes[3] << 8 | extraBytes[2]);
- const flow = new SpeedyVector2(fx, fy);
-
- // create keypoint
- return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
- }
- }
-
- /**
- * Gets matched keypoints out of the pipeline
- * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
- */
- class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'keypoints') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const {
- encodedMatches,
- matchesPerKeypoint
- } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */this.input('matches').read();
-
- // allocate space for the matches
- const newDescriptorSize = descriptorSize;
- const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
- const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
-
- // transfer matches to a new texture
- const newEncoderLength = encodedKeypointsWithExtraSpace.width;
- const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
-
- // done!
- return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
- }
-
- /**
- * Instantiate a new keypoint
- * @param {number} x
- * @param {number} y
- * @param {number} lod
- * @param {number} rotation
- * @param {number} score
- * @param {Uint8Array} descriptorBytes
- * @param {Uint8Array} extraBytes
- * @returns {SpeedyMatchedKeypoint}
- */
- _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
- const descriptorSize = descriptorBytes.byteLength;
- const extraSize = extraBytes.byteLength;
-
- // read descriptor, if any
- const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
-
- // decode matches
- const matchesPerKeypoint = extraSize / 4;
- const matches = /** @type {SpeedyKeypointMatch[]} */new Array(matchesPerKeypoint);
- for (let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
- const base = matchIndex * 4;
- const u32 = extraBytes[base] | extraBytes[base + 1] << 8 | extraBytes[base + 2] << 16 | extraBytes[base + 3] << 24;
- const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
- matches[matchIndex] = match;
- }
-
- // done!
- return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
- }
- }
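-
- /*
- // each match occupies 4 extra bytes, read above as a little-endian uint32:
- // the low MATCH_INDEX_BITS bits hold the index of the matched keypoint and the
- // remaining high bits hold the second field of SpeedyKeypointMatch (presumably the match distance).
- // e.g., if MATCH_INDEX_BITS were 16 (hypothetical), the bytes [0x05, 0x00, 0x2A, 0x00]
- // would decode to index 5 with a second field of 42.
- */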
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * clipper.js
- * Keypoint clipper
- */
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const LOG2_STRIDE = 5;
- const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
-
- /**
- * Keypoint clipper: filters the best keypoints from a stream
- */
- class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} the maximum number of keypoints in the output */
- this._size = MAX_SIZE;
- }
-
- /**
- * The maximum number of keypoints in the output
- * @returns {number}
- */
- get size() {
- return this._size;
- }
-
- /**
- * The maximum number of keypoints in the output
- * @param {number} size
- */
- set size(size) {
- this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const keypoints = gpu.programs.keypoints;
- const clipValue = this._size;
- const tex = this._tex;
- const outputTexture = this._tex[3];
-
- // find the capacity of the keypoint stream (the commented-out line below computed the minimum power of 2 >= capacity)
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
-
- // find the dimensions of the sorting shaders
- const stride = 1 << LOG2_STRIDE; // must be a power of 2
- //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
- const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
- const numberOfPixels = stride * height;
-
- // find the dimensions of the output texture
- const newCapacity = Math.min(capacity, clipValue);
- const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
-
- // generate permutation of keypoints
- keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
- let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort permutation
- const numPasses = Math.ceil(Math.log2(numberOfPixels));
- keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
- for (let i = 1; i <= numPasses; i++) {
- const blockSize = 1 << i; // 2, 4, 8...
- const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
- permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
- }
-
- // apply permutation
- keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
- keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
-
- /*
- // debug (read the contents of the permutation)
- const pixels = permutation.inspect(gpu), debug = [];
- for(let i = 0; i < pixels.length; i += 4) {
- let id = pixels[i] | (pixels[i+1] << 8);
- let score = pixels[i+2] / 255.0;
- let valid = pixels[i+3] / 255.0;
- debug.push([ id, valid, score, ].join(', '));
- }
- console.log(debug);
- */
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
- }
- }
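-
- /*
- // worked example of the dimensions computed above (illustrative numbers, assuming MIN_KEYPOINT_SIZE == 8):
- // input: encoderLength = 90, descriptorSize = 32, extraSize = 0, this._size = 100
- //   capacity         = floor(90 * 90 / 10) = 810
- //   stride           = 1 << 5 = 32
- //   height           = ceil(810 / 32) = 26
- //   numberOfPixels   = 32 * 26 = 832
- //   numPasses        = ceil(log2(832)) = 10 sorting passes
- //   newCapacity      = min(810, 100) = 100
- //   newEncoderLength = ceil(sqrt(100 * 10)) = 32
- */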
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * border-clipper.js
- * Keypoint Border Clipper
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * The Border Clipper removes all keypoints that lie within a border around the edges of the image
- */
- class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedySize} image size, in pixels */
- this._imageSize = new SpeedySize(0, 0);
-
- /** @type {SpeedyVector2} border size, in pixels */
- this._borderSize = new SpeedyVector2(0, 0);
- }
-
- /**
- * Image size, in pixels
- * @returns {SpeedySize}
- */
- get imageSize() {
- return this._imageSize;
- }
-
- /**
- * Image size, in pixels
- * @param {SpeedySize} imageSize
- */
- set imageSize(imageSize) {
- this._imageSize = imageSize;
- }
-
- /**
- * Border size, in pixels
- * @returns {SpeedyVector2}
- */
- get borderSize() {
- return this._borderSize;
- }
-
- /**
- * Border size, in pixels
- * @param {SpeedyVector2} borderSize
- */
- set borderSize(borderSize) {
- this._borderSize = borderSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const keypoints = gpu.programs.keypoints;
- const imageSize = this._imageSize;
- const borderSize = this._borderSize;
- const imageWidth = imageSize.width,
- imageHeight = imageSize.height;
- const borderLeft = borderSize.x,
- borderRight = borderSize.x;
- const borderTop = borderSize.y,
- borderBottom = borderSize.y;
- const tex = this._tex;
-
- // validate
- if (imageWidth == 0 || imageHeight == 0) throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
-
- // find the capacity of the keypoint stream
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
-
- // prepare programs
- keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
- keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
- keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
-
- // clip keypoints
- let clippedKeypoints = keypoints.clipBorder(imageWidth, imageHeight, borderTop, borderRight, borderBottom, borderLeft, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort keypoints
- let sortedKeypoints = keypoints.mixKeypointsInit(clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
- clippedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength);
-
- /*
- // debug: view keypoints
- keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
- */
-
- // done!
- this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
- }
- }
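-
- /*
- // usage sketch: dropping keypoints near the edges of a 640x480 image.
- // Speedy.Keypoint.BorderClipper(), Speedy.Size() and Speedy.Vector2() are assumed public factory names - check your build.
- const borderClipper = Speedy.Keypoint.BorderClipper();
- borderClipper.imageSize = Speedy.Size(640, 480);
- borderClipper.borderSize = Speedy.Vector2(16, 16); // 16px on the left/right (x) and on the top/bottom (y)
- */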
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * buffer.js
- * Keypoint Buffer
- */
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint Buffer: a node with memory.
- * At time t, it outputs the keypoints received at time t-1
- */
- class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} current page: 0 or 1 */
- this._pageIndex = 0;
-
- /** @type {boolean} first run? */
- this._initialized = false;
-
- /** @type {number} previous descriptor size, in bytes */
- this._previousDescriptorSize = 0;
-
- /** @type {number} previous extra size, in bytes */
- this._previousExtraSize = 0;
-
- /** @type {number} previous encoder length */
- this._previousEncoderLength = 0;
-
- /** @type {boolean} frozen buffer? */
- this._frozen = false;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @returns {boolean}
- */
- get frozen() {
- return this._frozen;
- }
-
- /**
- * A frozen buffer discards the input, effectively increasing the buffering time
- * @param {boolean} value
- */
- set frozen(value) {
- this._frozen = Boolean(value);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const previousDescriptorSize = this._previousDescriptorSize;
- const previousExtraSize = this._previousExtraSize;
- const previousEncoderLength = this._previousEncoderLength;
- const page = this._tex;
- const previousInputTexture = page[1 - this._pageIndex];
- const outputTexture = page[this._pageIndex];
-
- // bufferize
- if (!this._frozen || !this._initialized) {
- // store input
- this._previousDescriptorSize = descriptorSize;
- this._previousExtraSize = extraSize;
- this._previousEncoderLength = encoderLength;
- previousInputTexture.resize(encoderLength, encoderLength);
- encodedKeypoints.copyTo(previousInputTexture);
-
- // page flipping
- this._pageIndex = 1 - this._pageIndex;
- }
-
- // first run?
- if (!this._initialized) {
- this._initialized = true;
- this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
- return;
- }
-
- // done!
- this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
- }
- }
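-
- /*
- // timeline of this buffer, following the page-flipping logic above:
- //   t = 0: output = input(0)   (first run: the fresh input is passed through)
- //   t = 1: output = input(0)
- //   t = n: output = input(n-1)
- // when frozen == true, incoming keypoints are discarded and the last buffered frame keeps being emitted
- */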
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * mixer.js
- * Keypoint Mixer
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint Mixer: merges two sets of keypoints
- */
- class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints), InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in0').read();
- const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in1').read();
- const descriptorSize = kps0.descriptorSize;
- const extraSize = kps0.extraSize;
- const keypoints = gpu.programs.keypoints;
- const tex = this._tex;
-
- // ensure that the format of kps0 equals the format of kps1
- if (!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize)) throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
-
- // find the capacity of kps0 + kps1
- const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
- const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
- const capacity = cap0 + cap1;
-
- // find the dimensions of the output texture
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
- const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
-
- // prepare programs
- keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
- keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
- keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
-
- // mix keypoints
- let mixedKeypoints = keypoints.mixKeypointsPreInit(kps0.encodedKeypoints, kps1.encodedKeypoints, kps0.encoderLength, kps1.encoderLength, cap0, cap1, descriptorSize, extraSize, encoderLength);
- let sortedKeypoints = keypoints.mixKeypointsInit(mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
- mixedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength);
-
- /*
- // debug: view keypoints
- keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
- this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
- */
-
- this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
- }
- }
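-
- /*
- // usage sketch: merging the current detections with the previous frame's keypoints.
- // Speedy.Keypoint.Buffer() and Speedy.Keypoint.Mixer() are assumed public factory names - check your build.
- const buffer = Speedy.Keypoint.Buffer();
- const mixer = Speedy.Keypoint.Mixer();
- detector.output().connectTo(buffer.input()); // detector: any keypoint detector node
- detector.output().connectTo(mixer.input('in0')); // current keypoints
- buffer.output().connectTo(mixer.input('in1')); // keypoints from the previous frame
- mixer.output().connectTo(sink.input()); // sink: e.g. Speedy.Keypoint.Sink()
- */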
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * shuffler.js
- * Keypoint Shuffler
- */
-
-
-
-
-
-
-
-
-
- /**
- * The Keypoint Shuffler shuffles a list of keypoints
- */
- class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} maximum number of keypoints */
- this._maxKeypoints = Number.NaN;
- }
-
- /**
- * Maximum number of keypoints (optional)
- * @returns {number}
- */
- get maxKeypoints() {
- return this._maxKeypoints;
- }
-
- /**
- * Maximum number of keypoints (optional)
- * @param {number} value
- */
- set maxKeypoints(value) {
- if (!Number.isNaN(value)) this._maxKeypoints = Math.max(0, value | 0);
- else this._maxKeypoints = Number.NaN;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- let {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const maxKeypoints = this._maxKeypoints;
-
- // shuffle the keypoints (including nulls)
- const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
- const permutationLength = Math.min(permutationMaxLength, capacity);
- const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
- encodedKeypoints = gpu.programs.keypoints.shuffle.setUBO('Permutation', permutation).outputs(encoderLength, encoderLength, this._tex[0])(encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // sort the keypoints
- gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
- gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
- gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
- let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
- for (let b = 1; b < capacity; b *= 2) sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
- encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // clip the output?
- if (!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
- const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
- encodedKeypoints = gpu.programs.keypoints.clip.outputs(newEncoderLength, newEncoderLength, this._tex[5])(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
- encoderLength = newEncoderLength;
- }
-
- // done!
- this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
- }
-
- /**
- * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
- * @param {number} n positive integer
- * @param {number} [bufsize] size of the output array
- * @returns {Int32Array} permutation
- */
- _generatePermutation(n, bufsize = n) {
- const array = new Int32Array(bufsize);
- const p = array.subarray(0, n).fill(-1);
- const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
- for (let i = 0, j = 0; i < n; i++) {
- if (p[i] < 0) {
- do {
- p[i] = q[j++];
- } while (p[i] < i);
- p[p[i]] = i;
- }
- }
- return array; // padded with zeros
- }
- }
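-
- /*
- // standalone sketch of the permutation generated above: it is an involution, i.e. p(p(x)) == x,
- // so applying it twice restores the original order. The helpers below stand in for Utils.shuffle/range.
- const range = n => Array.from({ length: n }, (_, i) => i);
- const shuffle = a => { for (let i = a.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); const t = a[i]; a[i] = a[j]; a[j] = t; } return a; }; // Fisher-Yates
- const generateInvolution = n => {
- const p = new Int32Array(n).fill(-1);
- const q = shuffle(range(n));
- for (let i = 0, j = 0; i < n; i++) {
- if (p[i] < 0) {
- do { p[i] = q[j++]; } while (p[i] < i);
- p[p[i]] = i;
- }
- }
- return p;
- };
- const p = generateInvolution(16);
- console.assert(p.every((value, x) => p[value] === x)); // p(p(x)) == x for all x
- */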
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * multiplexer.js
- * Keypoint multiplexer
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {string[]} the names of the input ports indexed by their number */
- const multiplexer_INPUT_PORT = ['in0', 'in1'];
-
- /**
- * Keypoint multiplexer
- */
- class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [...multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints)), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} which port should be linked to the output? */
- this._port = 0;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @returns {number}
- */
- get port() {
- return this._port;
- }
-
- /**
- * The number of the port that should be linked to the output
- * @param {number} port
- */
- set port(port) {
- if (port < 0 || port >= multiplexer_INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
- this._port = port | 0;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
- this.output().write(message);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * transformer.js
- * Apply a transformation matrix to a set of keypoints
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Apply a transformation matrix to a set of keypoints
- */
- class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyMatrix} transformation matrix */
- this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
- }
-
- /**
- * Transformation matrix
- * @returns {SpeedyMatrix}
- */
- get transform() {
- return this._transform;
- }
-
- /**
- * Transformation matrix. Must be 3x3
- * @param {SpeedyMatrix} transform
- */
- set transform(transform) {
- if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
- this._transform = transform;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const outputTexture = this._tex[0];
- const homography = this._transform.read();
-
- // apply homography
- gpu.programs.keypoints.applyHomography.outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
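- // A minimal usage sketch (added note): the node applies the 3x3 matrix in `transform` to the
- // encoded keypoint positions. A uniform scale is shown because it is symmetric, so the
- // row/column ordering of the entries passed to SpeedyMatrix.Create does not matter here.
- //   const warp = new SpeedyPipelineNodeKeypointTransformer('warp');
- //   warp.transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [2, 0, 0, 0, 2, 0, 0, 0, 1]);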
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * subpixel.js
- * Subpixel refinement of keypoint location
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
-
- /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
- const METHOD2PROGRAM = Object.freeze({
- 'quadratic1d': 'subpixelQuadratic1d',
- 'taylor2d': 'subpixelTaylor2d',
- 'bicubic-upsample': 'subpixelBicubic',
- 'bilinear-upsample': 'subpixelBilinear'
- });
-
- /**
- * Subpixel refinement of keypoint location
- */
- class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SubpixelRefinementMethod} subpixel refinement method */
- this._method = 'quadratic1d';
-
- /** @type {number} max iterations for the upsampling methods */
- this._maxIterations = 6;
-
- /** @type {number} convergence threshold for the upsampling methods */
- this._epsilon = 0.1;
- }
-
- /**
- * Subpixel refinement method
- * @returns {SubpixelRefinementMethod}
- */
- get method() {
- return this._method;
- }
-
- /**
- * Subpixel refinement method
- * @param {SubpixelRefinementMethod} name
- */
- set method(name) {
- if (!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
- this._method = name;
- }
-
- /**
- * Max. iterations for the upsampling methods
- * @returns {number}
- */
- get maxIterations() {
- return this._maxIterations;
- }
-
- /**
- * Max. iterations for the upsampling methods
- * @param {number} value
- */
- set maxIterations(value) {
- this._maxIterations = Math.max(0, +value);
- }
-
- /**
- * Convergence threshold for the upsampling methods
- * @returns {number}
- */
- get epsilon() {
- return this._epsilon;
- }
-
- /**
- * Convergence threshold for the upsampling methods
- * @param {number} value
- */
- set epsilon(value) {
- this._epsilon = Math.max(0, +value);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read();
- const tex = this._tex;
- const program = METHOD2PROGRAM[this._method];
- const maxIterations = this._maxIterations;
- const epsilon = this._epsilon;
-
- // note: if you detected the keypoints using a pyramid,
- // you need to pass that pyramid as input!
-
- // we'll compute the offsets for each keypoint
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
- const offsets = gpu.programs.keypoints[program].outputs(offsetEncoderLength, offsetEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
-
- // apply the offsets to the keypoints
- const refinedKeypoints = gpu.programs.keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[1])(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
- this.output('displacements').swrite(offsets);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * fast.js
- * FAST corner detector
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_THRESHOLD = 20;
-
- /**
- * FAST corner detector
- */
- class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} FAST threshold in [0,255] */
- this._threshold = DEFAULT_THRESHOLD;
- }
-
- /**
- * FAST threshold in [0,255]
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * FAST threshold in [0,255]
- * @param {number} threshold
- */
- set threshold(threshold) {
- this._threshold = Math.max(0, Math.min(threshold | 0, 255));
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const tex = this._tex;
- const capacity = this._capacity;
- const threshold = this._threshold;
- const lodStep = Math.log2(this.scaleFactor);
- const levels = this.levels;
-
- // validate pyramid
- if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
-
- // skip if the capacity is zero
- if (capacity == 0) {
- const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
- const encoderLength = encodedKeypoints.width;
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- return;
- }
-
- // FAST
- gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
- gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
- let corners = tex[1].clear();
- let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
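- // walk the pyramid from the coarsest level of detail (lod = lodStep * (numPasses - 1)) down
- // to lod = 0, writing the FAST responses of every scale into the same corner map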
- for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
- corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
- //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
- }
-
- // Same-scale non-maximum suppression
- // *nicer results inside the loop; faster outside
- // Hard to notice a difference when using FAST
- corners = gpu.programs.keypoints.nonmaxSpace(corners);
-
- // Multi-scale non-maximum suppression
- // (doesn't seem to remove many keypoints)
- if (levels > 1) {
- corners = gpu.programs.keypoints.nonmaxScaleSimple.outputs(width, height, tex[1])(corners, image, lodStep);
- }
-
- // encode keypoints
- let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
- const encoderLength = encodedKeypoints.width;
-
- // scale refinement
- if (levels > 1) {
- encodedKeypoints = gpu.programs.keypoints.refineScaleFAST916.outputs(encoderLength, encoderLength, tex[4])(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
- }
-
- // done!
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * harris.js
- * Harris corner detector
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} Harris program name indexed by window size */
- const HARRIS = Object.freeze({
- 1: 'harris1',
- 3: 'harris3',
- 5: 'harris5',
- 7: 'harris7'
- });
-
- /**
- * Harris corner detector
- */
- class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedySize} neighborhood size */
- this._windowSize = new SpeedySize(3, 3);
-
- /** @type {number} min corner quality in [0,1] */
- this._quality = 0.1;
- }
-
- /**
- * Minimum corner quality in [0,1] - this is a fraction of
- * the largest min. eigenvalue of the autocorrelation matrix
- * over the entire image
- * @returns {number}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Minimum corner quality in [0,1]
- * @param {number} quality
- */
- set quality(quality) {
- this._quality = Math.max(0.0, Math.min(+quality, 1.0));
- }
-
- /**
- * Neighborhood size
- * @returns {SpeedySize}
- */
- get windowSize() {
- return this._windowSize;
- }
-
- /**
- * Neighborhood size
- * @param {SpeedySize} windowSize
- */
- set windowSize(windowSize) {
- const d = windowSize.width;
- if (!(d == windowSize.height && (d == 1 || d == 3 || d == 5 || d == 7))) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
- this._windowSize = windowSize;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- image,
- format
- } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
- const width = image.width,
- height = image.height;
- const capacity = this._capacity;
- const quality = this._quality;
- const windowSize = this._windowSize.width;
- const levels = this.levels;
- const lodStep = Math.log2(this.scaleFactor);
- const intFactor = levels > 1 ? this.scaleFactor : 1;
- const harris = gpu.programs.keypoints[HARRIS[windowSize]];
- const tex = this._tex;
-
- // validate pyramid
- if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
-
- // skip if the capacity is zero
- if (capacity == 0) {
- const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
- const encoderLength = encodedKeypoints.width;
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- return;
- }
-
- // compute corner response map
- harris.outputs(width, height, tex[0], tex[1]);
- gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
- gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
- let corners = tex[1].clear();
- let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
- for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
- const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
- const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
- corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
- corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
- }
-
- // Same-scale non-maximum suppression
- // *performs better inside the loop
- //corners = gpu.programs.keypoints.nonmaxSpace(corners);
-
- // Multi-scale non-maximum suppression
- // (doesn't seem to remove many keypoints)
- if (levels > 1) {
- const laplacian = gpu.programs.keypoints.laplacian.outputs(width, height, tex[0])(corners, image, lodStep, 0);
- corners = gpu.programs.keypoints.nonmaxScale.outputs(width, height, tex[2])(corners, image, laplacian, lodStep);
- }
-
- // find the maximum corner response over the entire image
- gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
- numPasses = Math.ceil(Math.log2(Math.max(width, height)));
- let maxScore = corners;
- for (let j = 0; j < numPasses; j++) maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
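- // parallel reduction: ceil(log2(max(width, height))) passes of harrisScoreFindMax propagate
- // the maximum corner response over the whole image, which the quality cutoff below uses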
-
- // discard corners below a quality level
- corners = gpu.programs.keypoints.harrisScoreCutoff.outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])(corners, maxScore, quality);
-
- // encode keypoints
- let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
- const encoderLength = encodedKeypoints.width;
-
- // scale refinement
- if (levels > 1) {
- encodedKeypoints = gpu.programs.keypoints.refineScaleLoG.outputs(encoderLength, encoderLength, tex[5])(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
- }
-
- // done!
- this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * descriptor.js
- * Abstract keypoint descriptor
- */
-
-
-
-
-
-
-
-
- /**
- * Abstract keypoint descriptor
- * @abstract
- */
- class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- * @param {number} [texCount] number of work textures
- * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
- */
- constructor(name = undefined, texCount = 0, portBuilders = undefined) {
- super(name, texCount + 1, portBuilders);
- }
-
- /**
- * Allocate space for keypoint descriptors
- * @param {SpeedyGPU} gpu
- * @param {number} inputDescriptorSize should be 0
- * @param {number} inputExtraSize must be non-negative
- * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
- * @param {number} outputExtraSize must be inputExtraSize
- * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
- * @returns {SpeedyDrawableTexture} encodedKeypoints
- */
- _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints) {
- utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
- utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
- const inputEncoderLength = inputEncodedKeypoints.width;
- const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
- const outputEncoderCapacity = inputEncoderCapacity;
- const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
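- // the output keeps the capacity of the input encoder; only the encoder length grows, since
- // each keypoint will now carry outputDescriptorSize additional bytes of descriptor data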
- const tex = this._tex[this._tex.length - 1];
- return gpu.programs.keypoints.allocateDescriptors.outputs(outputEncoderLength, outputEncoderLength, tex)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * orb.js
- * ORB descriptors
- */
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DESCRIPTOR_SIZE = 32; // 256 bits
-
- /**
- * ORB descriptors
- */
- class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const image = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read()).image;
- const tex = this._tex;
- const outputTexture = this._tex[2];
-
- // compute orientation
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
- const encodedOrientations = gpu.programs.keypoints.orbOrientation.outputs(orientationEncoderLength, orientationEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- const orientedKeypoints = gpu.programs.keypoints.transferOrientation.outputs(encoderLength, encoderLength, tex[1])(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
-
- // allocate space
- const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
- const newEncoderLength = encodedKps.width;
-
- // compute descriptors (it's a good idea to blur the image)
- const describedKeypoints = gpu.programs.keypoints.orbDescriptor.outputs(newEncoderLength, newEncoderLength, outputTexture)(image, encodedKps, extraSize, newEncoderLength);
-
- // done!
- this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
- }
- }
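- // Usage note (added, partly an assumption): the 'image' port expects the same greyscale image
- // (or pyramid) from which the 'keypoints' were detected; per the comment above, blurring that
- // image before describing tends to yield more stable ORB descriptors.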
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lk.js
- * LK optical-flow
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- // Constants
- const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
- const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
- const DEFAULT_NUMBER_OF_ITERATIONS = 30;
- const DEFAULT_DISCARD_THRESHOLD = 0.0001;
- const DEFAULT_EPSILON = 0.01;
- const LK_PROGRAM = {
- 3: 'lk3',
- 5: 'lk5',
- 7: 'lk7',
- 9: 'lk9',
- 11: 'lk11',
- 13: 'lk13',
- 15: 'lk15',
- 17: 'lk17',
- 19: 'lk19',
- 21: 'lk21'
- };
-
- /**
- * LK optical-flow
- */
- class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 3, [InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SpeedySize} window size */
- this._windowSize = DEFAULT_WINDOW_SIZE;
-
- /** @type {number} number of pyramid levels to use */
- this._levels = DEFAULT_DEPTH;
-
- /** @type {number} minimum acceptable corner response */
- this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
-
- /** @type {number} number of iterations per pyramid level (termination criteria) */
- this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
-
- /** @type {number} minimum increment per iteration (termination criteria) */
- this._epsilon = DEFAULT_EPSILON;
- }
-
- /**
- * Window size (use odd numbers)
- * @returns {SpeedySize}
- */
- get windowSize() {
- return this._windowSize;
- }
-
- /**
- * Window size (use odd numbers)
- * @param {SpeedySize} windowSize must be a square window
- */
- set windowSize(windowSize) {
- if (windowSize.width != windowSize.height) {
- throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
- } else if (!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
- const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a, b) => a - b).map(k => k + 'x' + k).join(', ');
- throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
- }
- this._windowSize = windowSize;
- }
-
- /**
- * Number of pyramid levels to use
- * @returns {number}
- */
- get levels() {
- return this._levels;
- }
-
- /**
- * Number of pyramid levels to use
- * @param {number} levels
- */
- set levels(levels) {
- utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
- this._levels = levels | 0;
- }
-
- /**
- * Get the discard threshold, used to discard "bad" keypoints
- * @returns {number}
- */
- get discardThreshold() {
- return this._discardThreshold;
- }
-
- /**
- * Set the discard threshold, used to discard "bad" keypoints
- * @param {number} value typically 10^(-4) - increase to discard more
- */
- set discardThreshold(value) {
- utils/* Utils */.A.assert(value >= 0);
- this._discardThreshold = +value;
- }
-
- /**
- * Get the maximum number of iterations of the pyramidal LK algorithm
- * @returns {number}
- */
- get numberOfIterations() {
- return this._numberOfIterations;
- }
-
- /**
- * Set the maximum number of iterations of the pyramidal LK algorithm
- * @param {number} value
- */
- set numberOfIterations(value) {
- utils/* Utils */.A.assert(value >= 1);
- this._numberOfIterations = value | 0;
- }
-
- /**
- * Get the accuracy threshold, used to stop LK iterations
- * @returns {number}
- */
- get epsilon() {
- return this._epsilon;
- }
-
- /**
- * Set the accuracy threshold, used to stop LK iterations
- * @param {number} value typically 0.01
- */
- set epsilon(value) {
- utils/* Utils */.A.assert(value >= 0);
- this._epsilon = +value;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('previousKeypoints').read();
- const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('previousImage').read()).image;
- const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('nextImage').read()).image;
- const previousKeypoints = encodedKeypoints;
- const levels = this._levels;
- const windowSize = this._windowSize;
- const wsize = windowSize.width; // square window
- const numberOfIterations = this._numberOfIterations;
- const discardThreshold = this._discardThreshold;
- const epsilon = this._epsilon;
- const keypoints = gpu.programs.keypoints;
- const tex = this._tex;
-
- // do we need a pyramid?
- if (!(levels == 1 || previousImage.hasMipmaps() && nextImage.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);else if (previousImage.width !== nextImage.width || previousImage.height !== nextImage.height) throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
-
- // select the appropriate program
- const lk = keypoints[LK_PROGRAM[wsize]];
-
- // find the dimensions of the flow texture (1 pixel per flow vector)
- const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
- lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
-
- // compute optical-flow
- let flow = lk.clear();
- for (let lod = levels - 1; lod >= 0; lod--) flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
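- // pyramidal LK: the flow estimated at the coarsest level (lod = levels - 1) is passed down
- // and refined at each finer level until lod = 0, the original resolution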
-
- // transfer optical-flow to nextKeypoints
- keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
- const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
-
- // done!
- this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
- this.output('flow').swrite(flow);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lsh-static-tables.js
- * Static LSH tables
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Static LSH tables
- */
- class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.LSHTables)]);
-
- /** @type {SpeedyKeypoint[]} "training" keypoints */
- this._keypoints = [];
-
- /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
- this._keypointsCopy = [];
-
- /** @type {number} number of tables in the LSH data structure */
- this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
-
- /** @type {number} number of bits of a hash */
- this._hashSize = LSH_DEFAULT_HASH_SIZE;
-
- /** @type {SpeedyLSH|null} LSH data structure */
- this._lsh = null;
- }
-
- /**
- * "Training" keypoints
- * @returns {SpeedyKeypoint[]}
- */
- get keypoints() {
- return this._keypoints;
- }
-
- /**
- * "Training" keypoints
- * @param {SpeedyKeypoint[]} keypoints
- */
- set keypoints(keypoints) {
- if (!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint))) throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
- if (this._keypoints !== keypoints) {
- this._keypoints = keypoints; // update internal pointer
- this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
- this._lsh = null; // (re)train the model
- }
- }
-
- /**
- * Number of tables in the LSH data structure
- * @returns {number}
- */
- get numberOfTables() {
- return this._numberOfTables;
- }
-
- /**
- * Number of tables in the LSH data structure
- * @param {number} n
- */
- set numberOfTables(n) {
- if (!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
- if (n !== this._numberOfTables) {
- this._numberOfTables = n | 0;
- this._lsh = null; // need to retrain the model
- }
- }
-
- /**
- * Number of bits of a hash
- * @returns {number}
- */
- get hashSize() {
- return this._hashSize;
- }
-
- /**
- * Number of bits of a hash
- * @param {number} h
- */
- set hashSize(h) {
- if (!LSH_ACCEPTABLE_HASH_SIZES.includes(h)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
- if (h !== this._hashSize) {
- this._hashSize = h | 0;
- this._lsh = null; // need to retrain the model
- }
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- // Need to train the model?
- if (this._lsh == null) {
- // internal work textures are only available after initialization,
- // i.e., after calling this._init()
- this._lsh = this._train();
- }
-
- // Pass it forward
- this.output().swrite(this._lsh);
- }
-
- /**
- * Train the model
- * @returns {SpeedyLSH}
- */
- _train() {
- const keypoints = this._keypointsCopy;
- const numberOfTables = this._numberOfTables;
- const hashSize = this._hashSize;
- if (keypoints.find(keypoint => keypoint.descriptor == null)) throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
- const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
- const lshTables = this._tex[0];
- const descriptorDB = this._tex[1];
- return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * lsh-knn.js
- * K approximate nearest neighbors matcher
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
-
- /** @type {number} how many neighbors to search for, by default */
- const DEFAULT_K = 1;
-
- /** @type {LSHKNNQualityLevel} default quality level */
- const DEFAULT_QUALITY = 'default';
-
- /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
- const NUMBER_OF_BIT_SWAPS = {
- 'fastest': 0,
- 'default': 1,
- 'demanding': 2
- };
-
- /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
- const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o, d) => (o[d] = fd(d), o), {}))(d => (fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o, h) => (o[h] = fh(h), o), {}))(h => (fl => [0, 1, 2].reduce((o, l) => (o[l] = fl(l), o), {}))(l => `lshKnn${d}h${h}lv${l}`)));
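- // e.g., LSH_KNN[32][16][1] === 'lshKnn32h16lv1' (descriptor size 32, hash size 16, level 1,
- // i.e. 1 bit swap / 'default' quality), assuming 32 and 16 are in the acceptable size lists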
-
- /**
- * K approximate nearest neighbors matcher
- */
- class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
-
- /** @type {number} how many neighbors do you want? */
- this._k = DEFAULT_K;
-
- /** @type {LSHKNNQualityLevel} quality of the matching */
- this._quality = DEFAULT_QUALITY;
- }
-
- /**
- * How many neighbors do you want?
- * @returns {number}
- */
- get k() {
- return this._k;
- }
-
- /**
- * How many neighbors do you want?
- * @param {number} k number of neighbors
- */
- set k(k) {
- this._k = Math.max(1, k | 0);
- }
-
- /**
- * Quality of the matching
- * @returns {LSHKNNQualityLevel}
- */
- get quality() {
- return this._quality;
- }
-
- /**
- * Quality of the matching
- * @param {LSHKNNQualityLevel} quality
- */
- set quality(quality) {
- if (!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
- this._quality = quality;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- /** @type {SpeedyLSH} */
- const lsh = this.input('lsh').read().lsh;
- const keypoints = gpu.programs.keypoints;
- const tables = lsh.tables;
- const descriptorDB = lsh.descriptorDB;
- const tablesStride = tables.width;
- const descriptorDBStride = descriptorDB.width;
- const tableCount = lsh.tableCount;
- const hashSize = lsh.hashSize;
- const bucketCapacity = lsh.bucketCapacity;
- const bucketsPerTable = lsh.bucketsPerTable;
- const sequences = lsh.sequences;
- const candidatesA = this._tex[0];
- const candidatesB = this._tex[1];
- const candidatesC = this._tex[2];
- const filters = this._tex[3];
- const transferA = this._tex[4];
- const transferB = this._tex[5];
- const level = NUMBER_OF_BIT_SWAPS[this._quality];
- const matchesPerKeypoint = this._k;
-
- // validate parameters
- if (descriptorSize !== lsh.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
- utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
-
- // configure the output texture
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
- let encodedMatches = transferB;
- keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
-
- // prepare the LSH matching
- const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
- keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
- keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
- const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
- lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
- lshKnn.setUBO('LSHSequences', sequences);
-
- // match keypoints
- encodedMatches.clear();
- keypoints.lshKnnInitFilters();
- for (let i = 0; i < matchesPerKeypoint; i++) {
- // find the (i+1)-th best match
- let candidates = keypoints.lshKnnInitCandidates();
- for (let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
- candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
- gpu.gl.flush();
- }
- candidates.copyTo(filters);
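- // the best match found in this iteration becomes a filter, so the next iteration of the
- // outer loop searches for the next-best match instead of rediscovering this one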
-
- // transfer matches to an encoded matches texture
- encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
- }
-
- // done
- this.output().swrite(encodedMatches, matchesPerKeypoint);
-
- /*
- // debug
- let data = filters.inspect32(gpu), debug = [];
- for(let i = 0; i < data.length; i++) {
- const bits = MATCH_INDEX_BITS;
- const mask = (1 << bits) - 1;
- const u32 = data[i];
- const index = u32 & mask, distance = u32 >>> bits;
- //debug.push('|'+[ u32 ].toString());
- debug.push('|'+[ index, distance ].toString());
- }
- console.log(debug.join(','));
- */
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * bf-knn.js
- * Brute Force KNN Keypoint Matcher
- */
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} program name indexed by descriptor size */
- const PROGRAM_NAME = {
- 32: 'bfMatcher32',
- 64: 'bfMatcher64'
- };
-
- /**
- * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
- * invoking this (use a database of 50 keypoints or so - your mileage may vary)
- */
- class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
-
- /** @type {number} number of matches per keypoint (the "k" of knn) */
- this._matchesPerKeypoint = 1;
- }
-
- /**
- * Number of matches per keypoint
- * @returns {number}
- */
- get k() {
- return this._matchesPerKeypoint;
- }
-
- /**
- * Number of matches per keypoint
- * @param {number} value
- */
- set k(value) {
- this._matchesPerKeypoint = Math.max(1, value | 0);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
- const database = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('database').read();
- const candidatesA = this._tex[0];
- const candidatesB = this._tex[1];
- const candidatesC = this._tex[2];
- const encodedFiltersA = this._tex[3];
- const encodedMatchesA = this._tex[4];
- const encodedMatchesB = this._tex[5];
- const matchesPerKeypoint = this._matchesPerKeypoint;
- const keypoints = gpu.programs.keypoints;
-
- // validate parameters
- if (descriptorSize !== database.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);else if (!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
-
- // prepare the brute force matching
- const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
- const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
- const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
- const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
- const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
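- // the keypoint database is scanned in chunks of NUMBER_OF_KEYPOINTS_PER_PASS per draw call,
- // hence ceil(dbCapacity / chunk) passes are needed to compare against every database keypoint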
- const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
- const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
- keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
- keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
- keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
- bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
-
- // match keypoints
- let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
- let encodedFilters = keypoints.bfMatcherInitFilters();
- for (let k = 0; k < matchesPerKeypoint; k++) {
- let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
-
- // find the (k+1)-th best match
- for (let passId = 0; passId < numberOfPasses; passId++) {
- encodedPartialMatches = bfMatcher(encodedPartialMatches, encodedFilters, partialMatcherLength, database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength, encodedKeypoints, descriptorSize, extraSize, encoderLength, passId);
- gpu.gl.flush();
- }
- //gpu.gl.flush();
-
- // copy the (k+1)-th best match to the filter
- if (matchesPerKeypoint > 1) encodedPartialMatches.copyTo(encodedFilters);
-
- // aggregate matches
- encodedMatches = keypoints.bfMatcherTransfer(encodedMatches, encodedPartialMatches, matchesPerKeypoint, k);
- }
-
- // done!
- this.output().swrite(encodedMatches, matchesPerKeypoint);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * distance-filter.js
- * Given a set of pairs of keypoints, discard all pairs whose distance is
- * above a user-defined threshold. Useful for bidirectional optical-flow.
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Given a set of pairs of keypoints, discard all pairs whose distance is
- * above a user-defined threshold. Useful for bidirectional optical-flow.
- *
- * The pairs of keypoints are provided as two separate sets, "in" and
- * "reference". Keypoints that are kept will have their data extracted
- * from the "in" set.
- */
- class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} maximum accepted distance */
- this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
- }
-
- /**
- * Maximum accepted distance
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * Maximum accepted distance
- * @param {number} value
- */
- set threshold(value) {
- this._threshold = Math.max(0, +value);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
- const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
- const threshold = this._threshold;
-
- // validate shapes
- if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
-
- // calculate the shape of the output
- const outputTexture = this._tex[0];
- const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
- const descriptorSize = set0.descriptorSize;
- const extraSize = set0.extraSize;
-
- // apply the distance filter
- gpu.programs.keypoints.distanceFilter.outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * hamming-distance-filter.js
- * Given a set of pairs of keypoints, discard all pairs whose hamming
- * distance (of descriptor) is above a user-defined threshold
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** @type {Object<number,string>} Program names */
- const hamming_distance_filter_PROGRAM_NAME = {
- 32: 'hammingDistanceFilter32',
- 64: 'hammingDistanceFilter64'
- };
-
- /**
- * Given a set of pairs of keypoints, discard all pairs whose hamming
- * distance (of descriptor) is above a user-defined threshold
- *
- * The pairs of keypoints are provided as two separate sets, "in" and
- * "reference". Keypoints that are kept will have their data extracted
- * from the "in" set.
- */
- class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} distance threshold, an integer */
- this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
- }
-
- /**
- * Distance threshold, an integer
- * @returns {number}
- */
- get threshold() {
- return this._threshold;
- }
-
- /**
- * Distance threshold, an integer
- * @param {number} value
- */
- set threshold(value) {
- this._threshold = Math.max(0, value | 0);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
- const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
- const threshold = this._threshold;
-
- // validate shapes
- if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
-
- // validate descriptor size
- if (!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
-
- // calculate the shape of the output
- const outputTexture = this._tex[0];
- const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
- const descriptorSize = set0.descriptorSize;
- const extraSize = set0.extraSize;
-
- // apply the distance filter
- const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
- gpu.programs.keypoints[program].outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
-
- // done!
- this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * portal.js
- * Keypoint Portals
- */
-
-
-
-
-
-
-
-
-
-
-
- /**
- * A sink of a Keypoint Portal
- * This is not a pipeline sink - it doesn't export any data!
- */
- class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {number} descriptor size, in bytes */
- this._descriptorSize = 0;
-
- /** @type {number} extra size, in bytes */
- this._extraSize = 0;
-
- /** @type {number} encoder length */
- this._encoderLength = 0;
-
- /** @type {boolean} is this node initialized? */
- this._initialized = false;
- }
-
- /**
- * Encoded keypoints
- * @returns {SpeedyTexture}
- */
- get encodedKeypoints() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._tex[0];
- }
-
- /**
- * Descriptor size, in bytes
- * @returns {number}
- */
- get descriptorSize() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._descriptorSize;
- }
-
- /**
- * Extra size, in bytes
- * @returns {number}
- */
- get extraSize() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._extraSize;
- }
-
- /**
- * Encoder length
- * @returns {number}
- */
- get encoderLength() {
- if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
- return this._encoderLength;
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
- this._tex[0].resize(encoderLength, encoderLength).clearToColor(1, 1, 1, 1); // initial texture
- this._descriptorSize = this._extraSize = 0;
- this._encoderLength = encoderLength;
- this._initialized = true;
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._initialized = false;
- super.release(gpu);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- encodedKeypoints,
- descriptorSize,
- extraSize,
- encoderLength
- } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
- const tex = this._tex[0];
-
- // copy input
- tex.resize(encodedKeypoints.width, encodedKeypoints.height);
- encodedKeypoints.copyTo(tex);
- this._descriptorSize = descriptorSize;
- this._extraSize = extraSize;
- this._encoderLength = encoderLength;
- }
- }
-
- /**
- * A source of a Keypoint Portal
- */
- class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = undefined) {
- super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
-
- /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
- this._source = null;
- }
-
- /**
- * Data source
- * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
- */
- get source() {
- return this._source;
- }
-
- /**
- * Data source
- * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
- */
- set source(node) {
- if (node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
- this._source = node;
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
- this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
- }
- }
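- 
- /*
-  * Usage sketch (illustrative, not executed): keypoint portals let one pipeline
-  * park its keypoints so that another pipeline can read them back. The sketch
-  * assumes the public factories exposed later in this bundle
-  * (Speedy.Keypoint.Portal, Speedy.Keypoint.Detector.FAST, Speedy.Keypoint.Sink,
-  * Speedy.Image.Source, Speedy.Filter.Greyscale) and the usual pipeline wiring
-  * helpers (output().connectTo(), pipeline.init(), pipeline.run()).
-  *
-  *   // pipeline A: detect keypoints and park them in a portal sink
-  *   const source = Speedy.Image.Source();
-  *   const greyscale = Speedy.Filter.Greyscale();
-  *   const fast = Speedy.Keypoint.Detector.FAST();
-  *   const portalSink = Speedy.Keypoint.Portal.Sink();
-  *   source.media = media; // a previously loaded SpeedyMedia
-  *   source.output().connectTo(greyscale.input());
-  *   greyscale.output().connectTo(fast.input());
-  *   fast.output().connectTo(portalSink.input());
-  *   const pipelineA = Speedy.Pipeline();
-  *   pipelineA.init(source, greyscale, fast, portalSink);
-  *
-  *   // pipeline B: replay the keypoints through a portal source
-  *   const portalSource = Speedy.Keypoint.Portal.Source();
-  *   portalSource.source = portalSink; // link the portal
-  *   const keypointSink = Speedy.Keypoint.Sink();
-  *   portalSource.output().connectTo(keypointSink.input());
-  *   const pipelineB = Speedy.Pipeline();
-  *   pipelineB.init(portalSource, keypointSink);
-  *
-  *   // run A, then B
-  *   pipelineA.run()
-  *       .then(() => pipelineB.run())
-  *       .then(({ keypoints }) => console.log(`${keypoints.length} keypoints crossed the portal`));
-  */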
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * keypoint-factory.js
- * Keypoint-related nodes
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Keypoint detectors
- */
- class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * FAST corner detector
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeFASTKeypointDetector}
- */
- static FAST(name = undefined) {
- return new SpeedyPipelineNodeFASTKeypointDetector(name);
- }
-
- /**
- * Harris corner detector
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
- */
- static Harris(name = undefined) {
- return new SpeedyPipelineNodeHarrisKeypointDetector(name);
- }
- }
-
- /**
- * Keypoint descriptors
- */
- class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * ORB descriptors
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
- */
- static ORB(name = undefined) {
- return new SpeedyPipelineNodeORBKeypointDescriptor(name);
- }
- }
-
- /**
- * Keypoint trackers
- */
- class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * LK optical-flow
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeLKKeypointTracker}
- */
- static LK(name = undefined) {
- return new SpeedyPipelineNodeLKKeypointTracker(name);
- }
- }
-
- /**
- * Keypoint matchers
- */
- class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Static LSH tables
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeStaticLSHTables}
- */
- static StaticLSHTables(name = undefined) {
- return new SpeedyPipelineNodeStaticLSHTables(name);
- }
-
- /**
- * LSH-based K-approximate nearest neighbors
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
- */
- static LSHKNN(name = undefined) {
- return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
- }
-
- /**
- * Brute-force K-nearest neighbors keypoint matcher
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
- */
- static BFKNN(name = undefined) {
- return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
- }
- }
-
- /**
- * Portal nodes
- */
- class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Create a keypoint portal source
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeKeypointPortalSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeKeypointPortalSource(name);
- }
-
- /**
- * Create a keypoint portal sink
- * @param {string} [name] name of the node
- * @returns {SpeedyPipelineNodeKeypointPortalSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeKeypointPortalSink(name);
- }
- }
-
- /**
- * Keypoint-related nodes
- */
- class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q {
- /**
- * Keypoint detectors
- * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
- */
- static get Detector() {
- return SpeedyPipelineKeypointDetectorFactory;
- }
-
- /**
- * Keypoint descriptors
- * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
- */
- static get Descriptor() {
- return SpeedyPipelineKeypointDescriptorFactory;
- }
-
- /**
- * Keypoint trackers
- * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
- */
- static get Tracker() {
- return SpeedyPipelineKeypointTrackerFactory;
- }
-
- /**
- * Keypoint matchers
- * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
- */
- static get Matcher() {
- return SpeedyPipelineKeypointMatcherFactory;
- }
-
- /**
- * Keypoint Portals
- * @returns {typeof SpeedyPipelineKeypointPortalFactory}
- */
- static get Portal() {
- return SpeedyPipelineKeypointPortalFactory;
- }
-
- /**
- * Create a keypoint source
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSource}
- */
- static Source(name = undefined) {
- return new SpeedyPipelineNodeKeypointSource(name);
- }
-
- /**
- * Create a keypoint sink
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSink}
- */
- static Sink(name = undefined) {
- return new SpeedyPipelineNodeKeypointSink(name);
- }
-
- /**
- * Create a sink of tracked keypoints
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeTrackedKeypointSink}
- */
- static SinkOfTrackedKeypoints(name = undefined) {
- return new SpeedyPipelineNodeTrackedKeypointSink(name);
- }
-
- /**
- * Create a sink of matched keypoints
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeMatchedKeypointSink}
- */
- static SinkOfMatchedKeypoints(name = undefined) {
- return new SpeedyPipelineNodeMatchedKeypointSink(name);
- }
-
- /**
- * Keypoint clipper
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointClipper}
- */
- static Clipper(name = undefined) {
- return new SpeedyPipelineNodeKeypointClipper(name);
- }
-
- /**
- * Border Clipper
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointBorderClipper}
- */
- static BorderClipper(name = undefined) {
- return new SpeedyPipelineNodeKeypointBorderClipper(name);
- }
-
- /**
- * Create a keypoint buffer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointBuffer}
- */
- static Buffer(name = undefined) {
- return new SpeedyPipelineNodeKeypointBuffer(name);
- }
-
- /**
- * Create a keypoint mixer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointMixer}
- */
- static Mixer(name = undefined) {
- return new SpeedyPipelineNodeKeypointMixer(name);
- }
-
- /**
- * Create a keypoint shuffler
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointShuffler}
- */
- static Shuffler(name = undefined) {
- return new SpeedyPipelineNodeKeypointShuffler(name);
- }
-
- /**
- * Create a keypoint multiplexer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointMultiplexer}
- */
- static Multiplexer(name = undefined) {
- return new SpeedyPipelineNodeKeypointMultiplexer(name);
- }
-
- /**
- * Create a keypoint transformer
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointTransformer}
- */
- static Transformer(name = undefined) {
- return new SpeedyPipelineNodeKeypointTransformer(name);
- }
-
- /**
- * Create a subpixel refiner of keypoint locations
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
- */
- static SubpixelRefiner(name = undefined) {
- return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
- }
-
- /**
- * Distance filter
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
- */
- static DistanceFilter(name = undefined) {
- return new SpeedyPipelineNodeKeypointDistanceFilter(name);
- }
-
- /**
- * Hamming distance filter
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
- */
- static HammingDistanceFilter(name = undefined) {
- return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
- }
- }
- ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * sink.js
- * Gets 2D vectors out of the pipeline
- */
-
-
-
-
-
-
-
-
-
-
-
- // next power of 2
- const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
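- // illustrative values: vector2_sink_nextPot(1) === 1, vector2_sink_nextPot(25) === 32,
- // vector2_sink_nextPot(64) === 64 (powers of two are left unchanged)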
-
- /**
- * Gets 2D vectors out of the pipeline
- */
- class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode {
- /**
- * Constructor
- * @param {string} [name] name of the node
- */
- constructor(name = 'vec2') {
- super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Vector2)]);
-
- /** @type {SpeedyVector2[]} 2D vectors (output) */
- this._vectors = [];
-
- /** @type {SpeedyTextureReader} texture reader */
- this._textureReader = new SpeedyTextureReader();
-
- /** @type {number} page flipping index */
- this._page = 0;
-
- /** @type {boolean} accelerate GPU-CPU transfers */
- this._turbo = false;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @returns {boolean}
- */
- get turbo() {
- return this._turbo;
- }
-
- /**
- * Accelerate GPU-CPU transfers
- * @param {boolean} value
- */
- set turbo(value) {
- this._turbo = Boolean(value);
- }
-
- /**
- * Initializes this node
- * @param {SpeedyGPU} gpu
- */
- init(gpu) {
- super.init(gpu);
- this._textureReader.init(gpu);
- }
-
- /**
- * Releases this node
- * @param {SpeedyGPU} gpu
- */
- release(gpu) {
- this._textureReader.release(gpu);
- super.release(gpu);
- }
-
- /**
- * Export data from this node to the user
- * @returns {SpeedyPromise<SpeedyVector2[]>}
- */
- export() {
- return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
- }
-
- /**
- * Run the specific task of this node
- * @param {SpeedyGPU} gpu
- * @returns {void|SpeedyPromise<void>}
- */
- _run(gpu) {
- const {
- vectors
- } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input().read();
- const useBufferedDownloads = this._turbo;
- const encoderLength = vectors.width;
-
- /*
- I have found experimentally that, in Firefox, readPixelsAsync()
- performs MUCH better if the width of the target texture is a power
- of two. I have no idea why this is the case, nor if it's related to
- some interaction with the GL drivers, somehow. This seems to make no
- difference on Chrome, however. In any case, let's convert the input
- texture to POT.
- */
- const encoderWidth = vector2_sink_nextPot(encoderLength);
- const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
- //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
-
- // copy the set of vectors to an internal texture
- const copiedTexture = this._tex[this._page];
- gpu.programs.utils.copy2DVectors.outputs(encoderWidth, encoderHeight, copiedTexture)(vectors);
-
- // flip page
- this._page = 1 - this._page;
-
- // download the internal texture
- return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
- this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
- });
- }
-
- /**
- * Decode a sequence of vectors, given a flattened image of encoded pixels
- * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
- * @param {number} encoderWidth
- * @param {number} encoderHeight
- * @returns {SpeedyVector2[]} vectors
- */
- static _decode(pixels, encoderWidth, encoderHeight) {
- const bytesPerVector = 4; // 1 pixel per vector
- const vectors = [];
- let hi = 0,
- lo = 0;
- let x = 0,
- y = 0;
-
- // how many bytes should we read?
- const e2 = encoderWidth * encoderHeight * bytesPerVector;
- const size = Math.min(pixels.length, e2);
-
- // for each encoded vector
- for (let i = 0; i < size; i += bytesPerVector) {
- // extract 16-bit words
- lo = pixels[i + 1] << 8 | pixels[i];
- hi = pixels[i + 3] << 8 | pixels[i + 2];
-
- // the vector is "null": we have reached the end of the list
- if (lo == 0xFFFF && hi == 0xFFFF) break;
-
- // the vector must be discarded
- if (lo == 0xFF00 && hi == 0xFF00) continue;
-
- // decode floats
- x = utils/* Utils */.A.decodeFloat16(lo);
- y = utils/* Utils */.A.decodeFloat16(hi);
-
- // register vector
- vectors.push(new SpeedyVector2(x, y));
- }
-
- // done!
- return vectors;
- }
- }
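- 
- /*
-  * Worked example of the layout consumed by _decode() above (illustrative bytes):
-  * each vector occupies a single RGBA pixel, with x stored as a little-endian
-  * half-float in (r, g) and y in (b, a).
-  *
-  *   pixels = Uint8Array.of(0x00, 0x3C, 0x00, 0xC0)
-  *   lo = 0x3C00  ->  Utils.decodeFloat16(lo) ===  1.0
-  *   hi = 0xC000  ->  Utils.decodeFloat16(hi) === -2.0
-  *   _decode(pixels, 1, 1)  ->  [ SpeedyVector2(1, -2) ]
-  *
-  * A pixel whose two 16-bit words are both 0xFF00 is skipped ("discarded");
-  * a pixel whose two words are both 0xFFFF terminates the list.
-  */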
- ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * vector2-factory.js
- * 2D vectors
- */
-
-
-
-
- /**
- * 2D vectors
- */
- class SpeedyPipelineVector2Factory extends Function {
- /**
- * Constructor
- */
- constructor() {
- // This factory can be invoked as a function
- super('...args', 'return this._create(...args)');
- return this.bind(this);
- }
-
- /**
- * @private
- *
- * Create a 2D vector
- * @param {number} x x-coordinate
- * @param {number} y y-coordinate
- * @returns {SpeedyVector2}
- */
- _create(x, y) {
- return new SpeedyVector2(x, y);
- }
-
- /**
- * Create a Vector2 sink
- * @param {string} [name]
- * @returns {SpeedyPipelineNodeVector2Sink}
- */
- Sink(name = undefined) {
- return new SpeedyPipelineNodeVector2Sink(name);
- }
- }
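- 
- /*
-  * Usage sketch (illustrative): the Function-constructor trick above makes the
-  * factory itself callable. Assuming the Speedy.Vector2 getter defined further
-  * down in this bundle:
-  *
-  *   const v = Speedy.Vector2(3, 4);      // invokes _create(3, 4) -> SpeedyVector2
-  *   const sink = Speedy.Vector2.Sink();  // regular factory method
-  */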
- ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * fps-counter.js
- * An FPS counter
- */
-
-
-
- /** @const {number} update interval in milliseconds */
- const UPDATE_INTERVAL = 500;
-
- /** @type {FPSCounter|null} Singleton */
- let instance = null;
-
- /**
- * FPS counter
- */
- class FPSCounter {
- /**
- * Creates a new FPSCounter
- * @private
- */
- constructor() {
- /** @type {number} current FPS rate */
- this._fps = 60;
-
- /** @type {number} frame counter */
- this._frames = 0;
-
- /** @type {number} update interval in milliseconds */
- this._updateInterval = UPDATE_INTERVAL;
-
- /** @type {number} time of the last update */
- this._lastUpdate = performance.now();
-
- /** @type {function(): void} bound update function */
- this._boundUpdate = this._update.bind(this);
-
- // this should never happen...
- if (instance !== null) throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
-
- // start FPS counter
- this._boundUpdate();
- }
-
- /**
- * Gets an instance of the FPS counter.
- * We use lazy loading, i.e., we will not
- * create an FPS counter unless we need to!
- * @returns {FPSCounter}
- */
- static get instance() {
- if (instance === null) instance = new FPSCounter();
- return instance;
- }
-
- /**
- * Get the FPS rate
- * @returns {number} frames per second
- */
- get fps() {
- return this._fps;
- }
-
- /**
- * Updates the FPS counter
- */
- _update() {
- const now = performance.now();
- const deltaTime = now - this._lastUpdate;
- if (deltaTime >= this._updateInterval) {
- this._fps = Math.round(this._frames / (deltaTime * 0.001));
- this._frames = 0;
- this._lastUpdate = now;
- }
- this._frames++;
- requestAnimationFrame(this._boundUpdate);
- }
- }
- ;// CONCATENATED MODULE: ./src/main.js
- /*
- * speedy-vision.js
- * GPU-accelerated Computer Vision for JavaScript
- * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * main.js
- * The entry point of the library
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- /* eslint-disable no-undef */
- /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
- /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
- /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
- /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
-
- // Constants
-
- /** @type {SpeedyMatrixFactory} */
- const matrixFactory = new SpeedyMatrixFactory();
-
- /** @type {SpeedyPipelineVector2Factory} */
- const vector2Factory = new SpeedyPipelineVector2Factory();
-
- /**
- * GPU-accelerated Computer Vision for JavaScript
- */
- class Speedy {
- /**
- * The version of the library
- * @returns {string}
- */
- static get version() {
- if (false) {}else return "0.9.1";
- }
-
- /**
- * Checks if Speedy can be executed on this machine & browser
- * @returns {boolean}
- */
- static isSupported() {
- return typeof WebAssembly !== 'undefined' && typeof WebGL2RenderingContext !== 'undefined' && speedy_gl/* SpeedyGL */.c.instance.gl != null;
- }
-
- /**
- * Global settings
- * @returns {typeof Settings}
- */
- static get Settings() {
- return settings/* Settings */.w;
- }
-
- /**
- * Create a 2D vector
- * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
- */
- static get Vector2() {
- return vector2Factory;
- }
-
- /**
- * Create a 2D point
- * @param {number} x
- * @param {number} y
- * @returns {SpeedyPoint2}
- */
- static Point2(x, y) {
- return new SpeedyPoint2(x, y);
- }
-
- /**
- * Create a new size object
- * @param {number} width
- * @param {number} height
- * @returns {SpeedySize}
- */
- static Size(width, height) {
- return new SpeedySize(width, height);
- }
-
- /**
- * Create a Matrix (entries are given in column-major format)
- * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
- */
- static get Matrix() {
- return matrixFactory;
- }
-
- /**
- * Speedy Promises
- * @returns {typeof SpeedyPromise}
- */
- static get Promise() {
- return speedy_promise/* SpeedyPromise */.i;
- }
-
- /**
- * Create a new Pipeline
- * @returns {SpeedyPipeline}
- */
- static Pipeline() {
- return new SpeedyPipeline();
- }
-
- /**
- * Image-related nodes
- * @returns {typeof SpeedyPipelineImageFactory}
- */
- static get Image() {
- return SpeedyPipelineImageFactory;
- }
-
- /**
- * Image filters
- * @returns {typeof SpeedyPipelineFilterFactory}
- */
- static get Filter() {
- return SpeedyPipelineFilterFactory;
- }
-
- /**
- * Image transforms
- * @returns {typeof SpeedyPipelineTransformFactory}
- */
- static get Transform() {
- return SpeedyPipelineTransformFactory;
- }
-
- /**
- * Keypoint-related nodes
- * @returns {typeof SpeedyPipelineKeypointFactory}
- */
- static get Keypoint() {
- return SpeedyPipelineKeypointFactory;
- }
-
- /**
- * Loads a SpeedyMedia object based on the provided source element
- * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
- * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static load(sourceElement, options = {}) {
- return SpeedyMedia.load(sourceElement, options);
- }
-
- /**
- * Loads a camera stream
- * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
- * @param {number} [height] height of the stream
- * @returns {SpeedyPromise<SpeedyMedia>}
- */
- static camera(widthOrConstraints = 640, height = 360) {
- const constraints = typeof widthOrConstraints === 'object' ? widthOrConstraints : {
- audio: false,
- video: {
- width: widthOrConstraints | 0,
- height: height | 0
- }
- };
- return utils/* Utils */.A.requestCameraStream(constraints).then(video => SpeedyMedia.load(video));
- }
-
- /**
- * Utilities to query information about the graphics driver
- * @returns {typeof SpeedyPlatform}
- */
- static get Platform() {
- return SpeedyPlatform;
- }
-
- /**
- * The FPS rate
- * @returns {number} Frames per second (FPS)
- */
- static get fps() {
- return FPSCounter.instance.fps;
- }
- }
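- 
- /*
-  * Usage sketch (illustrative values):
-  *
-  *   // webcam: numeric arguments are wrapped into MediaStreamConstraints
-  *   Speedy.camera(640, 480).then(media => { ... });
-  *
-  *   // existing element (<img>, <video>, <canvas>, ...)
-  *   Speedy.load(document.querySelector('video')).then(media => { ... });
-  */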
-
- // Freeze the namespace
- Object.freeze(Speedy);
-
- // Display a notice
- utils/* Utils */.A.log(`Speedy Vision version ${Speedy.version}. ` + `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` + "https://github.com/alemart/speedy-vision");
-
- // Big-endian machine? Currently untested.
- if (!globals.LITTLE_ENDIAN) utils/* Utils */.A.warning('Running on a big-endian machine');
- })();
-
- __nested_webpack_exports__ = __nested_webpack_exports__["default"];
- /******/ return __nested_webpack_exports__;
- /******/ })()
- ;
- });
-
- /***/ })
-
- /******/ });
- /************************************************************************/
- /******/ // The module cache
- /******/ var __webpack_module_cache__ = {};
- /******/
- /******/ // The require function
- /******/ function __webpack_require__(moduleId) {
- /******/ // Check if module is in cache
- /******/ var cachedModule = __webpack_module_cache__[moduleId];
- /******/ if (cachedModule !== undefined) {
- /******/ return cachedModule.exports;
- /******/ }
- /******/ // Create a new module (and put it into the cache)
- /******/ var module = __webpack_module_cache__[moduleId] = {
- /******/ // no module.id needed
- /******/ // no module.loaded needed
- /******/ exports: {}
- /******/ };
- /******/
- /******/ // Execute the module function
- /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
- /******/
- /******/ // Return the exports of the module
- /******/ return module.exports;
- /******/ }
- /******/
- /************************************************************************/
- /******/ /* webpack/runtime/compat get default export */
- /******/ (() => {
- /******/ // getDefaultExport function for compatibility with non-harmony modules
- /******/ __webpack_require__.n = (module) => {
- /******/ var getter = module && module.__esModule ?
- /******/ () => (module['default']) :
- /******/ () => (module);
- /******/ __webpack_require__.d(getter, { a: getter });
- /******/ return getter;
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/define property getters */
- /******/ (() => {
- /******/ // define getter functions for harmony exports
- /******/ __webpack_require__.d = (exports, definition) => {
- /******/ for(var key in definition) {
- /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
- /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
- /******/ }
- /******/ }
- /******/ };
- /******/ })();
- /******/
- /******/ /* webpack/runtime/hasOwnProperty shorthand */
- /******/ (() => {
- /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
- /******/ })();
- /******/
- /************************************************************************/
- var __webpack_exports__ = {};
- // This entry need to be wrapped in an IIFE because it need to be in strict mode.
- (() => {
- "use strict";
-
- // EXPORTS
- __webpack_require__.d(__webpack_exports__, {
- "default": () => (/* binding */ Martins)
- });
-
- // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
- var speedy_vision = __webpack_require__(774);
- var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
- ;// CONCATENATED MODULE: ./src/utils/errors.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * errors.ts
- * Error classes
- */
- /**
- * Generic error class
- */
- class MartinsError extends Error {
- /**
- * Constructor
- * @param message error message
- * @param cause cause of the error
- */
- constructor(message = '', cause = null) {
- super(message);
- this.cause = cause;
- }
- /*{
- // incorrect when minified
- //return this.constructor.name;
- }*/
- /**
- * Convert to string
- */
- toString() {
- const extendedMessage = this.cause ? '\n-> ' + this.cause.toString() : '';
- if (this.message != '')
- return this.name + ': ' + this.message + extendedMessage;
- else
- return this.name + extendedMessage;
- }
- }
- /**
- * A method has received one or more illegal arguments
- */
- class IllegalArgumentError extends MartinsError {
- get name() {
- return 'IllegalArgumentError';
- }
- }
- /**
- * The method arguments are valid, but the method can't be called due to the
- * current state of the object
- */
- class IllegalOperationError extends MartinsError {
- get name() {
- return 'IllegalOperationError';
- }
- }
- /**
- * The requested operation is not supported
- */
- class NotSupportedError extends MartinsError {
- get name() {
- return 'NotSupportedError';
- }
- }
- /**
- * Access denied
- */
- class AccessDeniedError extends MartinsError {
- get name() {
- return 'AccessDeniedError';
- }
- }
- /**
- * Timeout
- */
- class TimeoutError extends MartinsError {
- get name() {
- return 'TimeoutError';
- }
- }
- /**
- * Assertion error
- */
- class AssertionError extends MartinsError {
- get name() {
- return 'AssertionError';
- }
- }
- /**
- * Tracking error
- */
- class TrackingError extends MartinsError {
- get name() {
- return 'TrackingError';
- }
- }
- /**
- * Detection error
- */
- class DetectionError extends MartinsError {
- get name() {
- return 'DetectionError';
- }
- }
- /**
- * Training error
- */
- class TrainingError extends MartinsError {
- get name() {
- return 'TrainingError';
- }
- }
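- 
- /*
-  * Illustrative example of error chaining via the `cause` argument:
-  *
-  *   const err = new TrackingError('target lost', new TimeoutError('no frames received'));
-  *   err.toString();
-  *   // "TrackingError: target lost
-  *   // -> TimeoutError: no frames received"
-  */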
-
- ;// CONCATENATED MODULE: ./src/core/resolution.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * resolution.ts
- * Resolution utilities
- */
-
- /** Reference heights when in landscape mode, measured in pixels */
- const REFERENCE_HEIGHT = {
- 'xs': 120,
- 'xs+': 160,
- 'sm': 200,
- 'sm+': 240,
- 'md': 320,
- 'md+': 360,
- 'lg': 480,
- 'lg+': 600,
- };
- /**
- * Convert a resolution type to a (width, height) pair
- * @param resolution resolution type
- * @param aspectRatio desired width / height ratio
- * @returns size in pixels
- */
- function computeResolution(resolution, aspectRatio) {
- const referenceHeight = REFERENCE_HEIGHT[resolution];
- let width = 0, height = 0;
- if (aspectRatio >= 1) {
- // landscape
- height = referenceHeight;
- width = Math.round(height * aspectRatio);
- width -= width % 2;
- }
- else {
- // portrait
- width = referenceHeight;
- height = Math.round(width / aspectRatio);
- height -= height % 2;
- }
- return speedy_vision_default().Size(width, height);
- }
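- 
- /*
-  * Worked examples (illustrative):
-  *
-  *   computeResolution('md', 16 / 9);  // landscape: height = 320, width = round(568.9) = 569 -> 568 (even) => Size(568, 320)
-  *   computeResolution('md', 9 / 16);  // portrait:  width = 320, height = round(568.9) = 569 -> 568 (even) => Size(320, 568)
-  */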
-
- ;// CONCATENATED MODULE: ./src/utils/utils.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * utils.ts
- * Generic utilities
- */
-
-
-
- /**
- * Generic utilities
- */
- class Utils {
- /**
- * Log a message
- * @param message
- * @param args optional additional messages
- */
- static log(message, ...args) {
- console.log('[martins-js]', message, ...args);
- }
- /**
- * Display a warning
- * @param message
- * @param args optional additional messages
- */
- static warning(message, ...args) {
- console.warn('[martins-js]', message, ...args);
- }
- /**
- * Display an error message
- * @param message
- * @param args optional additional messages
- */
- static error(message, ...args) {
- console.error('[martins-js]', message, ...args);
- }
- /**
- * Assertion
- * @param expr expression
- * @param errorMessage optional error message
- * @throws {AssertionError}
- */
- static assert(expr, errorMessage = '') {
- if (!expr)
- throw new AssertionError(errorMessage);
- }
- /**
- * Returns a range [0, 1, ..., n-1]
- * @param n non-negative integer
- * @returns range from 0 to n-1, inclusive
- */
- static range(n) {
- if ((n |= 0) < 0)
- throw new IllegalArgumentError();
- return Array.from({ length: n }, (_, i) => i);
- }
- /**
- * Convert a resolution type to a resolution measured in pixels
- * @param resolution resolution type
- * @param aspectRatio width / height ratio
- * @returns resolution measured in pixels
- */
- static resolution(resolution, aspectRatio) {
- return computeResolution(resolution, aspectRatio);
- }
- /**
- * Returns a string containing platform brand information
- * @returns platform brand information
- */
- static platformString() {
- return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
- navigator.userAgentData.platform : // use only low entropy data
- navigator.platform // navigator.platform is deprecated
- )(navigator);
- }
- /**
- * Checks if we're on iOS
- * @returns true if we're on iOS
- */
- static isIOS() {
- // at the time of this writing, navigator.userAgentData is not yet
- // compatible with Safari. navigator.platform is deprecated, but
- // predictable.
- if (/(iOS|iPhone|iPad|iPod)/i.test(navigator.platform))
- return true;
- if (/Mac/i.test(navigator.platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
- return navigator.maxTouchPoints > 2;
- return false;
- }
- /**
- * Checks if we're on a WebKit-based browser
- * @returns true if we're on a WebKit-based browser
- */
- static isWebKit() {
- // note: navigator.vendor is deprecated
- if (/Apple/.test(navigator.vendor))
- return true;
- // Can a non WebKit-based browser pass this test?
- // Test masked GL_RENDERER == "Apple GPU" (valid since Feb 2020)
- // https://bugs.webkit.org/show_bug.cgi?id=207608
- /*
- if(Speedy.Platform.renderer == 'Apple GPU' && Speedy.Platform.vendor == 'Apple Inc.')
- return true;
- */
- // Desktop and Mobile Safari, Epiphany on Linux
- if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
- return true;
- // Chrome, Firefox, Edge on iOS
- if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
- return true;
- // not WebKit
- return false;
- }
- /**
- * Device-specific information for debugging purposes
- */
- static deviceInfo() {
- return 'Device info: ' + JSON.stringify({
- isIOS: Utils.isIOS(),
- isWebKit: Utils.isWebKit(),
- renderer: (speedy_vision_default()).Platform.renderer,
- vendor: (speedy_vision_default()).Platform.vendor,
- screen: [screen.width, screen.height].join('x'),
- platform: [navigator.platform, navigator.vendor].join('; '),
- userAgent: navigator.userAgent,
- userAgentData: navigator.userAgentData || null,
- }, null, 2);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * ar-events.ts
- * AR-related Events
- */
- /**
- * AR Event
- */
- class AREvent extends Event {
- /**
- * Constructor
- * @param type event type
- */
- constructor(type) {
- super(type);
- }
- /**
- * Event type
- */
- get type() {
- return super.type;
- }
- }
- /**
- * AR Event Target
- */
- class AREventTarget {
- /**
- * Constructor
- */
- constructor() {
- this._delegate = new EventTarget();
- }
- /**
- * Add event listener
- * @param type event type
- * @param callback
- */
- addEventListener(type, callback) {
- this._delegate.addEventListener(type, callback);
- }
- /**
- * Remove event listener
- * @param type event type
- * @param callback
- */
- removeEventListener(type, callback) {
- this._delegate.removeEventListener(type, callback);
- }
- /**
- * Synchronously trigger an event
- * @param event
- * @returns same value as a standard event target
- * @internal
- */
- dispatchEvent(event) {
- return this._delegate.dispatchEvent(event);
- }
- }
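- 
- /*
-  * Minimal sketch (hypothetical names, not part of the library):
-  *
-  *   class ReadyEvent extends AREvent { constructor() { super('ready'); } }
-  *   class Loader extends AREventTarget { }
-  *
-  *   const loader = new Loader();
-  *   loader.addEventListener('ready', () => console.log('ready!'));
-  *   loader.dispatchEvent(new ReadyEvent());
-  */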
-
- ;// CONCATENATED MODULE: ./src/core/hud.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * hud.ts
- * Heads Up Display
- */
-
-
- /**
- * Heads Up Display: an overlay displayed in front of the augmented scene
- */
- class HUD {
- /**
- * Constructor
- * @param parent parent of the hud container
- * @param hudContainer an existing hud container (optional)
- */
- constructor(parent, hudContainer) {
- this._container = hudContainer || this._createContainer(parent);
- this._ownContainer = (hudContainer == null);
- // validate
- if (this._container.parentElement !== parent)
- throw new IllegalArgumentError('The container of the HUD must be a direct child of the container of the viewport');
- // the HUD should be hidden initially
- if (!this._container.hidden)
- Utils.warning(`The container of the HUD should have the hidden attribute`);
- }
- /**
- * The container of the HUD
- */
- get container() {
- return this._container;
- }
- /**
- * Whether or not the HUD is visible
- */
- get visible() {
- return !this._container.hidden;
- }
- /**
- * Whether or not the HUD is visible
- */
- set visible(visible) {
- this._container.hidden = !visible;
- }
- /**
- * Initialize the HUD
- * @param zIndex the z-index of the container
- * @internal
- */
- _init(zIndex) {
- const container = this._container;
- container.style.position = 'absolute';
- container.style.left = container.style.top = '0px';
- container.style.right = container.style.bottom = '0px';
- container.style.padding = container.style.margin = '0px';
- container.style.zIndex = String(zIndex);
- container.style.userSelect = 'none';
- }
- /**
- * Release the HUD
- * @internal
- */
- _release() {
- if (this._ownContainer) {
- this._ownContainer = false;
- this._container.remove();
- }
- }
- /**
- * Create a HUD container as an immediate child of the input node
- * @param parent parent container
- * @returns HUD container
- */
- _createContainer(parent) {
- const node = document.createElement('div');
- node.hidden = true;
- parent.appendChild(node);
- return node;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/viewport.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewport.ts
- * Viewport
- */
-
-
-
-
-
- /** An event emitted by a Viewport */
- class ViewportEvent extends AREvent {
- }
- /** Viewport event target */
- class ViewportEventTarget extends AREventTarget {
- }
- /** Default viewport constructor settings */
- const DEFAULT_VIEWPORT_SETTINGS = {
- container: null,
- hudContainer: null,
- resolution: 'lg',
- style: 'best-fit',
- canvas: null,
- };
- /** Z-index of the viewport container */
- const CONTAINER_ZINDEX = 1000000000;
- /** Base z-index of the children of the viewport container */
- const BASE_ZINDEX = 0;
- /** Z-index of the background canvas */
- const BACKGROUND_ZINDEX = BASE_ZINDEX + 0;
- /** Z-index of the foreground canvas */
- const FOREGROUND_ZINDEX = BASE_ZINDEX + 1;
- /** Z-index of the HUD */
- const HUD_ZINDEX = BASE_ZINDEX + 2;
- /** Default viewport width, in pixels */
- const DEFAULT_VIEWPORT_WIDTH = 300;
- /** Default viewport height, in pixels */
- const DEFAULT_VIEWPORT_HEIGHT = 150;
- /**
- * Viewport
- */
- class BaseViewport extends ViewportEventTarget {
- /**
- * Constructor
- * @param viewportSettings
- */
- constructor(viewportSettings) {
- super();
- const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
- const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
- // validate settings
- if (settings.container == null)
- throw new IllegalArgumentError('Unspecified viewport container');
- else if (!(settings.container instanceof HTMLElement))
- throw new IllegalArgumentError('Invalid viewport container');
- // initialize attributes
- this._resolution = settings.resolution;
- this._container = settings.container;
- this._hud = new HUD(settings.container, settings.hudContainer);
- // make this more elegant?
- // need to initialize this._style and validate settings.style
- this._style = DEFAULT_VIEWPORT_SETTINGS.style;
- this.style = settings.style;
- // create the background canvas
- this.__backgroundCanvas = this._createBackgroundCanvas(this._container, size);
- // create the foreground canvas
- if (settings.canvas == null) {
- this._foregroundCanvas = this._createForegroundCanvas(this._container, size);
- this._parentOfImportedForegroundCanvas = null;
- }
- else {
- this._foregroundCanvas = settings.canvas;
- this._parentOfImportedForegroundCanvas = settings.canvas.parentNode;
- }
- }
- /**
- * Make a request to the user agent so that the viewport container is
- * displayed in fullscreen mode. The container must be a compatible element[1]
- * and the user must interact with the page in order to comply with browser
- * policies[2]. In case of error, the returned promise is rejected.
- * [1] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#compatible_elements
- * [2] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#security
- */
- requestFullscreen() {
- const container = this._container;
- // fallback for older WebKit versions
- if (container.requestFullscreen === undefined) {
- if (container.webkitRequestFullscreen === undefined)
- return speedy_vision_default().Promise.reject(new NotSupportedError());
- else if (!document.webkitFullscreenEnabled)
- return speedy_vision_default().Promise.reject(new AccessDeniedError());
- // webkitRequestFullscreen() does not return a value
- container.webkitRequestFullscreen();
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- setTimeout(() => {
- if (container === document.webkitFullscreenElement)
- resolve();
- else
- reject(new TypeError());
- }, 100);
- });
- }
- // check if the fullscreen mode is available
- if (!document.fullscreenEnabled)
- return speedy_vision_default().Promise.reject(new AccessDeniedError());
- // request fullscreen
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- container.requestFullscreen({
- navigationUI: 'hide'
- }).then(resolve, reject);
- });
- }
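- /*
-  * Usage sketch (illustrative): browsers only honor requestFullscreen() in
-  * response to a user gesture, e.g. a click on a HUD button:
-  *
-  *   button.addEventListener('click', () => {
-  *       viewport.requestFullscreen().catch(err => console.error('Fullscreen denied', err));
-  *   });
-  */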
- /**
- * Exit fullscreen mode
- */
- exitFullscreen() {
- // fallback for older WebKit versions
- if (document.exitFullscreen === undefined) {
- const doc = document;
- if (doc.webkitExitFullscreen === undefined)
- return speedy_vision_default().Promise.reject(new NotSupportedError());
- else if (doc.webkitFullscreenElement === null)
- return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
- // webkitExitFullscreen() does not return a value
- doc.webkitExitFullscreen();
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- setTimeout(() => {
- if (doc.webkitFullscreenElement === null)
- resolve();
- else
- reject(new TypeError());
- }, 100);
- });
- }
- // exit fullscreen
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- document.exitFullscreen().then(resolve, reject);
- });
- }
- /** Is the fullscreen mode available? */
- isFullscreenAvailable() {
- return document.fullscreenEnabled ||
- !!(document.webkitFullscreenEnabled);
- }
- /**
- * True if the viewport is being displayed in fullscreen mode
- */
- get fullscreen() {
- if (document.fullscreenElement !== undefined)
- return document.fullscreenElement === this._container;
- else if (document.webkitFullscreenElement !== undefined)
- return document.webkitFullscreenElement === this._container;
- else
- return false;
- }
- /**
- * Viewport container
- */
- get container() {
- return this._container;
- }
- /**
- * Viewport style
- */
- get style() {
- return this._style;
- }
- /**
- * Set viewport style
- */
- set style(value) {
- if (value != 'best-fit' && value != 'stretch' && value != 'inline')
- throw new IllegalArgumentError('Invalid viewport style: ' + value);
- const changed = (value != this._style);
- this._style = value;
- if (changed) {
- const event = new ViewportEvent('resize');
- this.dispatchEvent(event);
- }
- }
- /**
- * HUD
- */
- get hud() {
- return this._hud;
- }
- /**
- * Resolution of the virtual scene
- */
- get resolution() {
- return this._resolution;
- }
- /**
- * Size in pixels of the drawing buffer of the canvas
- * on which the virtual scene will be drawn
- */
- get virtualSize() {
- const aspectRatio = this._backgroundCanvas.width / this._backgroundCanvas.height;
- return Utils.resolution(this._resolution, aspectRatio);
- }
- /**
- * The canvas on which the virtual scene will be drawn
- */
- get canvas() {
- return this._foregroundCanvas;
- }
- /**
- * The canvas on which the physical scene will be drawn
- * @internal
- */
- get _backgroundCanvas() {
- return this.__backgroundCanvas;
- }
- /**
- * Size of the drawing buffer of the background canvas, in pixels
- * @internal
- */
- get _realSize() {
- throw new IllegalOperationError();
- }
- /**
- * Initialize the viewport (when the session starts)
- * @internal
- */
- _init() {
- // import foreground canvas
- if (this._parentOfImportedForegroundCanvas != null) {
- const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
- this._importForegroundCanvas(this._foregroundCanvas, this._container, size);
- }
- // setup CSS
- this._container.style.touchAction = 'none';
- this._container.style.backgroundColor = 'black';
- this._container.style.zIndex = String(CONTAINER_ZINDEX);
- // initialize the HUD
- this._hud._init(HUD_ZINDEX);
- this._hud.visible = true;
- }
- /**
- * Release the viewport (when the session ends)
- * @internal
- */
- _release() {
- // release the HUD
- this._hud._release();
- // reset the CSS
- this._container.style.touchAction = 'auto';
- // restore imported canvas
- if (this._parentOfImportedForegroundCanvas != null)
- this._restoreImportedForegroundCanvas();
- }
- /**
- * Create a canvas and attach it to another HTML element
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createCanvas(parent, size) {
- const canvas = document.createElement('canvas');
- canvas.width = size.width;
- canvas.height = size.height;
- parent.appendChild(canvas);
- return canvas;
- }
- /**
- * Create the background canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createBackgroundCanvas(parent, size) {
- const canvas = this._createCanvas(parent, size);
- return this._styleCanvas(canvas, BACKGROUND_ZINDEX);
- }
- /**
- * Create the foreground canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns a new canvas as a child of parent
- */
- _createForegroundCanvas(parent, size) {
- const canvas = this._createCanvas(parent, size);
- return this._styleCanvas(canvas, FOREGROUND_ZINDEX);
- }
- /**
- * Import an existing foreground canvas to the viewport
- * @param canvas existing canvas
- * @param parent parent container
- * @param size size of the drawing buffer
- * @returns the input canvas
- */
- _importForegroundCanvas(canvas, parent, size) {
- if (!(canvas instanceof HTMLCanvasElement))
- throw new IllegalArgumentError('Not a canvas: ' + canvas);
- // borrow the canvas; add it as a child of the viewport container
- canvas.remove();
- parent.appendChild(canvas);
- canvas.width = size.width;
- canvas.height = size.height;
- canvas.dataset.cssText = canvas.style.cssText; // save CSS
- canvas.style.cssText = ''; // clear CSS
- this._styleCanvas(canvas, FOREGROUND_ZINDEX);
- return canvas;
- }
- /**
- * Restore a previously imported foreground canvas to its original parent
- */
- _restoreImportedForegroundCanvas() {
- // not an imported canvas; nothing to do
- if (this._parentOfImportedForegroundCanvas == null)
- throw new IllegalOperationError();
- const canvas = this._foregroundCanvas;
- canvas.style.cssText = canvas.dataset.cssText || ''; // restore CSS
- canvas.remove();
- this._parentOfImportedForegroundCanvas.appendChild(canvas);
- }
- /**
- * Add suitable CSS rules to a canvas
- * @param canvas
- * @param zIndex z-index to be applied to the canvas
- * @returns canvas
- */
- _styleCanvas(canvas, zIndex) {
- canvas.style.position = 'absolute';
- canvas.style.left = '0px';
- canvas.style.top = '0px';
- canvas.style.width = '100%';
- canvas.style.height = '100%';
- canvas.style.zIndex = String(zIndex);
- return canvas;
- }
- }
- /**
- * Viewport decorator
- */
- class ViewportDecorator extends ViewportEventTarget {
- /**
- * Constructor
- * @param base to be decorated
- * @param getSize size getter
- */
- constructor(base, getSize) {
- super();
- this._base = base;
- this._getSize = getSize;
- }
- /**
- * Viewport container
- */
- get container() {
- return this._base.container;
- }
- /**
- * Viewport style
- */
- get style() {
- return this._base.style;
- }
- /**
- * Set viewport style
- */
- set style(value) {
- this._base.style = value;
- }
- /**
- * HUD
- */
- get hud() {
- return this._base.hud;
- }
- /**
- * Fullscreen mode
- */
- get fullscreen() {
- return this._base.fullscreen;
- }
- /**
- * Resolution of the virtual scene
- */
- get resolution() {
- return this._base.resolution;
- }
- /**
- * Size in pixels of the drawing buffer of the canvas
- * on which the virtual scene will be drawn
- */
- get virtualSize() {
- return this._base.virtualSize;
- }
- /**
- * The canvas on which the virtual scene will be drawn
- */
- get canvas() {
- return this._base.canvas;
- }
- /**
- * Request fullscreen mode
- */
- requestFullscreen() {
- return this._base.requestFullscreen();
- }
- /**
- * Exit fullscreen mode
- */
- exitFullscreen() {
- return this._base.exitFullscreen();
- }
- /**
- * Is the fullscreen mode available?
- */
- isFullscreenAvailable() {
- return this._base.isFullscreenAvailable();
- }
- /**
- * Background canvas
- * @internal
- */
- get _backgroundCanvas() {
- return this._base._backgroundCanvas;
- }
- /**
- * Size of the drawing buffer of the background canvas, in pixels
- * @internal
- */
- get _realSize() {
- return this._getSize();
- }
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- this._base._init();
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this._base._release();
- }
- /**
- * Add event listener
- * @param type event type
- * @param callback
- */
- addEventListener(type, callback) {
- this._base.addEventListener(type, callback);
- }
- /**
- * Remove event listener
- * @param type event type
- * @param callback
- */
- removeEventListener(type, callback) {
- this._base.removeEventListener(type, callback);
- }
- /**
- * Synchronously trigger an event
- * @param event
- * @returns same value as a standard event target
- * @internal
- */
- dispatchEvent(event) {
- return this._base.dispatchEvent(event);
- }
- }
- /**
- * A viewport that watches for page resizes
- */
- class ResizableViewport extends ViewportDecorator {
- /**
- * Constructor
- * @param base to be decorated
- * @param getSize size getter
- */
- constructor(base, getSize) {
- super(base, getSize);
- this._active = false;
- // keep a stable reference to the bound handler, so that it can be removed later
- this._boundResize = this._resize.bind(this);
- this.addEventListener('resize', this._onResize.bind(this));
- }
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- super._init();
- this._active = true;
- // Configure the resize listener. We want the viewport
- // to adjust itself if the phone/screen is resized or
- // changes orientation
- let timeout = null;
- const onWindowResize = () => {
- if (!this._active) {
- window.removeEventListener('resize', onWindowResize);
- return;
- }
- if (timeout !== null)
- clearTimeout(timeout);
- timeout = setTimeout(() => {
- timeout = null;
- this._resize();
- }, 50);
- };
- window.addEventListener('resize', onWindowResize);
- // handle changes of orientation
- // (is this needed? we already listen to resize events)
- if (screen.orientation !== undefined)
- screen.orientation.addEventListener('change', this._boundResize);
- else
- window.addEventListener('orientationchange', this._boundResize); // deprecated
- // trigger a resize to setup the sizes / the CSS
- this._resize();
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- if (screen.orientation !== undefined)
- screen.orientation.removeEventListener('change', this._boundResize);
- else
- window.removeEventListener('orientationchange', this._boundResize); // deprecated
- this._active = false;
- super._release();
- }
- /**
- * Trigger a resize event
- */
- _resize() {
- const event = new ViewportEvent('resize');
- this.dispatchEvent(event);
- }
- /**
- * Function to be called when the viewport is resized
- */
- _onResize() {
- // Resize the drawing buffer of the foreground canvas, so that it
- // matches the desired resolution, as well as the aspect ratio of the
- // background canvas
- const foregroundCanvas = this.canvas;
- const virtualSize = this.virtualSize;
- foregroundCanvas.width = virtualSize.width;
- foregroundCanvas.height = virtualSize.height;
- // Resize the drawing buffer of the background canvas
- const backgroundCanvas = this._backgroundCanvas;
- const realSize = this._realSize;
- backgroundCanvas.width = realSize.width;
- backgroundCanvas.height = realSize.height;
- }
- }
- /**
- * Immersive viewport: it occupies the entire page
- */
- class ImmersiveViewport extends ResizableViewport {
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this.canvas.remove();
- this._backgroundCanvas.remove();
- this.hud.visible = false;
- this.container.style.cssText = ''; // reset CSS
- super._release();
- }
- /**
- * Resize the immersive viewport, so that it occupies the entire page.
- * We respect the aspect ratio of the source media
- */
- _onResize() {
- super._onResize();
- const container = this.container;
- container.style.position = 'fixed';
- if (this.style == 'best-fit') {
- // cover the page while maintaining the aspect ratio
- let viewportWidth = 0, viewportHeight = 0;
- const windowAspectRatio = window.innerWidth / window.innerHeight;
- const viewportAspectRatio = this._realSize.width / this._realSize.height;
- if (viewportAspectRatio <= windowAspectRatio) {
- viewportHeight = window.innerHeight;
- viewportWidth = (viewportHeight * viewportAspectRatio) | 0;
- }
- else {
- viewportWidth = window.innerWidth;
- viewportHeight = (viewportWidth / viewportAspectRatio) | 0;
- }
- container.style.left = `calc(50% - ${(viewportWidth + 1) >>> 1}px)`;
- container.style.top = `calc(50% - ${(viewportHeight + 1) >>> 1}px)`;
- container.style.width = viewportWidth + 'px';
- container.style.height = viewportHeight + 'px';
- }
- else if (this.style == 'stretch') {
- // stretch to cover the entire page
- container.style.left = '0px';
- container.style.top = '0px';
- container.style.width = window.innerWidth + 'px';
- container.style.height = window.innerHeight + 'px';
- }
- else
- throw new IllegalOperationError('Invalid immersive viewport style: ' + this.style);
- }
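- // Worked example (illustrative, not part of the library): with a 1280x720 (16:9) video
- // on a 1080x1920 portrait window, viewportAspectRatio (16/9) exceeds windowAspectRatio
- // (1080/1920), so the 'best-fit' branch above picks viewportWidth = 1080 and
- // viewportHeight = (1080 * 9/16) | 0 = 607, centering a letterboxed 1080x607 container
- // on the page.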
- }
- /**
- * Inline viewport: it follows the typical flow of a web page
- */
- class InlineViewport extends ResizableViewport {
- /**
- * Initialize the viewport
- * @internal
- */
- _init() {
- super._init();
- this.style = 'inline';
- }
- /**
- * Release the viewport
- * @internal
- */
- _release() {
- this.container.style.cssText = ''; // reset CSS
- super._release();
- }
- /**
- * Resize the inline viewport
- * (we still take orientation changes into account)
- */
- _onResize() {
- super._onResize();
- const container = this.container;
- container.style.position = 'relative';
- if (this.style == 'inline') {
- container.style.left = '0px';
- container.style.top = '0px';
- container.style.width = this.virtualSize.width + 'px';
- container.style.height = this.virtualSize.height + 'px';
- }
- else
- throw new IllegalOperationError('Invalid inline viewport style: ' + this.style);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/stats.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * stats.ts
- * Stats for performance measurements
- */
- /** update interval, given in seconds */
- const UPDATE_INTERVAL = 0.5;
- /**
- * Stats for performance measurements
- */
- class Stats {
- /**
- * Constructor
- */
- constructor() {
- this._timeOfLastUpdate = this._now();
- this._partialCycleCount = 0;
- this._cyclesPerSecond = 0;
- }
- /**
- * Update stats - call every frame
- */
- update() {
- const now = this._now();
- ++this._partialCycleCount;
- if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
- this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
- this._partialCycleCount = 0;
- this._timeOfLastUpdate = now;
- }
- }
- /**
- * Reset stats
- */
- reset() {
- this._timeOfLastUpdate = this._now();
- this._partialCycleCount = 0;
- this._cyclesPerSecond = 0;
- }
- /**
- * Number of cycles per second
- */
- get cyclesPerSecond() {
- return this._cyclesPerSecond;
- }
- /**
- * A measurement of time, in milliseconds
- * @returns time in ms
- */
- _now() {
- return performance.now();
- }
- }
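- // Illustrative sketch (not part of the library): how a Stats instance is meant to be
- // driven by a render loop. cyclesPerSecond is refreshed every UPDATE_INTERVAL (0.5s).
- //
- // const stats = new Stats();
- // function loop() {
- // stats.update(); // call once per cycle (e.g., per frame)
- // console.log(stats.cyclesPerSecond); // ~cycles per second
- // requestAnimationFrame(loop);
- // }
- // requestAnimationFrame(loop);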
-
- ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * stats-panel.ts
- * Stats panel used for development purposes
- */
-
-
-
- /** Update interval, in ms */
- const stats_panel_UPDATE_INTERVAL = 500;
- /** Icons for different power profiles */
- const POWER_ICON = Object.freeze({
- 'default': '',
- 'low-power': '🔋',
- 'high-performance': '⚡'
- });
- /**
- * Stats panel used for development purposes
- */
- class StatsPanel {
- /**
- * Constructor
- * @param viewport the viewport whose HUD will host the panel
- */
- constructor(viewport) {
- this._viewport = viewport;
- this._lastUpdate = 0;
- this._container = this._createContainer();
- viewport.hud.container.appendChild(this._container);
- }
- /**
- * Release the panel
- */
- release() {
- this._container.remove();
- }
- /**
- * A method to be called in the update loop
- * @param time current time in ms
- * @param trackers the trackers attached to the session
- * @param sources the sources of media linked to the session
- * @param gpu GPU cycles per second
- * @param fps frames per second
- */
- update(time, trackers, sources, gpu, fps) {
- if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
- this._lastUpdate = time;
- this._update(trackers, sources, fps, gpu);
- }
- }
- /**
- * Visibility of the panel
- */
- get visible() {
- return !this._container.hidden;
- }
- /**
- * Visibility of the panel
- */
- set visible(visible) {
- this._container.hidden = !visible;
- }
- /**
- * Update the contents of the panel
- * @param trackers the trackers attached to the session
- * @param sources the sources of media linked to the session
- * @param fps frames per second
- * @param gpu GPU cycles per second
- */
- _update(trackers, sources, fps, gpu) {
- // all sanitized
- const lfps = this._label('_ar_fps');
- if (lfps !== null) {
- lfps.style.color = this._color(fps);
- lfps.innerText = String(fps);
- }
- const lgpu = this._label('_ar_gpu');
- if (lgpu !== null) {
- lgpu.style.color = this._color(gpu);
- lgpu.innerText = String(gpu);
- }
- const lpower = this._label('_ar_power');
- if (lpower !== null)
- lpower.innerHTML = POWER_ICON[Settings.powerPreference];
- const lin = this._label('_ar_in');
- if (lin !== null) {
- const sourceStats = sources.map(source => source._stats).join(', ');
- lin.innerText = sourceStats;
- }
- const lout = this._label('_ar_out');
- if (lout !== null) {
- const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
- lout.innerText = trackerStats;
- }
- }
- /**
- * Get a label of the panel
- * @param className
- * @returns the HTML element, or null if it doesn't exist
- */
- _label(className) {
- return this._container.getElementsByClassName(className).item(0);
- }
- /**
- * Associate a color with a frequency
- * @param f frequency given in cycles per second
- * @returns a CSS color string
- */
- _color(f) {
- const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
- const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
- const color2 = f >= 30 ? GREEN : RED;
- const color = Settings.powerPreference != 'low-power' ? color3 : color2;
- return color;
- }
- /**
- * Create the container for the panel
- * @returns a container
- */
- _createContainer() {
- const container = document.createElement('div');
- const print = (html) => container.insertAdjacentHTML('beforeend', html);
- container.style.position = 'absolute';
- container.style.left = container.style.top = '0px';
- container.style.zIndex = '1000000';
- container.style.padding = '4px';
- container.style.whiteSpace = 'pre-line';
- container.style.backgroundColor = 'rgba(0,0,0,0.5)';
- container.style.color = 'white';
- container.style.fontFamily = 'monospace';
- container.style.fontSize = '14px';
- // all sanitized
- container.innerText = 'MARTINS.js ' + Martins.version;
- print('<br>');
- print('FPS: <span class="_ar_fps"></span> | ');
- print('GPU: <span class="_ar_gpu"></span> ');
- print('<span class="_ar_power"></span>');
- print('<br>');
- print('IN: <span class="_ar_in"></span>');
- print('<br>');
- print('OUT: <span class="_ar_out"></span>');
- if (this._viewport.isFullscreenAvailable()) {
- print('<br>');
- container.appendChild(this._createFullscreenToggle());
- }
- return container;
- }
- /**
- * Create a fullscreen toggle
- * @returns a fullscreen toggle
- */
- _createFullscreenToggle() {
- const toggle = document.createElement('a');
- Utils.assert(this._viewport != null);
- toggle.href = 'javascript:void(0)';
- toggle.innerText = 'Toggle fullscreen';
- toggle.style.color = 'white';
- toggle.setAttribute('role', 'button');
- toggle.addEventListener('click', () => {
- if (!this._viewport.fullscreen) {
- this._viewport.requestFullscreen().catch(err => {
- alert(`Can't enable fullscreen mode. ` + err.toString());
- });
- }
- else {
- this._viewport.exitFullscreen();
- }
- });
- return toggle;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/core/frame.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * frame.ts
- * A Frame holds information used to render a single animation frame of a Session
- */
- /**
- * Iterable frame results (helper class)
- */
- class IterableTrackerResults {
- constructor(_results) {
- this._results = _results;
- this._index = 0;
- }
- next() {
- const i = this._index++;
- return i < this._results.length ?
- { done: false, value: this._results[i] } :
- { done: true, value: undefined };
- }
- [Symbol.iterator]() {
- return this;
- }
- }
- /**
- * A Frame holds information used to render a single animation frame of a Session
- */
- class Frame {
- /**
- * Constructor
- * @param session
- * @param results
- */
- constructor(session, results) {
- this._session = session;
- this._results = new IterableTrackerResults(results);
- }
- /**
- * The session of which this frame holds data
- */
- get session() {
- return this._session;
- }
- /**
- * The results of all trackers in this frame
- */
- get results() {
- return this._results;
- }
- }
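- // Illustrative sketch (not part of the library): a Frame is what a callback registered
- // with Session.requestAnimationFrame() receives, along with the current time. `session`
- // is a placeholder for an active session; the `trackables` field is the shape used by
- // image tracker results (and by the empty fallback built in Session._render()).
- //
- // session.requestAnimationFrame(function loop(time, frame) {
- // for (const result of frame.results) // one result per attached tracker
- // console.log(result.trackables);
- // frame.session.requestAnimationFrame(loop); // schedule the next frame
- // });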
-
- ;// CONCATENATED MODULE: ./src/core/time.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * time.ts
- * Time utilities
- */
- /**
- * Time Manager
- */
- class Time {
- constructor() {
- /** time scale */
- this._scale = 1;
- /** time since the start of the session, in milliseconds */
- this._time = 0;
- /** unscaled time since the start of the session, in milliseconds */
- this._unscaledTime = 0;
- /** elapsed time between the current and the previous frame, in milliseconds */
- this._delta = 0;
- /** time of the first update call, in milliseconds */
- this._firstUpdate = 0;
- /** time of the last update call, in milliseconds */
- this._lastUpdate = Number.POSITIVE_INFINITY;
- }
- /**
- * Update the Time Manager
- * @param timestamp in milliseconds
- * @internal
- */
- _update(timestamp) {
- if (timestamp < this._lastUpdate) {
- this._firstUpdate = this._lastUpdate = timestamp;
- return;
- }
- this._delta = (timestamp - this._lastUpdate) * this._scale;
- this._time += this._delta;
- this._unscaledTime = timestamp - this._firstUpdate;
- this._lastUpdate = timestamp;
- }
- /**
- * Elapsed time since the start of the session, measured at the
- * beginning of the current animation frame and given in seconds
- */
- get elapsed() {
- return this._time * 0.001;
- }
- /**
- * Elapsed time between the current and the previous animation
- * frame, given in seconds
- */
- get delta() {
- return this._delta * 0.001;
- }
- /**
- * Time scale (defaults to 1)
- */
- get scale() {
- return this._scale;
- }
- /**
- * Time scale (defaults to 1)
- */
- set scale(scale) {
- this._scale = Math.max(0, +scale);
- }
- /**
- * Time scale independent elapsed time since the start of the session,
- * measured at the beginning of the current animation frame and given
- * in seconds
- */
- get unscaled() {
- return this._unscaledTime * 0.001;
- }
- }
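- // Illustrative sketch (not part of the library): delta and elapsed are given in seconds
- // and are affected by the time scale, while unscaled is not. `session` is a placeholder
- // for an active session exposing this Time instance via session.time.
- //
- // session.time.scale = 0.5; // scaled time now advances at half speed
- // session.requestAnimationFrame(function loop(time, frame) {
- // console.log(session.time.delta, session.time.elapsed, session.time.unscaled);
- // session.requestAnimationFrame(loop);
- // });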
-
- ;// CONCATENATED MODULE: ./src/core/gizmos.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * gizmos.ts
- * Visual cues for testing & debugging
- */
-
- /** The maximum match distance ratio we'll consider to be "good" */
- const GOOD_MATCH_THRESHOLD = 0.7;
- /**
- * Visual cues for testing & debugging
- */
- class Gizmos {
- /**
- * Constructor
- */
- constructor() {
- this._visible = false;
- }
- /**
- * Whether or not the gizmos will be rendered
- */
- get visible() {
- return this._visible;
- }
- /**
- * Whether or not the gizmos will be rendered
- */
- set visible(visible) {
- this._visible = visible;
- }
- /**
- * Render gizmos
- * @param viewport
- * @param trackers
- * @internal
- */
- _render(viewport, trackers) {
- // no need to render?
- if (!this._visible)
- return;
- // viewport
- const viewportSize = viewport._realSize;
- const canvas = viewport._backgroundCanvas;
- const ctx = canvas.getContext('2d', { alpha: false });
- if (!ctx)
- throw new IllegalOperationError();
- // debug
- //ctx.fillStyle = '#000';
- //ctx.fillRect(0, 0, canvas.width, canvas.height);
- //ctx.clearRect(0, 0, canvas.width, canvas.height);
- // render keypoints
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const keypoints = output.keypoints;
- const screenSize = output.screenSize;
- if (keypoints !== undefined && screenSize !== undefined)
- this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
- }
- // render polylines
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const polyline = output.polyline;
- const screenSize = output.screenSize;
- if (polyline !== undefined && screenSize !== undefined)
- this._renderPolyline(ctx, polyline, screenSize, viewportSize);
- }
- // render the axes of the 3D coordinate system
- for (let i = 0; i < trackers.length; i++) {
- if (trackers[i].type != 'image-tracker')
- continue;
- const output = trackers[i]._output;
- const cameraMatrix = output.cameraMatrix;
- const screenSize = output.screenSize;
- if (cameraMatrix !== undefined && screenSize !== undefined)
- this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
- }
- }
- /**
- * Split keypoints in matched/unmatched categories and
- * render them for testing & development purposes
- * @param ctx canvas 2D context
- * @param keypoints keypoints to render
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param size base keypoint rendering size
- */
- _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
- if (keypoints.length == 0)
- return;
- if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
- this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
- return;
- }
- const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
- (keypoint.matches.length > 1 &&
- keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
- keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
- const matchedKeypoints = keypoints;
- const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
- const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
- this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
- this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
- }
- /**
- * Render keypoints for testing & development purposes
- * @param ctx canvas 2D context
- * @param keypoints keypoints to render
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param color color of the rendered keypoints
- * @param size base keypoint rendering size
- */
- _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- ctx.beginPath();
- for (let i = keypoints.length - 1; i >= 0; i--) {
- const keypoint = keypoints[i];
- const x = (keypoint.x * sx + 0.5) | 0;
- const y = (keypoint.y * sy + 0.5) | 0;
- const r = (size * keypoint.scale + 0.5) | 0;
- ctx.rect(x - r, y - r, 2 * r, 2 * r);
- }
- ctx.strokeStyle = color;
- ctx.lineWidth = 1;
- ctx.stroke();
- }
- /**
- * Render polyline for testing & development purposes
- * @param ctx canvas 2D context
- * @param polyline vertices
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param color color of the rendered polyline
- * @param lineWidth
- */
- _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
- if (polyline.length == 0)
- return;
- const n = polyline.length;
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- // render polyline
- ctx.beginPath();
- ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
- for (let j = 0; j < n; j++)
- ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
- ctx.strokeStyle = color;
- ctx.lineWidth = lineWidth;
- ctx.stroke();
- }
- /**
- * Render the axes of a 3D coordinate system
- * @param ctx canvas 2D context
- * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
- * @param screenSize AR screen size
- * @param viewportSize viewport size
- * @param lineWidth
- */
- _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
- const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
- const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
- const length = 1; // length of each axis-corresponding line, given in normalized space units
- const sx = viewportSize.width / screenSize.width;
- const sy = viewportSize.height / screenSize.height;
- /*
-
- Multiply the 3x4 camera matrix P by:
-
- [ 0 L 0 0 ]
- [ 0 0 L 0 ] , where L = length in normalized space of the lines
- [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
- [ 1 1 1 1 ]
-
- Each column of the resulting matrix will give us the pixel coordinates
- we're looking for.
-
- Note: we're working with homogeneous coordinates
-
- */
- const p = cameraMatrix.read();
- const l = length;
- const o = [p[9], p[10], p[11]]; // origin of the coordinate system
- const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
- const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
- const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
- const axis = [x, y, z];
- // draw each axis
- const ox = o[0] / o[2], oy = o[1] / o[2];
- for (let i = 0; i < 3; i++) {
- const q = axis[i];
- const x = q[0] / q[2], y = q[1] / q[2];
- ctx.beginPath();
- ctx.moveTo(ox * sx, oy * sy);
- ctx.lineTo(x * sx, y * sy);
- ctx.strokeStyle = color[i];
- ctx.lineWidth = lineWidth;
- ctx.stroke();
- }
- //console.log("Origin",ox,oy);
- }
- }
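- // Illustrative sketch (not part of the library): once enabled, gizmos (keypoints,
- // polylines and coordinate axes) are drawn onto the background canvas every frame.
- // `session` is a placeholder for an active session:
- //
- // session.gizmos.visible = true;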
-
- ;// CONCATENATED MODULE: ./src/utils/asap.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * asap.ts
- * Schedule a function to run "as soon as possible"
- */
- /** callbacks */
- const callbacks = [];
- /** arguments to be passed to the callbacks */
- const args = [];
- /** asap key */
- const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
- // Register an event listener
- window.addEventListener('message', event => {
- if (event.source !== window || event.data !== ASAP_KEY)
- return;
- event.stopPropagation();
- if (callbacks.length == 0)
- return;
- const fn = callbacks.pop();
- const argArray = args.pop();
- fn.apply(undefined, argArray);
- }, true);
- /**
- * Schedule a function to run "as soon as possible"
- * @param fn callback
- * @param params optional parameters
- */
- function asap(fn, ...params) {
- callbacks.unshift(fn);
- args.unshift(params);
- window.postMessage(ASAP_KEY, '*');
- }
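- // Illustrative sketch (not part of the library): asap() runs the callback in a separate
- // macrotask with minimal delay, using postMessage instead of setTimeout(fn, 0):
- //
- // asap((a, b) => console.log(a + b), 2, 3); // prints 5 "as soon as possible"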
-
- ;// CONCATENATED MODULE: ./src/core/session.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * session.ts
- * WebAR Session
- */
-
-
-
-
-
-
-
-
-
-
-
-
- /** An event emitted by a Session */
- class SessionEvent extends AREvent {
- }
- /** Default options when starting a session */
- const DEFAULT_OPTIONS = {
- mode: 'immersive',
- trackers: [],
- sources: [],
- viewport: null,
- stats: false,
- gizmos: false,
- };
- /**
- * A Session represents an intent to display AR content
- * and encapsulates the main loop (update-render cycle)
- */
- class Session extends AREventTarget {
- /**
- * Constructor
- * @param sources previously initialized sources of data
- * @param mode session mode
- * @param viewport viewport
- * @param stats render stats panel?
- * @param gizmos render gizmos?
- */
- constructor(sources, mode, viewport, stats, gizmos) {
- super();
- this._mode = mode;
- this._trackers = [];
- this._sources = sources;
- this._updateStats = new Stats();
- this._renderStats = new Stats();
- this._active = true;
- this._frameReady = true; // no trackers at the moment
- this._rafQueue = [];
- this._time = new Time();
- this._gizmos = new Gizmos();
- this._gizmos.visible = gizmos;
- // get media
- const media = this.media;
- // setup the viewport
- if (mode == 'immersive')
- this._viewport = new ImmersiveViewport(viewport, () => media.size);
- else if (mode == 'inline')
- this._viewport = new InlineViewport(viewport, () => media.size);
- else
- throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
- this._viewport._init();
- // setup the main loop
- this._setupUpdateLoop();
- this._setupRenderLoop();
- // setup the stats panel
- this._statsPanel = new StatsPanel(this._viewport);
- this._statsPanel.visible = stats;
- // done!
- Session._count++;
- Utils.log(`The ${mode} session is now active!`);
- }
- /**
- * Checks if the engine can be run in the browser the client is using
- * @returns true if the engine is compatible with the browser
- */
- static isSupported() {
- //alert(Utils.deviceInfo()); // debug
- // If Safari / iOS, require version 15.2 or later
- if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
- /*
-
- iOS compatibility
- -----------------
-
- The engine is known to work on iPhone 8 or later, with iOS 15.2 or
- later. Tested on many devices, including iPads, on the cloud.
-
- The engine crashes on an iPhone 13 Pro Max with iOS 15.1 and on an
- iPhone 12 Pro with iOS 15.0.2. A (valid) shader from speedy-vision
- version 0.9.1 (bf-knn) fails to compile: "WebGL error. Program has
- not been successfully linked".
-
- The engine freezes on an older iPhone 6S (2015) with iOS 15.8.2.
- The exact cause is unknown, but it happens when training an image
- tracker, at ImageTrackerTrainingState._gpuUpdate() (a WebGL error?
- a hardware limitation?)
-
- Successfully tested down to iPhone 8 so far.
- Successfully tested down to iOS 15.2.
-
- >> WebGL2 support was introduced in Safari 15 <<
-
- Note: the webp image format used in the demos is supported on
- Safari for iOS 14+. Desktop Safari 14-15.6 supports webp, but
- requires macOS 11 Big Sur or later. https://caniuse.com/webp
-
- */
- const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
- const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
- const matches = safari || ios; // match safari first (min version)
- if (matches !== null) {
- const version = matches[3] || '0.0';
- const [x, y] = version.split(/[\._]/).map(v => parseInt(v) | 0);
- if ((x < 15) || (x == 15 && y < 2)) {
- Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
- return false;
- }
- // XXX reject older iPhone models? Which ones?
- /*if(navigator.userAgent.includes('iPhone')) {
- // detect screen size?
- }*/
- }
- else
- Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
- }
- // Android: reject very old / weak devices?
- // XXX establish criteria?
- /*if(Utils.isAndroid()) {
- }*/
- // Check if WebGL2 and WebAssembly are supported
- return speedy_vision_default().isSupported();
- }
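- // Worked example (illustrative): a desktop Safari user agent containing
- // "AppleWebKit/605.1.15 ... Version/15.4" is captured by the `safari` regex above with
- // version "15.4", which splits into [15, 4] and passes the minimum (15, 2) requirement.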
- /**
- * Instantiate a session
- * @param options options
- * @returns a promise that resolves to a new session
- */
- static instantiate(options = DEFAULT_OPTIONS) {
- const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
- Utils.log(`Starting a new ${mode} session...`);
- return speedy_vision_default().Promise.resolve().then(() => {
- // is the engine supported?
- if (!Session.isSupported())
- throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with the MARTINS.js engine');
- // block multiple immersive sessions
- if (mode !== 'inline' && Session.count > 0)
- throw new IllegalOperationError(`Can't start more than one immersive session`);
- // initialize matrix routines
- return speedy_vision_default().Matrix.ready();
- }).then(() => {
- // validate sources of data
- const videoSources = sources.filter(source => source._type == 'video');
- if (videoSources.length != 1)
- throw new IllegalArgumentError(`One video source of data must be provided`);
- for (let i = sources.length - 1; i >= 0; i--) {
- if (sources.indexOf(sources[i]) < i)
- throw new IllegalArgumentError(`Found repeated sources of data`);
- }
- // initialize sources of data
- return speedy_vision_default().Promise.all(sources.map(source => source._init()));
- }).then(() => {
- // get the viewport
- if (!viewport)
- throw new IllegalArgumentError(`Can't create a session without a viewport`);
- // instantiate session
- return new Session(sources, mode, viewport, stats, gizmos);
- }).then(session => {
- // validate trackers
- if (trackers.length == 0)
- Utils.warning(`No trackers have been attached to the session!`);
- for (let i = trackers.length - 1; i >= 0; i--) {
- if (trackers.indexOf(trackers[i]) < i)
- throw new IllegalArgumentError(`Found repeated trackers`);
- }
- // attach trackers and return the session
- return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
- }).catch(err => {
- // log errors, if any
- Utils.error(`Can't start session: ${err.message}`);
- throw err;
- });
- }
- /**
- * Number of active sessions
- */
- static get count() {
- return this._count;
- }
- /**
- * End the session
- * @returns promise that resolves after the session is shut down
- */
- end() {
- // is the session inactive?
- if (!this._active)
- return speedy_vision_default().Promise.resolve();
- // deactivate the session
- Utils.log('Shutting down the session...');
- this._active = false; // set before wait()
- // wait a few ms, so that the GPU is no longer sending any data
- const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
- setTimeout(resolve, ms);
- });
- // release resources
- return wait(100).then(() => speedy_vision_default().Promise.all(
- // release trackers
- this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
- // release input sources
- this._sources.map(source => source._release()))).then(() => {
- this._sources.length = 0;
- this._trackers.length = 0;
- // release internal components
- this._updateStats.reset();
- this._renderStats.reset();
- this._statsPanel.release();
- this._viewport._release();
- // end the session
- Session._count--;
- // dispatch event
- const event = new SessionEvent('end');
- this.dispatchEvent(event);
- // done!
- Utils.log('Session ended.');
- });
- }
- /**
- * Analogous to window.requestAnimationFrame()
- * @param callback
- * @returns a handle
- */
- requestAnimationFrame(callback) {
- const handle = Symbol('raf-handle');
- if (this._active)
- this._rafQueue.push([handle, callback]);
- else
- throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
- return handle;
- }
- /**
- * Analogous to window.cancelAnimationFrame()
- * @param handle a handle returned by this.requestAnimationFrame()
- */
- cancelAnimationFrame(handle) {
- for (let i = this._rafQueue.length - 1; i >= 0; i--) {
- if (this._rafQueue[i][0] === handle) {
- this._rafQueue.splice(i, 1);
- break;
- }
- }
- }
- /**
- * The underlying media (generally a camera stream)
- * @internal
- */
- get media() {
- for (let i = this._sources.length - 1; i >= 0; i--) {
- if (this._sources[i]._type == 'video')
- return this._sources[i]._data;
- }
- // this shouldn't happen
- throw new IllegalOperationError(`Invalid input source`);
- }
- /**
- * Session mode
- */
- get mode() {
- return this._mode;
- }
- /**
- * Rendering viewport
- */
- get viewport() {
- return this._viewport;
- }
- /**
- * Time utilities
- */
- get time() {
- return this._time;
- }
- /**
- * Visual cues for testing & debugging
- */
- get gizmos() {
- return this._gizmos;
- }
- /**
- * Attach a tracker to the session
- * @param tracker
- */
- _attachTracker(tracker) {
- if (this._trackers.indexOf(tracker) >= 0)
- throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
- else if (!this._active)
- throw new IllegalOperationError(`Inactive session`);
- this._trackers.push(tracker);
- return tracker._init(this);
- }
- /**
- * Render the user media to the background canvas
- */
- _renderUserMedia() {
- const canvas = this._viewport._backgroundCanvas;
- const ctx = canvas.getContext('2d', { alpha: false });
- if (ctx && this.media.type != 'data') {
- ctx.imageSmoothingEnabled = false;
- // draw user media
- const image = this.media.source;
- ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
- // render output image(s)
- for (let i = 0; i < this._trackers.length; i++) {
- const media = this._trackers[i]._output.image;
- if (media !== undefined) {
- const image = media.source;
- ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
- //ctx.drawImage(image, canvas.width - media.width, canvas.height - media.height, media.width, media.height);
- }
- }
- // render gizmos
- this._gizmos._render(this._viewport, this._trackers);
- }
- }
- /**
- * Setup the update loop
- */
- _setupUpdateLoop() {
- const scheduleNextFrame = () => {
- if (this._active) {
- if (Settings.powerPreference == 'high-performance')
- asap(repeat);
- else
- window.requestAnimationFrame(repeat);
- }
- };
- const update = () => {
- this._update().then(scheduleNextFrame).turbocharge();
- };
- function repeat() {
- if (Settings.powerPreference == 'low-power') // 30 fps
- window.requestAnimationFrame(update);
- else
- update();
- }
- window.requestAnimationFrame(update);
- }
- /**
- * The core of the update loop
- */
- _update() {
- // active session?
- if (this._active) {
- return speedy_vision_default().Promise.all(
- // update trackers
- this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
- // update internals
- this._updateStats.update();
- this._frameReady = true;
- }).catch(err => {
- // log error
- Utils.error('Tracking error: ' + err.toString(), err);
- // handle WebGL errors
- const cause = err.cause;
- if (err.name == 'GLError') {
- alert(err.message); // fatal error?
- alert(Utils.deviceInfo()); // display useful info
- throw err;
- }
- else if (typeof cause == 'object' && cause.name == 'GLError') {
- alert(err.message);
- alert(cause.message);
- alert(Utils.deviceInfo());
- throw err;
- }
- });
- }
- else {
- // inactive session
- this._updateStats.reset();
- return speedy_vision_default().Promise.resolve();
- }
- }
- /**
- * Setup the render loop
- */
- _setupRenderLoop() {
- let skip = false, toggle = false;
- const render = (timestamp) => {
- const enableFrameSkipping = (Settings.powerPreference == 'low-power');
- const highPerformance = (Settings.powerPreference == 'high-performance');
- // advance time
- this._time._update(timestamp);
- // skip frames
- if (!enableFrameSkipping || !(skip = !skip))
- this._render(timestamp, false);
- //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
- // repeat
- if (this._active)
- window.requestAnimationFrame(render);
- };
- window.requestAnimationFrame(render);
- }
- /**
- * Render a frame (RAF callback)
- * @param time current time, in ms
- * @param skipUserMedia skip copying the pixels of the user media to the background canvas in order to reduce the processing load (video stream is probably at 30fps?)
- */
- _render(time, skipUserMedia) {
- // is the session active?
- if (this._active) {
- // are we ready to render a frame?
- if (this._frameReady) {
- // create a frame
- const results = this._trackers.map(tracker => tracker._output.exports || ({
- tracker: tracker,
- trackables: [],
- }));
- const frame = new Frame(this, results);
- // clone & clear the RAF queue
- const rafQueue = this._rafQueue.slice(0);
- this._rafQueue.length = 0;
- // render user media
- if (!skipUserMedia)
- this._renderUserMedia();
- // render frame
- for (let i = 0; i < rafQueue.length; i++)
- rafQueue[i][1].call(undefined, time, frame);
- // update internals
- this._renderStats.update();
- this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
- this._frameReady = false;
- }
- else {
- // skip frame
- ;
- // we'll update the renderStats even if we skip the frame,
- // otherwise this becomes updateStats! (approximately)
- // This is a window.requestAnimationFrame() call, so the
- // browser is rendering content even if we're not.
- this._renderStats.update();
- }
- }
- else {
- // inactive session
- this._renderStats.reset();
- }
- }
- }
- /** Number of active sessions */
- Session._count = 0;
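- // Illustrative sketch (not part of the library): starting and ending a session. The
- // `videoSource`, `tracker` and `viewport` variables are placeholders for objects created
- // elsewhere with the engine's factories (not shown in this module).
- //
- // Session.instantiate({
- // mode: 'immersive',
- // sources: [ videoSource ],
- // trackers: [ tracker ],
- // viewport: viewport,
- // stats: true, // show the stats panel
- // gizmos: true // render debugging gizmos
- // }).then(session => {
- // // ... later: session.end();
- // });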
-
- ;// CONCATENATED MODULE: ./src/core/settings.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * settings.ts
- * Global Settings
- */
-
-
-
-
- /**
- * Global Settings
- */
- class Settings {
- /**
- * Power preference (may impact performance x battery life)
- */
- static get powerPreference() {
- return this._powerPreference;
- }
- /**
- * Power preference (may impact performance x battery life)
- * Note: this setting should be the very first thing you set
- * (before the WebGL context is created by Speedy)
- */
- static set powerPreference(value) {
- // validate
- if (Session.count > 0)
- throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
- else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
- throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
- /*
- // we won't use 'high-performance' for Speedy's GPU computations
- // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
- // also, it seems like low-power mode may break WebGL2 in some drivers?!
-
- if(value == 'high-performance')
- Speedy.Settings.powerPreference = 'default';
- else
- Speedy.Settings.powerPreference = value;
- */
- // change the GPU polling mode
- if (value == 'high-performance')
- (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
- else
- (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
- // update the power preference
- this._powerPreference = value;
- // log
- Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
- }
- }
- Settings._powerPreference = 'default';
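- // Illustrative sketch (not part of the library): the power preference must be set before
- // any session is started (and before the WebGL context is created by Speedy):
- //
- // Settings.powerPreference = 'low-power'; // e.g., favor battery life on mobile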
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * reference-image-database.ts
- * A collection of Reference Images
- */
-
-
- /** Default capacity of a Reference Image Database */
- const DEFAULT_CAPACITY = 100; // this number should exceed normal usage
- // XXX this number may be changed (is 100 too conservative?)
- // further testing is needed to verify the appropriateness of this number;
- // it depends on the images, on the keypoint descriptors, and even on the target devices
- /** Generate a unique name for a reference image */
- const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
- /**
- * A collection of Reference Images
- */
- class ReferenceImageDatabase {
- /**
- * Constructor
- */
- constructor() {
- this._capacity = DEFAULT_CAPACITY;
- this._database = [];
- this._locked = false;
- }
- /**
- * The number of reference images stored in this database
- */
- get count() {
- return this._database.length;
- }
- /**
- * Maximum number of elements
- */
- get capacity() {
- return this._capacity;
- }
- /**
- * Maximum number of elements
- * Increasing the capacity is considered experimental
- */
- set capacity(value) {
- const capacity = Math.max(0, value | 0);
- if (this.count > capacity)
- throw new IllegalArgumentError(`Can't set the capacity of the database to ${capacity}: it currently stores ${this.count} entries`);
- this._capacity = capacity;
- }
- /**
- * Iterates over the collection
- */
- *[Symbol.iterator]() {
- const ref = this._database.map(entry => entry.referenceImage);
- yield* ref;
- }
- /**
- * Add reference images to this database
- * Add only the images you actually need to track!
- * (each image takes up storage space)
- * @param referenceImages one or more reference images with unique names (a unique name will
- * be generated automatically if you don't specify one)
- * @returns a promise that resolves as soon as the images are loaded and added to this database
- */
- add(referenceImages) {
- // handle no input
- if (referenceImages.length == 0)
- return speedy_vision_default().Promise.resolve();
- // handle multiple images as input
- if (referenceImages.length > 1) {
- const promises = referenceImages.map(image => this.add([image]));
- return speedy_vision_default().Promise.all(promises).then(() => void (0));
- }
- // handle a single image as input
- const referenceImage = referenceImages[0];
- // locked database?
- if (this._locked)
- throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
- // reached full capacity?
- if (this.count >= this.capacity)
- throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images would be exceeded.`);
- // check for duplicate names
- if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
- throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
- // load the media and add the reference image to the database
- return speedy_vision_default().load(referenceImage.image).then(media => {
- this._database.push({
- referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
- media: media
- });
- });
- }
- /**
- * Lock the database, so that new reference images can no longer be added to it
- * @internal
- */
- _lock() {
- this._locked = true;
- }
- /**
- * Get the media object associated to a reference image
- * @param name reference image name
- * @returns media
- * @internal
- */
- _findMedia(name) {
- for (let i = 0; i < this._database.length; i++) {
- if (this._database[i].referenceImage.name === name)
- return this._database[i].media;
- }
- throw new IllegalArgumentError(`Can't find reference image "${name}"`);
- }
- }
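- // Illustrative sketch (not part of the library): adding a reference image to a database.
- // `db` is a placeholder for the ReferenceImageDatabase of an image tracker and `img` for
- // a previously loaded HTMLImageElement.
- //
- // db.add([{
- // name: 'my-target', // unique name (auto-generated if omitted)
- // image: img
- // }]).then(() => console.log('reference image added'));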
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * settings.ts
- * Settings of the Image Tracker
- */
- /** Default tracking resolution */
- const DEFAULT_TRACKING_RESOLUTION = 'sm+';
- /** Maximum number of keypoints to be stored for each reference image when in the training state */
- const TRAIN_MAX_KEYPOINTS = 1024; //512;
- /** Percentage relative to the screen size adjusted to the aspect ratio of the reference image */
- const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
- /** Normalized width & height of an image target, in pixels */
- const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
- /** Used to identify the best matches */
- const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
- /** Maximum number of keypoints to be analyzed when in the scanning state */
- const SCAN_MAX_KEYPOINTS = 512;
- /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
- const SCAN_PYRAMID_LEVELS = 4; //7;
- /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
- const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
- /** Threshold of the FAST corner detector used in the scanning/training states */
- const SCAN_FAST_THRESHOLD = 60;
- /** Minimum number of accepted matches for us to move out from the scanning state */
- const SCAN_MIN_MATCHES = 20; //30;
- /** When in the scanning state, we require the image to be matched during a few consecutive frames before accepting it */
- const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
- /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
- const SCAN_RANSAC_REPROJECTIONERROR = 5;
- /** Number of tables used in the LSH-based keypoint matching */
- const SCAN_LSH_TABLES = 8; // up to 32
- /** Hash size, in bits, used in the LSH-based keypoint matching */
- const SCAN_LSH_HASHSIZE = 15; // up to 16
- /** Use the Nightvision filter when in the scanning/training state? */
- const SCAN_WITH_NIGHTVISION = true;
- /** Nightvision filter: gain */
- const NIGHTVISION_GAIN = 0.3; // 0.2;
- /** Nightvision filter: offset */
- const NIGHTVISION_OFFSET = 0.5;
- /** Nightvision filter: decay */
- const NIGHTVISION_DECAY = 0.0;
- /** Nightvision filter: quality level */
- const NIGHTVISION_QUALITY = 'low';
- /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
- const ORB_GAUSSIAN_KSIZE = 9;
- /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
- const ORB_GAUSSIAN_SIGMA = 2.0;
- /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
- const SUBPIXEL_GAUSSIAN_KSIZE = 5;
- /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
- const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
- /** Subpixel refinement method */
- const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
- /** Minimum acceptable number of matched keypoints when in the tracking state */
- const TRACK_MIN_MATCHES = 4; //10; //20;
- /** Maximum number of keypoints to be analyzed in the tracking state */
- const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
- /** Capacity of the keypoint detector used in the tracking state */
- const TRACK_DETECTOR_CAPACITY = 2048; //4096;
- /** Quality of the Harris/Shi-Tomasi corner detector */
- const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
- /** Use the Nightvision filter when in the tracking state? */
- const TRACK_WITH_NIGHTVISION = false; // produces shaking?
- /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
- const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
- /** Relative size (%) used to clip keypoints from the borders of the rectified image */
- const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
- /** Number of iterations used to refine the target image before tracking */
- const TRACK_REFINEMENT_ITERATIONS = 3;
- /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
- const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
- /** We use a N x N grid to spatially distribute the keypoints in order to compute a better homography */
- const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
- /** Used to identify the best matches */
- const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
- /** Number of consecutive frames in which we tolerate a "target lost" situation */
- const TRACK_LOST_TOLERANCE = 10;
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * state.ts
- * Abstract state of the Image Tracker
- */
-
-
-
- /**
- * Abstract state of the Image Tracker
- */
- class ImageTrackerState {
- /**
- * Constructor
- * @param name
- * @param imageTracker
- */
- constructor(name, imageTracker) {
- this._name = name;
- this._imageTracker = imageTracker;
- this._pipeline = this._createPipeline();
- }
- /**
- * State name
- */
- get name() {
- return this._name;
- }
- /**
- * AR screen size
- */
- get screenSize() {
- const screen = this._pipeline.node('screen');
- if (!screen)
- throw new IllegalOperationError();
- // this is available once this state has run at least once
- return screen.size;
- }
- /**
- * Initialize the state
- */
- init() {
- }
- /**
- * Release resources
- */
- release() {
- return this._pipeline.release();
- }
- /**
- * Update the state
- * @param media user media
- * @param screenSize AR screen size for image processing
- * @returns promise
- */
- update(media, screenSize) {
- const source = this._pipeline.node('source');
- const screen = this._pipeline.node('screen');
- // validate the pipeline
- if (!source || !screen)
- throw new IllegalOperationError();
- // prepare the pipeline
- source.media = media;
- screen.size = screenSize;
- // run the pipeline
- return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- }
- /**
- * Called when leaving the state, after update()
- */
- onLeaveState() {
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * GPU processing
- * @returns promise with the pipeline results
- */
- _gpuUpdate() {
- return this._pipeline.run();
- }
- //
- // Some utility methods common to various states
- //
- /**
- * Find the coordinates of a polyline surrounding the target image
- * @param homography maps the target image to the AR screen
- * @param targetSize size of the target space
- * @returns promise that resolves to 4 points in AR screen space
- */
- _findPolylineCoordinates(homography, targetSize) {
- const w = targetSize.width, h = targetSize.height;
- const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
- 0, 0,
- w, 0,
- w, h,
- 0, h,
- ]);
- const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
- return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
- }
- /**
- * Find a polyline surrounding the target image
- * @param homography maps the target image to the AR screen
- * @param targetSize size of the target space
- * @returns promise that resolves to 4 points in AR screen space
- */
- _findPolyline(homography, targetSize) {
- return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
- const polydata = polylineCoordinates.read();
- const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
- return polyline;
- });
- }
- /**
- * Whether or not to rotate the warped image in order to best fit the AR screen
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns boolean
- */
- _mustRotateWarpedImage(media, screenSize) {
- const screenAspectRatio = screenSize.width / screenSize.height;
- const mediaAspectRatio = media.width / media.height;
- const eps = 0.1;
- return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
- }
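- // Worked example (illustrative): a 16:9 reference image (aspect ratio ~1.78 >= 1.1)
- // displayed on a portrait AR screen (aspect ratio ~0.56 < 0.9) must be rotated;
- // two landscape (or two portrait) aspect ratios leave the warped image unrotated.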
- /**
- * Find a rectification matrix to be applied to an image fitting the entire AR screen
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findRectificationMatrixOfFullscreenImage(media, screenSize) {
- const b = TRACK_RECTIFIED_BORDER;
- const sw = screenSize.width, sh = screenSize.height;
- const mediaAspectRatio = media.width / media.height;
- const mustRotate = this._mustRotateWarpedImage(media, screenSize);
- // compute the vertices of the target in screen space
- // we suppose portrait or landscape mode for both screen & media
- const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
- const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
- const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
- const right = sw - left;
- const bottom = sh - top;
- const targetVertices = speedy_vision_default().Matrix(2, 4, [
- left, top,
- right, top,
- right, bottom,
- left, bottom,
- ]);
- const screenVertices = speedy_vision_default().Matrix(2, 4, [
- 0, 0,
- sw, 0,
- sw, sh,
- 0, sh
- ]);
- const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
- const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
- const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
- return (mustRotate ? speedy_vision_default().Matrix.perspective(
- // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
- preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
- // alignment: align the target to the center of the screen
- speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
- // pre-rectify and then align
- rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
- }
- /**
- * Find a rectification matrix to be applied to the target image
- * @param homography maps a reference image to the AR screen
- * @param targetSize size of the target space
- * @param media media associated with the reference image
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
- const sw = screenSize.width, sh = screenSize.height;
- const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
- const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
- return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
- // from target space to (full)screen
- speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
- // from (full)screen to rectified coordinates
- this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
- // function composition
- rectificationMatrix.setTo(mat.times(rectificationMatrix)));
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * initial.ts
- * Initial state of the Image Tracker
- */
-
-
-
-
- /**
- * The purpose of the initial state of the Image Tracker
- * is to initialize the training state using the state machine
- */
- class ImageTrackerInitialState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('initial', imageTracker);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const source = this._pipeline.node('source');
- const media = source.media;
- const mediaSize = media.size;
- if (mediaSize.area() < this.screenSize.area())
- Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- return speedy_vision_default().Promise.resolve({
- nextState: 'training',
- trackerOutput: {},
- });
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- // this pipeline does nothing useful,
- // but it does preload some shaders...
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
- const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
- const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
- const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
- const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- nightvision.quality = NIGHTVISION_QUALITY;
- subpixel.method = SUBPIXEL_METHOD;
- //borderClipper.imageSize = screen.size;
- borderClipper.imageSize = speedy_vision_default().Size(100, 100);
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- matcher.k = 1; //2;
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- keypointPortalSource.source = keypointPortalSink;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(blur.input());
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
- muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
- muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // store reference keypoints
- keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
- bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
- keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
- // portals
- descriptor.output().connectTo(keypointPortalSink.input());
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- // done!
- pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
- /*
- const run = pipeline.run.bind(pipeline);
- pipeline.run = function() {
- console.time("TIME");
- return run().then(x => {
- console.timeEnd("TIME");
- return x;
- });
- };
- */
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * training.ts
- * Training state of the Image Tracker
- */
-
-
-
-
-
- /**
- * Training state of the Image Tracker
- */
- class ImageTrackerTrainingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('training', imageTracker);
- this._currentImageIndex = 0;
- this._image = [];
- // initialize the training map
- this._trainingMap = {
- referenceImageIndex: [],
- referenceImage: [],
- keypoints: []
- };
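- // note: keypoints[i] belongs to referenceImage[referenceImageIndex[i]]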
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const database = this._imageTracker.database;
- // validate
- if (database.count == 0)
- throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
- // prepare to train...
- this._currentImageIndex = 0;
- this._image.length = 0;
- this._trainingMap.referenceImageIndex.length = 0;
- this._trainingMap.referenceImage.length = 0;
- this._trainingMap.keypoints.length = 0;
- // lock the database
- Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
- database._lock();
- // collect all images
- for (const referenceImage of database)
- this._image.push(referenceImage);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const arScreenSize = this.screenSize;
- const source = this._pipeline.node('source');
- const screen = this._pipeline.node('screen');
- const keypointScaler = this._pipeline.node('keypointScaler');
- // this shouldn't happen
- if (this._currentImageIndex >= this._image.length)
- return speedy_vision_default().Promise.reject(new IllegalOperationError());
- // set the appropriate training media
- const database = this._imageTracker.database;
- const referenceImage = this._image[this._currentImageIndex];
- const media = database._findMedia(referenceImage.name);
- source.media = media;
- // compute the appropriate size of the training image space
- const resolution = this._imageTracker.resolution;
- const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
- const aspectRatioOfTrainingImage = media.width / media.height;
- /*
- let sin = 0, cos = 1;
-
- if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
- // training image and source video: both in landscape mode or both in portrait mode
- screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- }
- else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
- // training image: portrait mode; source video: landscape mode
- screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- sin = 1; cos = 0; // rotate 90deg
- }
- else {
- // training image: landscape mode; source video: portrait mode
- }
- */
- screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
- screen.size.width = Math.round(screen.size.width * scale);
- screen.size.height = Math.round(screen.size.height * scale);
- // convert keypoints from the training image space to AR screen space
- // let's pretend that trained keypoints belong to the AR screen space,
- // regardless of the size of the target image. This will make things
- // easier when computing the homography.
- /*
- const sw = arScreenSize.width / screen.size.width;
- const sh = arScreenSize.height / screen.size.height;
- */
- const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
- const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
- keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
- sw, 0, 0,
- 0, sh, 0,
- 0, 0, 1,
- ]);
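- // e.g., a keypoint at (x, y) in training image space ends up at
- // (sw * x, sh * y) in the normalized target space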
- // log
- Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
- // done!
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const referenceImage = this._image[this._currentImageIndex];
- const keypoints = result.keypoints;
- const image = result.image;
- // log
- Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
- // update the training map, so that each keypoint can be mapped back to its reference image
- this._trainingMap.referenceImage.push(referenceImage);
- for (let i = 0; i < keypoints.length; i++) {
- this._trainingMap.keypoints.push(keypoints[i]);
- this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
- }
- // the current image has been processed!
- ++this._currentImageIndex;
- // set output
- if (this._currentImageIndex >= this._image.length) {
- // finished training!
- return speedy_vision_default().Promise.resolve({
- //nextState: 'training',
- nextState: 'scanning',
- nextStateSettings: {
- keypoints: this._trainingMap.keypoints,
- },
- trackerOutput: {},
- //trackerOutput: { image, keypoints, screenSize: this.screenSize },
- });
- }
- else {
- // we're not done yet
- return speedy_vision_default().Promise.resolve({
- nextState: 'training',
- trackerOutput: {},
- //trackerOutput: { image, keypoints, screenSize: this.screenSize },
- });
- }
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
- const pyramid = speedy_vision_default().Image.Pyramid();
- const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const blurredPyramid = speedy_vision_default().Image.Pyramid();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
- const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- detector.levels = SCAN_PYRAMID_LEVELS;
- detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
- detector.threshold = SCAN_FAST_THRESHOLD;
- detector.capacity = 8192;
- subpixel.method = SUBPIXEL_METHOD;
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- clipper.size = TRAIN_MAX_KEYPOINTS;
- keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess image
- greyscale.output().connectTo(nightvisionMux.input('in0'));
- greyscale.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(pyramid.input());
- // keypoint detection
- pyramid.output().connectTo(detector.input());
- detector.output().connectTo(clipper.input());
- // keypoint refinement
- greyscale.output().connectTo(denoiser.input()); // reduce noise
- denoiser.output().connectTo(blurredPyramid.input());
- clipper.output().connectTo(subpixel.input('keypoints'));
- blurredPyramid.output().connectTo(subpixel.input('image'));
- // keypoint description
- greyscale.output().connectTo(blur.input());
- blur.output().connectTo(descriptor.input('image'));
- clipper.output().connectTo(descriptor.input('keypoints'));
- // prepare output
- descriptor.output().connectTo(keypointScaler.input());
- keypointScaler.output().connectTo(keypointSink.input());
- nightvisionMux.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
- return pipeline;
- }
- /**
- * Get reference image
- * @param keypointIndex index of the keypoint
- * @returns the corresponding reference image, or null if not found
- */
- referenceImageOfKeypoint(keypointIndex) {
- const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
- if (imageIndex < 0)
- return null;
- return this._trainingMap.referenceImage[imageIndex];
- }
- /**
- * Get reference image index
- * @param keypointIndex index of the keypoint
- * @returns reference image index, or -1 if not found
- */
- referenceImageIndexOfKeypoint(keypointIndex) {
- const n = this._trainingMap.referenceImageIndex.length;
- if (keypointIndex < 0 || keypointIndex >= n)
- return -1;
- const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
- if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
- return -1;
- return imageIndex;
- }
- /**
- * Get keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns the corresponding keypoint, or null if not found
- */
- referenceKeypoint(keypointIndex) {
- if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
- return null;
- return this._trainingMap.keypoints[keypointIndex];
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * scanning.ts
- * Scanning state of the Image Tracker
- */
-
-
-
-
- /** Default target space size (used when training) */
- const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
- /** Port of the portal multiplexer: get new data from the camera */
- const PORT_CAMERA = 0;
- /** Port of the portal multiplexer: get previously memorized data */
- const PORT_MEMORY = 1;
- /**
- * Scanning state of the Image Tracker
- */
- class ImageTrackerScanningState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('scanning', imageTracker);
- this._counter = 0;
- this._bestScore = 0;
- this._bestHomography = speedy_vision_default().Matrix.Eye(3);
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const imagePortalMux = this._pipeline.node('imagePortalMux');
- const lshTables = this._pipeline.node('lshTables');
- const keypoints = settings.keypoints;
- // set attributes
- this._counter = 0;
- this._bestScore = 0;
- // reset the image memorization circuit
- imagePortalMux.port = PORT_CAMERA;
- // prepare the keypoint matcher
- if (keypoints !== undefined)
- lshTables.keypoints = keypoints;
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const imagePortalMux = this._pipeline.node('imagePortalMux');
- const keypoints = result.keypoints;
- const matchedKeypoints = this._goodMatches(keypoints);
- // tracker output
- const trackerOutput = {
- keypoints: keypoints,
- screenSize: this.screenSize
- };
- // keep the last memorized image
- imagePortalMux.port = PORT_MEMORY;
- // have we found enough matches...?
- if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
- return this._findHomography(matchedKeypoints).then(([homography, score]) => {
- // have we found the best homography so far?
- if (score >= this._bestScore) {
- // store it only if we'll be running the pipeline again
- if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
- this._bestScore = score;
- this._bestHomography = homography;
- // memorize the last image, corresponding to the best homography(*)
- imagePortalMux.port = PORT_CAMERA;
- /*
-
- (*) technically speaking, this is not exactly the case. Since we're
- using turbo to download the keypoints, there's a slight difference
- between the data used to compute the homography and the last image.
- Still, assuming continuity of the video stream, this logic is
- good enough.
-
- */
- }
- }
- // find a polyline surrounding the target
- return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
- }).then(polyline => {
- // continue a little longer in the scanning state
- if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
- return {
- nextState: this.name,
- trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
- };
- }
- // this image should correspond to the best homography
- const snapshot = this._pipeline.node('imagePortalSink');
- // the reference image that we'll track
- const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
- // let's track the target!
- return {
- nextState: 'pre-tracking',
- nextStateSettings: {
- homography: this._bestHomography,
- snapshot: snapshot,
- referenceImage: referenceImage,
- },
- trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
- };
- }).catch(() => {
- // continue in the scanning state
- return {
- nextState: this.name,
- trackerOutput: trackerOutput,
- };
- });
- }
- else {
- // not enough matches...!
- this._counter = 0;
- this._bestScore = 0;
- }
- // we'll continue to scan the scene
- return speedy_vision_default().Promise.resolve({
- nextState: this.name,
- trackerOutput: trackerOutput,
- });
- }
- /**
- * Find "high quality" matches of a single reference image
- * @param keypoints
- * @returns high quality matches
- */
- _goodMatches(keypoints) {
- const matchedKeypointsPerImageIndex = Object.create(null);
- // filter "good matches"
- for (let j = keypoints.length - 1; j >= 0; j--) {
- const keypoint = keypoints[j];
- if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
- const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
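- // ratio test: e.g., if SCAN_MATCH_RATIO were 0.7, a best match at distance 30
- // would only be accepted if the second best match were at distance ~43 or more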
- // the best match should be "much better" than the second best match,
- // which means that they are "distinct enough"
- if (d1 <= SCAN_MATCH_RATIO * d2) {
- const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
- //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
- //if(idx1 == idx2 && idx1 >= 0) {
- if (idx1 >= 0) {
- if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
- matchedKeypointsPerImageIndex[idx1] = [];
- matchedKeypointsPerImageIndex[idx1].push(keypoint);
- }
- }
- }
- }
- // find the image with the most matches
- let matchedKeypoints = [];
- for (const imageIndex in matchedKeypointsPerImageIndex) {
- if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
- matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
- }
- // done!
- return matchedKeypoints;
- }
- /**
- * Find a homography matrix using matched keypoints
- * @param matchedKeypoints "good" matches only
- * @returns homography from reference image space to AR screen space & homography "quality" score
- */
- _findHomography(matchedKeypoints) {
- const srcCoords = [];
- const dstCoords = [];
- // find matching coordinates of the keypoints
- for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
- const matchedKeypoint = matchedKeypoints[i];
- const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
- if (referenceKeypoint != null) {
- srcCoords.push(referenceKeypoint.x);
- srcCoords.push(referenceKeypoint.y);
- dstCoords.push(matchedKeypoint.x);
- dstCoords.push(matchedKeypoint.y);
- }
- else {
- // this shouldn't happen
- return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
- }
- }
- // too few points?
- const n = srcCoords.length / 2;
- if (n < 4) {
- return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
- }
- // compute a homography
- const src = speedy_vision_default().Matrix(2, n, srcCoords);
- const dst = speedy_vision_default().Matrix(2, n, dstCoords);
- const mask = speedy_vision_default().Matrix.Zeros(1, n);
- const homography = speedy_vision_default().Matrix.Zeros(3);
- return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
- method: 'pransac',
- reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512,
- bundleSize: 128,
- mask: mask,
- }).then(homography => {
- // check if this is a valid homography
- const a00 = homography.at(0, 0);
- if (Number.isNaN(a00))
- throw new DetectionError(`Can't compute homography`);
- // count the number of inliers
- const inliers = mask.read();
- let inlierCount = 0;
- for (let i = inliers.length - 1; i >= 0; i--)
- inlierCount += inliers[i];
- const score = inlierCount / inliers.length;
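- // the score is the inlier ratio in [0,1]; e.g., 60 inliers out of
- // 80 matched points yield a score of 0.75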
- // done!
- return [homography, score];
- });
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
- const pyramid = speedy_vision_default().Image.Pyramid();
- const detector = speedy_vision_default().Keypoint.Detector.FAST();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
- const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
- const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
- const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
- const imagePortalBuffer = speedy_vision_default().Image.Buffer();
- const imagePortalCopy = speedy_vision_default().Transform.Resize();
- //const imageSink = Speedy.Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- detector.levels = SCAN_PYRAMID_LEVELS;
- detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
- detector.threshold = SCAN_FAST_THRESHOLD;
- detector.capacity = 2048;
- clipper.size = SCAN_MAX_KEYPOINTS;
- lshTables.keypoints = [];
- lshTables.numberOfTables = SCAN_LSH_TABLES;
- lshTables.hashSize = SCAN_LSH_HASHSIZE;
- knn.k = 2;
- knn.quality = 'default';
- //knn.quality = 'fastest';
- imagePortalSource.source = imagePortalSink;
- imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
- imagePortalCopy.size = speedy_vision_default().Size(0, 0);
- imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
- keypointSink.turbo = true;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess image
- greyscale.output().connectTo(blur.input());
- greyscale.output().connectTo(nightvisionMux.input('in0'));
- greyscale.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(pyramid.input());
- // keypoint detection
- pyramid.output().connectTo(detector.input());
- detector.output().connectTo(clipper.input());
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- clipper.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(knn.input('keypoints'));
- lshTables.output().connectTo(knn.input('lsh'));
- // prepare output
- clipper.output().connectTo(keypointSink.input());
- knn.output().connectTo(keypointSink.input('matches'));
- //pyramid.output().connectTo(imageSink.input());
- // memorize image
- source.output().connectTo(imagePortalBuffer.input());
- imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
- imagePortalSource.output().connectTo(imagePortalCopy.input());
- imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
- imagePortalMux.output().connectTo(imagePortalSink.input());
- // done!
- pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * pre-tracking.ts
- * Pre-tracking state of the Image Tracker
- */
-
-
-
-
-
- /** Default target space size (used when training) */
- const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
- /** Use the camera stream as the input of the pipeline */
- const PORT_CAMERA_IMAGE = 1;
- /** Use the reference image as the input of the pipeline */
- const PORT_REFERENCE_IMAGE = 0;
- /**
- * The pre-tracking state of the Image Tracker is a new training
- * phase for the specific target that we're about to track
- */
- class ImageTrackerPreTrackingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('pre-tracking', imageTracker);
- this._homography = speedy_vision_default().Matrix.Eye(3);
- this._referenceImage = null;
- this._step = 'read-reference-image';
- this._referenceKeypoints = [];
- this._iterations = 0;
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const imagePortalSource = this._pipeline.node('imagePortalSource');
- const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
- const homography = settings.homography;
- const referenceImage = settings.referenceImage;
- const snapshot = settings.snapshot;
- // this shouldn't happen
- if (!referenceImage)
- throw new TrackingError(`Can't track a null reference image`);
- // set attributes
- this._homography = homography;
- this._referenceImage = referenceImage;
- this._step = 'read-reference-image';
- this._referenceKeypoints = [];
- this._iterations = 0;
- // setup the pipeline
- imagePortalSource.source = snapshot;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const referenceImage = this._referenceImage;
- const source = this._pipeline.node('source');
- const sourceMux = this._pipeline.node('sourceMux');
- const imageRectifier = this._pipeline.node('imageRectifier');
- const keypointRectifier = this._pipeline.node('keypointRectifier');
- const borderClipper = this._pipeline.node('borderClipper');
- const screenSize = this.screenSize;
- // set the source media to the reference image we're going to track
- const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
- source.media = targetMedia;
- // setup the source multiplexer
- if (this._step == 'read-reference-image')
- sourceMux.port = PORT_REFERENCE_IMAGE;
- else
- sourceMux.port = PORT_CAMERA_IMAGE;
- // clip keypoints from the borders of the target image
- borderClipper.imageSize = screenSize;
- borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
- // rectify the image
- const rectify = (this._step == 'read-reference-image') ?
- this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
- this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
- return rectify.then(rectificationMatrix => {
- imageRectifier.transform = rectificationMatrix;
- });
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const referenceImage = this._referenceImage;
- const imagePortalSink = this._pipeline.node('imagePortal');
- const keypointPortalSink = this._pipeline.node('keypointPortalSink');
- const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
- const keypoints = result.keypoints;
- const image = result.image;
- // tracker output
- const trackerOutput = {
- keypoints: image !== undefined ? keypoints : undefined,
- image: image,
- screenSize: this.screenSize,
- };
- // decide what to do next
- switch (this._step) {
- case 'read-reference-image': {
- // enable matching
- muxOfReferenceKeypoints.port = 1;
- // store reference keypoints
- this._referenceKeypoints = keypoints;
- // next step
- this._step = 'warp-camera-image';
- return speedy_vision_default().Promise.resolve({
- nextState: 'pre-tracking',
- trackerOutput: trackerOutput,
- });
- }
- case 'warp-camera-image': {
- // freeze reference keypoints
- bufferOfReferenceKeypoints.frozen = true;
- muxOfBufferOfReferenceKeypoints.port = 1;
- // refine warp?
- if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
- this._step = 'warp-camera-image';
- else
- this._step = 'train-camera-image';
- // warp image & go to next step
- return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
- nextState: 'pre-tracking',
- trackerOutput: trackerOutput,
- })).catch(err => {
- Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
- return {
- nextState: 'scanning',
- trackerOutput: trackerOutput,
- };
- });
- }
- case 'train-camera-image': {
- // log
- Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
- // change the coordinates
- return this._changeSpace(this._homography, this.screenSize).then(homography => {
- // we're ready to track the target!
- return speedy_vision_default().Promise.resolve({
- //nextState: 'pre-tracking',
- nextState: 'tracking',
- trackerOutput: trackerOutput,
- nextStateSettings: {
- homography: homography,
- referenceImage: referenceImage,
- templateKeypoints: keypoints,
- keypointPortalSink: keypointPortalSink,
- imagePortalSink: imagePortalSink,
- screenSize: this.screenSize,
- },
- });
- });
- }
- }
- }
- /**
- * Find an adjustment warp between the camera image and the reference image
- * @param dstKeypoints destination
- * @param srcKeypoints source
- * @returns a promise that resolves to a 3x3 homography
- */
- _findWarp(dstKeypoints, srcKeypoints) {
- //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
- const srcCoords = [];
- const dstCoords = [];
- // find matching coordinates of the keypoints
- for (let i = 0; i < dstKeypoints.length; i++) {
- const dstKeypoint = dstKeypoints[i];
- if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
- const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
- // the best match should be "much better" than the second best match,
- // which means that they are "distinct enough"
- if (d1 <= TRACK_MATCH_RATIO * d2) {
- const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
- srcCoords.push(srcKeypoint.x);
- srcCoords.push(srcKeypoint.y);
- dstCoords.push(dstKeypoint.x);
- dstCoords.push(dstKeypoint.y);
- }
- }
- }
- // too few points?
- const n = srcCoords.length / 2;
- if (n < 4) {
- return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
- }
- // compute warp
- const model = speedy_vision_default().Matrix.Eye(3);
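- // note: srcCoords.concat(dstCoords) packs the source points into the first
- // n columns and the destination points into the last n columns of a 2 x 2n
- // matrix (column-major); both halves receive the same rectifying warp below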
- return this._findKeypointWarp().then(transform =>
- // rectify keypoints
- speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
- // find warp
- speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512 * 4,
- bundleSize: 128,
- })).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute warp: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a warp to be applied to the keypoints
- * @returns affine transform
- */
- _findKeypointWarp() {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const screenSize = this.screenSize;
- // no rotation is needed
- if (!this._mustRotateWarpedImage(media, screenSize))
- return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
- // rotate by 90 degrees clockwise around the pivot
- const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
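- // in column-major form this matrix is p' = R(p - c) + c with R = [0 -1; 1 0]
- // and c = (px, py); e.g., (px, py) stays fixed and (px + 1, py) maps to (px, py + 1)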
- return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
- 0, 1, 0,
- -1, 0, 0,
- py + px, py - px, 1,
- ]));
- }
- /**
- * Change the space of the homography in order to improve tracking quality
- * @param homography mapping coordinates from normalized target space to AR screen space
- * @param screenSize AR screen size
- * @returns homography mapping coordinates from AR screen space to AR screen space
- */
- _changeSpace(homography, screenSize) {
- const sw = screenSize.width, sh = screenSize.height;
- const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
- const mat = speedy_vision_default().Matrix.Zeros(3);
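- // the polyline holds the projected corners of the target in AR screen space;
- // the resulting homography maps the full screen rectangle onto that quadrilateral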
- return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
- const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
- const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
- const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
- const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
- const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imagePortalSource.source = null;
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- sourceMux.port = PORT_REFERENCE_IMAGE;
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- detector.quality = TRACK_HARRIS_QUALITY;
- detector.capacity = TRACK_DETECTOR_CAPACITY;
- subpixel.method = SUBPIXEL_METHOD;
- clipper.size = TRACK_MAX_KEYPOINTS;
- borderClipper.imageSize = screen.size;
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- matcher.k = 2;
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- keypointPortalSource.source = keypointPortalSink;
- muxOfReferenceKeypoints.port = 0;
- muxOfBufferOfReferenceKeypoints.port = 0;
- bufferOfReferenceKeypoints.frozen = false;
- keypointSink.turbo = false;
- // prepare input
- source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
- imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
- sourceMux.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- nightvisionMux.output().connectTo(blur.input());
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
- muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
- muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // store reference keypoints
- keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
- bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
- keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
- // portals
- descriptor.output().connectTo(keypointPortalSink.input());
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- //imageRectifier.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
- return pipeline;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * image-tracker-event.ts
- * Events emitted by an Image Tracker
- */
-
- /**
- * An event emitted by an Image Tracker
- */
- class ImageTrackerEvent extends AREvent {
- /**
- * Constructor
- * @param type event type
- * @param referenceImage optional reference image
- */
- constructor(type, referenceImage) {
- super(type);
- this._referenceImage = referenceImage;
- }
- /**
- * Reference image
- */
- get referenceImage() {
- return this._referenceImage;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * camera-model.ts
- * Camera model
- */
-
-
-
-
- /** Number of samples we'll be keeping to help calibrate the camera */
- const INTRISICS_SAMPLES = 401; //201; //31; // odd number
- /** Whether or not to auto-calibrate the camera */
- const FOVY_AUTODETECT = false; //true;
- /** A guess of the vertical field-of-view of a generic camera, in degrees */
- const FOVY_GUESS = 45; //50; // will be part of the viewing frustum
- /** Number of iterations used to refine the estimated pose */
- const POSE_ITERATIONS = 30;
- /** Number of samples used in the rotation filter */
- const ROTATION_FILTER_SAMPLES = 10;
- /** Number of samples used in the translation filter */
- const TRANSLATION_FILTER_SAMPLES = 10;
- /** Convert degrees to radians */
- const DEG2RAD = 0.017453292519943295; // pi / 180
- /** Convert radians to degrees */
- const RAD2DEG = 57.29577951308232; // 180 / pi
- /** Numerical tolerance */
- const EPSILON = 1e-6;
- /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
- const FX = 0;
- /** Index of the vertical focal length in the camera intrinsics matrix */
- const FY = 4;
- /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
- const U0 = 6;
- /** Index of the vertical position of the principal point in the camera intrinsics matrix */
- const V0 = 7;
- /** Translation refinement: predefined buffers for efficiency */
- const TRANSLATION_REFINEMENT_BUFFERS = (() => {
- const l = 1.0;
- const x = [0, l, 0, -l, 0];
- const y = [-l, 0, l, 0, 0];
- const n = x.length;
- return Object.freeze({
- x, y,
- a1: new Array(n),
- a2: new Array(n),
- a3: new Array(n),
- m: new Array(3 * n * 3),
- v: new Array(3 * n),
- t: new Array(3),
- r: new Array(3 * n),
- c: new Array(3),
- Mc: new Array(3 * n),
- });
- })();
- /** Translation refinement: number of iterations */
- const TRANSLATION_REFINEMENT_ITERATIONS = 3; // 1; // 5;
- /** Translation refinement: number of samples */
- const TRANSLATION_REFINEMENT_SAMPLES = 5; // TRANSLATION_REFINEMENT_BUFFERS.x.length;
- /** Translation refinement: three times the number of samples */
- const TRANSLATION_REFINEMENT_SAMPLES_3X = 15; //3 * TRANSLATION_REFINEMENT_SAMPLES;
- /**
- * Camera model
- */
- class CameraModel {
- /**
- * Constructor
- */
- constructor() {
- this._screenSize = speedy_vision_default().Size(0, 0);
- this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
- this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // identity matrix
- this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // no rotation & no translation [ R | t ] = [ I | 0 ]
- this._f = (new Array(INTRISICS_SAMPLES)).fill(this._intrinsics[FY]);
- this._fp = 0;
- this._partialRotationBuffer = [];
- this._translationBuffer = [];
- }
- /**
- * Initialize the model
- * @param screenSize
- */
- init(screenSize) {
- // validate
- if (screenSize.area() == 0)
- throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
- // set the screen size
- this._screenSize.width = screenSize.width;
- this._screenSize.height = screenSize.height;
- // reset the model
- this._resetIntrinsics();
- this._resetExtrinsics();
- // log
- Utils.log(`Initializing the camera model...`);
- }
- /**
- * Release the model
- */
- release() {
- this.reset();
- return null;
- }
- /**
- * Update the camera model
- * @param homography 3x3 perspective transform
- * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
- * @returns promise that resolves to a camera matrix
- */
- update(homography, screenSize) {
- // validate the shape of the homography
- if (homography.rows != 3 || homography.columns != 3)
- throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
- // validate screenSize
- if (screenSize.area() == 0)
- throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
- // changed screen size?
- if (!this._screenSize.equals(screenSize)) {
- Utils.log(`Camera model: detected a change in screen size...`);
- // update the screen size
- this._screenSize.width = screenSize.width;
- this._screenSize.height = screenSize.height;
- // reset camera
- this.reset();
- }
- // read the entries of the homography
- const h = homography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
- // validate the homography (homography matrices aren't singular)
- const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
- if (Math.abs(det) < EPSILON) {
- Utils.warning(`Can't update the camera model using an invalid homography matrix`);
- return speedy_vision_default().Promise.resolve(this._matrix);
- }
- // estimate the focal length (auto-calibration)
- const f = this._estimateFocal(homography);
- if (f > 0)
- this._storeFocal(f);
- //console.log(this.fovy * RAD2DEG);
- // estimate the pose
- const pose = this._estimatePose(homography);
- this._storePose(pose);
- // compute the camera matrix
- const C = this.denormalizer();
- const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
- const E = speedy_vision_default().Matrix(3, 4, this._extrinsics);
- this._matrix.setToSync(K.times(E).times(C));
- //console.log("intrinsics -----------", K.toString());
- //console.log("matrix ----------------",this._matrix.toString());
- return speedy_vision_default().Promise.resolve(this._matrix);
- }
- /**
- * Reset camera model
- */
- reset() {
- this._resetIntrinsics();
- this._resetExtrinsics();
- }
- /**
- * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
- * 2D AR screen space (measured in pixels)
- * @returns 3x4 camera matrix
- */
- get matrix() {
- return this._matrix;
- }
- /**
- * Camera intrinsics matrix
- * @returns 3x3 intrinsics matrix in column-major format
- */
- get intrinsics() {
- return this._intrinsics;
- }
- /**
- * Camera extrinsics matrix
- * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
- */
- get extrinsics() {
- return this._extrinsics;
- }
- /**
- * Convert coordinates from normalized space [-1,1]^3 to a
- * "3D pixel space" based on the dimensions of the AR screen.
- *
- * We perform a 180-degree rotation around the x-axis so that
- * it looks nicer (the y-axis grows downwards in image space).
- *
- * The final camera matrix is P = K * [ R | t ] * C, where
- * C is this conversion matrix. The intent behind this is to
- * make tracking independent of target and screen sizes.
- *
- * Reminder: we use a right-handed coordinate system in 3D!
- * In 2D image space the coordinate system is left-handed.
- *
- * @returns 4x4 conversion matrix C
- */
- denormalizer() {
- const w = this._screenSize.width / 2; // half width, in pixels
- const h = this._screenSize.height / 2; // half height, in pixels
- const d = Math.min(w, h); // virtual unit length, in pixels
- /*
- return Speedy.Matrix(4, 4, [
- 1, 0, 0, 0,
- 0,-1, 0, 0,
- 0, 0,-1, 0,
- w/d, h/d, 0, 1/d
- ]);
- */
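- // example of what C does: the normalized point (0, 0, 0) is mapped to the
- // center of the screen (w, h), and (1, -1, 0) is mapped to (w + d, h + d),
- // i.e., d pixels to the right of and below the center (the y-axis is
- // flipped in image space)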
- return speedy_vision_default().Matrix(4, 4, [
- d, 0, 0, 0,
- 0, -d, 0, 0,
- 0, 0, -d, 0,
- w, h, 0, 1,
- ]);
- }
- /**
- * Size of the AR screen space, in pixels
- * @returns size in pixels
- */
- get screenSize() {
- return this._screenSize;
- }
- /**
- * Focal length in pixel units (projection distance in the pinhole camera model),
- * i.e., (focal length in mm) * (number of pixels per mm of the sensor)
- * @returns focal length
- */
- get focalLength() {
- return this._intrinsics[FY]; // fx == fy
- }
- /**
- * Horizontal field-of-view, given in radians
- * @returns horizontal field-of-view
- */
- get fovx() {
- return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
- }
- /**
- * Vertical field-of-view, given in radians
- * @returns vertical field-of-view
- */
- get fovy() {
- return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
- }
- /**
- * Principal point
- * @returns principal point, in pixel coordinates
- */
- principalPoint() {
- return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
- }
- /**
- * Reset camera extrinsics
- */
- _resetExtrinsics() {
- // set the rotation matrix to the identity
- this._extrinsics.fill(0);
- this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
- // reset filters
- this._partialRotationBuffer.length = 0;
- this._translationBuffer.length = 0;
- }
- /**
- * Reset camera intrinsics
- */
- _resetIntrinsics() {
- const u0 = this._screenSize.width / 2;
- const v0 = this._screenSize.height / 2;
- const f = v0 / Math.tan(DEG2RAD * FOVY_GUESS / 2);
- this._intrinsics[FX] = f;
- this._intrinsics[FY] = f;
- this._intrinsics[U0] = u0;
- this._intrinsics[V0] = v0;
- this._f.fill(this._intrinsics[FY]);
- this._fp = 0;
- }
- /**
- * Estimate the focal length
- * @param homography valid homography
- * @returns estimated focal length; the current focal length if it can't be estimated, or 0 if auto-detection is disabled
- */
- _estimateFocal(homography) {
- // auto-detect the focal length?
- if (!FOVY_AUTODETECT)
- return 0;
- // read the entries of the homography
- const h = homography.read();
- const h11 = h[0], h12 = h[3]; //, h13 = h[6];
- const h21 = h[1], h22 = h[4]; //, h23 = h[7];
- const h31 = h[2], h32 = h[5]; //, h33 = h[8];
- // read the principal point
- const u0 = this._intrinsics[U0];
- const v0 = this._intrinsics[V0];
- // estimate the focal length based on the orthogonality
- // constraint r1'r2 = 0 of a rotation matrix
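- // sketch of the derivation, assuming a single focal length f (fx == fy):
- // H ~ K [ r1 | r2 | t ], so the first two columns of K^(-1) H are parallel
- // to r1 and r2, and their dot product must vanish:
- //
- // (h11 - u0 h31)(h12 - u0 h32) / f^2 + (h21 - v0 h31)(h22 - v0 h32) / f^2 + h31 h32 = 0
- //
- // dividing by h31 h32 and solving for f^2 gives the expression below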
- const f2 = -((h11 / h31 - u0) * (h12 / h32 - u0) + (h21 / h31 - v0) * (h22 / h32 - v0));
- // can't estimate it?
- if (f2 < 0)
- return this._intrinsics[FY];
- //return 0;
- // done!
- return Math.sqrt(f2);
- }
- /**
- * Store an estimated focal length
- * @param f estimated focal length
- */
- _storeFocal(f) {
- // store the focal length
- this._f[this._fp] = f;
- this._fp = (this._fp + 1) % INTRISICS_SAMPLES;
- // take the median of the estimated focal lengths
- const sorted = this._f.concat([]).sort((a, b) => a - b);
- const median = sorted[sorted.length >>> 1];
- // update the intrinsics matrix
- this._intrinsics[FX] = this._intrinsics[FY] = median;
- /*
- // test
- const u0 = this._intrinsics[U0];
- const v0 = this._intrinsics[V0];
- const fovx = 2 * Math.atan(u0 / median) * RAD2DEG;
- const fovy = 2 * Math.atan(v0 / median) * RAD2DEG;
- console.log('---------------');
- console.log("fov:",fovx,fovy);
- console.log("f:",median);
- */
- }
- /**
- * Compute a normalized homography H' = K^(-1) * H (up to scale) for an
- * ideal pinhole with f = 1 and principal point = (0,0)
- * @param homography homography H to be normalized
- * @param f focal length
- * @returns normalized homography H'
- */
- _normalizeHomography(homography, f = this._intrinsics[FY]) {
- const h = homography.read();
- const u0 = this._intrinsics[U0];
- const v0 = this._intrinsics[V0];
- const h11 = h[0] - u0 * h[2], h12 = h[3] - u0 * h[5], h13 = h[6] - u0 * h[8];
- const h21 = h[1] - v0 * h[2], h22 = h[4] - v0 * h[5], h23 = h[7] - v0 * h[8];
- const h31 = h[2] * f, h32 = h[5] * f, h33 = h[8] * f;
- return speedy_vision_default().Matrix(3, 3, [
- h11, h21, h31,
- h12, h22, h32,
- h13, h23, h33,
- ]);
- }
- /**
- * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
- * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
- * @returns a 3x3 matrix
- */
- _estimatePartialPose(normalizedHomography) {
- const h = normalizedHomography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6];
- const h21 = h[1], h22 = h[4], h23 = h[7];
- const h31 = h[2], h32 = h[5], h33 = h[8];
- // select the sign so that t3 = tz > 0
- const sign = h33 >= 0 ? 1 : -1;
- // compute the scale factor
- const h1norm = Math.sqrt(h11 * h11 + h21 * h21 + h31 * h31);
- const h2norm = Math.sqrt(h12 * h12 + h22 * h22 + h32 * h32);
- //const scale = sign * 2 / (h1norm + h2norm);
- //const scale = sign / h1norm;
- //const scale = sign / h2norm;
- const scale = sign / Math.max(h1norm, h2norm); // this seems to work. why?
- // invalid homography?
- if (Number.isNaN(scale))
- return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
- // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
- // if h1norm is not approximately h2norm, it means that the first two columns of
- // the normalized homography are not really encoding a rotation (up to a scale)
- // what is causing this? does h3 (and h33) tell us anything about it?
- // what about the intrinsics matrix? the principal point...? the fov...?
- //console.log("h1,h2",h1norm,h2norm);
- //console.log(normalizedHomography.toString());
- // recover the translation and the rotation
- const t1 = scale * h13;
- const t2 = scale * h23;
- const t3 = scale * h33;
- const r11 = scale * h11;
- const r21 = scale * h21;
- const r31 = scale * h31;
- const r12 = scale * h12;
- const r22 = scale * h22;
- const r32 = scale * h32;
- // refine the pose
- const r = this._refineRotation(r11, r21, r31, r12, r22, r32);
- const t = this._refineTranslation(normalizedHomography, r, [t1, t2, t3]);
- //const t = [t1, t2, t3]; // faster, but less accurate
- // done!
- return speedy_vision_default().Matrix(3, 3, r.concat(t)); // this is possibly NaN... why? homography...
- }
- /**
- * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
- * @param r11 x of r1
- * @param r21 y of r1
- * @param r31 z of r1
- * @param r12 x of r2
- * @param r22 y of r2
- * @param r32 z of r2
- * @returns a 3x2 matrix R such that R'R = I (column-major format)
- */
- _refineRotation(r11, r21, r31, r12, r22, r32) {
- /*
-
- A little technique I figured out to correct the rotation vectors
- ----------------------------------------------------------------
-
- We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
- R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
- because vectors r1 and r2 are not perfectly orthonormal due to noise.
-
- Let's first notice that R'R is symmetric. You can easily check that its
- two eigenvalues are both real and positive (as long as r1, r2 != 0 and
- r1 is not parallel to r2, but we never take such vectors as input).
-
- R'R = [ r1'r1  r1'r2 ; r1'r2  r2'r2 ] is of rank 2, positive-definite
-
- We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
- chosen to be orthogonal and D is a diagonal matrix whose entries are
- the eigenvalues of R'R.
-
- Let LL' be the Cholesky decomposition of D. Such decomposition exists
- and is trivially computed: just take the square roots of the entries of
- D. Since L is diagonal, we have L = L'. Its inverse is also trivially
- computed - call it Linv.
-
- Now, define a 2x2 correction matrix C as follows:
-
- C = Q * Linv * Q'
-
- This matrix rotates the input vector, scales it by some amount, and
- then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
-
- We compute RC in order to correct the rotation vectors. We take its
- two columns as the corrected vectors.
-
- In order to show that the two columns of RC are orthonormal, we can
- show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
- expand the expression:
-
- (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
- Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
- Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
-
- I have provided below a closed formula to correct the rotation vectors.
-
- What C does to R is very interesting: it makes the singular values
- become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
- values of R are the square roots of the eigenvalues of R'R. Letting
- S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
- means that RC is equivalent to the correction "trick" using the SVD
- found in the computer vision literature (i.e., compute the SVD and
- return U V'). That "trick" is known to return the rotation matrix that
- minimizes the Frobenius norm of the difference between the input and
- the output. Consequently, the technique I have just presented is also
- optimal in that sense!
-
- By the way, the input matrix R does not need to be 3x2.
-
- */
- // compute the entries of R'R
- const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
- const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
- const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
- // compute the two real eigenvalues of R'R
- const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
- const sqrt = Math.sqrt(delta); // delta >= 0 always
- const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
- const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
- // compute two unit eigenvectors qi = (xi,yi) of R'R
- const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
- const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
- const y1 = x1 / alpha1;
- const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
- const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
- const y2 = x2 / alpha2;
- // compute the Cholesky decomposition LL' of the diagonal matrix D
- // whose entries are the two eigenvalues of R'R and then invert L
- const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
- const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
- // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
- // is orthogonal and Linv is computed as above
- const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
- const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
- const C = Q.times(Linv).times(Qt);
- // correct the rotation vectors r1 and r2 using C
- const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
- return speedy_vision_default().Matrix(R.times(C)).read();
- }
- /**
- * Compute a refined translation vector
- * @param normalizedHomography ideal pinhole K = I
- * @param rot rotation vectors [ r1 | r2 ] in column-major format
- * @param t0 initial estimate for the translation vector
- * @returns 3x1 translation vector in column-major format
- */
- _refineTranslation(normalizedHomography, rot, t0) {
- /*
-
- Given a normalized homography H, the rotation vectors r1, r2, and a
- translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
- scale factor s.
-
- If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
- [ r1 | r2 | t ] u is parallel to H u, which means that their cross
- product is zero:
-
- [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
-
- The following code finds an optimal translation vector t based on the
- above observation. H, r1, r2 are known.
-
- */
- const B = TRANSLATION_REFINEMENT_BUFFERS;
- const n = TRANSLATION_REFINEMENT_SAMPLES;
- const n3 = TRANSLATION_REFINEMENT_SAMPLES_3X;
- Utils.assert(B.x.length === n);
- const h = normalizedHomography.read();
- const h11 = h[0], h12 = h[3], h13 = h[6];
- const h21 = h[1], h22 = h[4], h23 = h[7];
- const h31 = h[2], h32 = h[5], h33 = h[8];
- const r11 = rot[0], r12 = rot[3];
- const r21 = rot[1], r22 = rot[4];
- const r31 = rot[2], r32 = rot[5];
- // get sample points (xi, yi), 0 <= i < n
- const x = B.x, y = B.y;
- // set auxiliary values: ai = H [ xi yi 1 ]'
- const a1 = B.a1, a2 = B.a2, a3 = B.a3;
- for (let i = 0; i < n; i++) {
- a1[i] = x[i] * h11 + y[i] * h12 + h13;
- a2[i] = x[i] * h21 + y[i] * h22 + h23;
- a3[i] = x[i] * h31 + y[i] * h32 + h33;
- }
- // solve M t = v for t; M: 3n x 3, v: 3n x 1, t: 3 x 1 (linear least squares)
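- // structure of the system: each sample contributes a 3x3 block to M and a
- // 3x1 block to v. The block of M is the cross-product (skew-symmetric)
- // matrix of a = H u, i.e., [ 0 -a3 a2 ; a3 0 -a1 ; -a2 a1 0 ], and the
- // block of v is -a x (x r1 + y r2), so that M t = v encodes the constraint
- // ( x r1 + y r2 + t ) x H u = 0 for every sample point u = [ x y 1 ]'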
- const m = B.m, v = B.v;
- for (let i = 0, k = 0; k < n; i += 3, k++) {
- m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
- m[i + n3] = -(m[i + 1] = a3[k]);
- m[i + 2] = -(m[i + n3 + n3] = a2[k]);
- m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
- v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
- v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
- v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
- }
- /*
- // this works, but I want something more lightweight
- const M = Speedy.Matrix(n3, 3, m);
- const v_ = Speedy.Matrix(n3, 1, v);
- return Speedy.Matrix(M.ldiv(v_)).read();
- */
- /*
-
- Gradient descent with optimal step size / learning rate
- -------------------------------------------------------
-
- Let's find the column-vector x that minimizes the error function
- E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
- least squares. We want to find x easily, QUICKLY and iteratively.
-
- The update rule of gradient descent is set to:
-
- x := x - w * grad(E)
-
- where w is the learning rate and grad(E) is the gradient of E(x):
-
- grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
-
- Let's adjust w to make x "converge quickly". Define function S(w) as:
-
- S(w) = x - w * grad(E) (step)
-
- and another function F(w) as:
-
- F(w) = E(S(w))
-
- which is the error of the step. We minimize F by setting its derivative
- to zero:
-
- 0 = dF/dw = (dF/dS) (dS/dw)
-
- What follows is a fair amount of algebra. Do the math and you'll find
- the following optimal update rule:
-
- x := x - ( (c'c) / ( (Ac)'(Ac) ) ) c
-
- where c = A'r = A'(Ax - b)
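- 
- In more detail: writing r = Ax - b and stepping along c (the factor of 2
- in grad(E) = 2c is absorbed into the step size), the error of the step is
- 
- F(w) = | A(x - wc) - b |^2 = | r - w(Ac) |^2 = r'r - 2w (Ac)'r + w^2 (Ac)'(Ac)
- 
- Setting dF/dw = 0 gives w = (Ac)'r / ( (Ac)'(Ac) ) = (c'c) / ( (Ac)'(Ac) ),
- because (Ac)'r = c'(A'r) = c'c. That is the update rule stated above.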
-
- */
- // initial guess
- const t = B.t;
- t[0] = t0[0];
- t[1] = t0[1];
- t[2] = t0[2];
- // gradient descent: super lightweight implementation
- const r = B.r, c = B.c, Mc = B.Mc;
- for (let it = 0; it < TRANSLATION_REFINEMENT_ITERATIONS; it++) {
- // compute residual r = Mt - v
- for (let i = 0; i < n3; i++) {
- r[i] = 0;
- for (let j = 0; j < 3; j++)
- r[i] += m[j * n3 + i] * t[j];
- r[i] -= v[i];
- }
- // compute c = M'r
- for (let i = 0; i < 3; i++) {
- c[i] = 0;
- for (let j = 0; j < n3; j++)
- c[i] += m[i * n3 + j] * r[j];
- }
- // compute Mc
- for (let i = 0; i < n3; i++) {
- Mc[i] = 0;
- for (let j = 0; j < 3; j++)
- Mc[i] += m[j * n3 + i] * c[j];
- }
- // compute num = c'c and den = (Mc)'(Mc)
- let num = 0, den = 0;
- for (let i = 0; i < 3; i++)
- num += c[i] * c[i];
- for (let i = 0; i < n3; i++)
- den += Mc[i] * Mc[i];
- // compute num / den
- const frc = num / den;
- if (Number.isNaN(frc))
- break;
- // iterate: t = t - (num / den) * c
- for (let i = 0; i < 3; i++)
- t[i] -= frc * c[i];
- }
- //console.log("OLD t:\n\n",t0.join('\n'));
- //console.log("new t:\n\n",t.join('\n'));
- // done!
- return t;
- }
- /**
- * Apply a smoothing filter to the partial pose
- * @param partialPose 3x3 [ r1 | r2 | t ]
- * @returns filtered partial pose
- */
- _filterPartialPose(partialPose) {
- const avg = new Array(9).fill(0);
- const entries = partialPose.read();
- const rotationBlock = entries.slice(0, 6);
- const translationBlock = entries.slice(6, 9);
- // how many samples should we store, at most?
- const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
- const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
- const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
- // is it a valid partial pose?
- if (!Number.isNaN(entries[0])) {
- // store samples
- this._partialRotationBuffer.unshift(rotationBlock);
- if (this._partialRotationBuffer.length > N)
- this._partialRotationBuffer.length = N;
- this._translationBuffer.unshift(translationBlock);
- if (this._translationBuffer.length > M)
- this._translationBuffer.length = M;
- }
- else if (this._partialRotationBuffer.length == 0) {
- // invalid pose, no samples
- return speedy_vision_default().Matrix.Eye(3);
- }
- // average *nearby* rotations
- const n = this._partialRotationBuffer.length;
- for (let i = 0; i < n; i++) {
- const r = this._partialRotationBuffer[i];
- for (let j = 0; j < 6; j++)
- avg[j] += r[j] / n;
- }
- const r = this._refineRotation(avg[0], avg[1], avg[2], avg[3], avg[4], avg[5]);
- // average translations
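- // (linearly decaying weights: the most recent sample gets weight m, the
- // oldest gets weight 1, and the weights sum to (m*m + m) / 2)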
- const m = this._translationBuffer.length;
- for (let i = 0; i < m; i++) {
- const t = this._translationBuffer[i];
- for (let j = 0; j < 3; j++)
- avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
- //avg[6 + j] += t[j] / m;
- }
- const t = [avg[6], avg[7], avg[8]];
- // done!
- return speedy_vision_default().Matrix(3, 3, r.concat(t));
- }
- /**
- * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
- * @param partialPose 3x3 matrix [ r1 | r2 | t ]
- * @returns 3x4 matrix
- */
- _estimateFullPose(partialPose) {
- const p = partialPose.read();
- const r11 = p[0], r12 = p[3], t1 = p[6];
- const r21 = p[1], r22 = p[4], t2 = p[7];
- const r31 = p[2], r32 = p[5], t3 = p[8];
- // r3 = +- ( r1 x r2 )
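- // (r1 and r2 are orthonormal at this point, so r1 x r2 is a unit vector
- // orthogonal to both and completes the rotation matrix)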
- let r13 = r21 * r32 - r31 * r22;
- let r23 = r31 * r12 - r11 * r32;
- let r33 = r11 * r22 - r21 * r12;
- // let's make sure that det R = +1 (keep the orientation)
- const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
- if (det < 0) {
- r13 = -r13;
- r23 = -r23;
- r33 = -r33;
- }
- // done!
- return speedy_vision_default().Matrix(3, 4, [
- r11, r21, r31,
- r12, r22, r32,
- r13, r23, r33,
- t1, t2, t3,
- ]);
- }
- /**
- * Estimate the pose [ R | t ] given a homography in AR screen space
- * @param homography must be valid
- * @param f focal length
- * @returns 3x4 matrix
- */
- _estimatePose(homography, f = this._intrinsics[FY]) {
- const normalizedHomography = this._normalizeHomography(homography, f);
- const partialPose = speedy_vision_default().Matrix.Eye(3);
- // we want the estimated partial pose [ r1 | r2 | t ] to be as close
- // as possible to the normalized homography, up to a scale factor;
- // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
- // it won't be a perfect equality due to noise in the homography
- const residual = speedy_vision_default().Matrix(normalizedHomography);
- for (let k = 0; k < POSE_ITERATIONS; k++) {
- // incrementally improve the partial pose
- const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
- partialPose.setToSync(rt.times(partialPose));
- residual.setToSync(residual.times(rt.inverse()));
- //console.log("residual",residual.toString());
- }
- //console.log('-----------');
- /*
- // test
- const result = Speedy.Matrix.Zeros(3);
- result.setToSync(partialPose.times(normalizedHomography.inverse()));
- const m11 = result.at(0,0);
- result.setToSync(result.times(1/m11));
- console.log("Pose * NORMALIZED HOM^-1", result.toString());
- */
- /*
- const rt = partialPose.read();
- const r = rt.slice(0, 6);
- const t = this._refineTranslation(normalizedHomography, r, rt.slice(6, 9));
- const refinedPartialPose = Speedy.Matrix(3, 3, r.concat(t));
- const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
- */
- // filter the partial pose
- const filteredPartialPose = this._filterPartialPose(partialPose);
- // estimate the full pose
- return this._estimateFullPose(filteredPartialPose);
- }
- /**
- * Store an estimated pose
- * @param pose 3x4 matrix
- */
- _storePose(pose) {
- this._extrinsics = pose.read();
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/pose.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * pose.ts
- * A pose represents a position and an orientation in a 3D space
- */
- /**
- * A pose represents a position and an orientation in a 3D space
- * (and sometimes a scale, too...)
- */
- class Pose {
- /**
- * Constructor
- * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
- */
- constructor(transform) {
- this._transform = transform;
- }
- /**
- * A transform describing the position and the orientation
- * of the pose relative to the 3D space to which it belongs
- */
- get transform() {
- return this._transform;
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/transform.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * transform.ts
- * 3D geometrical transforms
- */
-
-
- /**
- * A 3D transformation
- */
- class BaseTransform {
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- if (matrix.rows != 4 || matrix.columns != 4)
- throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
- this._matrix = matrix;
- }
- /**
- * The 4x4 transformation matrix (read-only)
- */
- get matrix() {
- return this._matrix;
- }
- }
- /**
- * An invertible 3D transformation
- */
- class InvertibleTransform extends BaseTransform {
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes an invertible transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
- return new InvertibleTransform(inverseMatrix);
- }
- }
- /**
- * A 3D transformation described by translation, rotation and scale
- */
- class StandardTransform extends InvertibleTransform {
- // TODO: position, rotation and scale attributes
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes a standard transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- /*
-
- The inverse of a 4x4 standard transform T * R * S...
-
- [ RS t ; 0' 1 ]   is   [ ZR' -ZR't ; 0' 1 ]
-
- where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
-
- */
- return super.inverse;
- }
- }
- /**
- * A 3D transformation described by position and orientation
- */
- class RigidTransform extends StandardTransform {
- // TODO: position and rotation attributes (need to decompose the matrix)
- /**
- * Constructor
- * @param matrix a 4x4 matrix
- */
- constructor(matrix) {
- // WARNING: we do not check if the matrix actually encodes a rigid transform!
- super(matrix);
- }
- /**
- * The inverse of the transform
- */
- get inverse() {
- /*
-
- The inverse of a 4x4 rigid transform
-
- [ R t ; 0' 1 ]   is   [ R' -R't ; 0' 1 ]
-
- where R is 3x3, t is 3x1 and 0' is 1x3
-
- */
- const m = this._matrix.read();
- if (m[15] == 0) // error? abs()??
- throw new IllegalOperationError('Not a rigid transform');
- const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
- const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
- const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
- const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
- const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
- const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
- const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
- const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
- const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
- r11, r12, r13, 0,
- r21, r22, r23, 0,
- r31, r32, r33, 0,
- -rt1, -rt2, -rt3, 1
- ]);
- return new RigidTransform(inverseMatrix);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewer-pose.ts
- * The pose of a virtual camera in 3D world space at a moment in time
- */
-
-
-
- /**
- * The pose of a virtual camera in 3D world space at a moment in time
- */
- class ViewerPose extends Pose {
- /**
- * Constructor
- * @param camera camera model
- */
- constructor(camera) {
- // compute the view matrix and its inverse in AR screen space
- const viewMatrix = ViewerPose._computeViewMatrix(camera);
- const inverseTransform = new RigidTransform(viewMatrix);
- super(inverseTransform.inverse);
- this._viewMatrix = viewMatrix;
- }
- /**
- * This 4x4 matrix moves 3D points from world space to viewer space. We
- * assume that the camera is looking in the direction of the negative
- * z-axis (WebGL-friendly)
- */
- get viewMatrix() {
- return this._viewMatrix;
- }
- /**
- * Compute the view matrix in AR screen space, measured in pixels
- * @param camera
- * @returns a 4x4 matrix describing a rotation and a translation
- */
- static _computeViewMatrix(camera) {
- /*
-
- // this is the view matrix in AR screen space, measured in pixels
- // we augment the extrinsics matrix, making it 4x4 by adding a
- // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
- const V = Speedy.Matrix(4, 4, [
- E[0], E[1], E[2], 0,
- E[3], E[4], E[5], 0,
- E[6], E[7], E[8], 0,
- E[9], E[10], E[11], 1
- ]);
-
- // we premultiply V by F, which performs a rotation around the
- // x-axis by 180 degrees, so that we get the 3D objects in front
- // of the camera pointing in the direction of the negative z-axis
- const F = Speedy.Matrix(4, 4, [
- 1, 0, 0, 0,
- 0,-1, 0, 0,
- 0, 0,-1, 0,
- 0, 0, 0, 1
- ]);
-
- Matrix F * V is matrix V with the second and third rows negated
-
- */
- const E = camera.extrinsics;
- return speedy_vision_default().Matrix(4, 4, [
- E[0], -E[1], -E[2], 0,
- E[3], -E[4], -E[5], 0,
- E[6], -E[7], -E[8], 0,
- E[9], -E[10], -E[11], 1
- ]);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/view.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * view.ts
- * A view of the 3D world at a moment in time,
- * featuring the means to project points into clip space
- */
-
-
-
- /** Default distance in pixels of the near plane to the optical center of the camera */
- const DEFAULT_NEAR = 1;
- /** Default distance in pixels of the far plane to the optical center of the camera */
- const DEFAULT_FAR = 20000;
- /**
- * A PerspectiveView is a View defining a symmetric frustum around the z-axis
- * (perspective projection)
- */
- class PerspectiveView {
- /**
- * Constructor
- * @param camera camera model
- * @param near distance of the near plane
- * @param far distance of the far plane
- */
- constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
- const intrinsics = camera.intrinsics;
- const screenSize = camera.screenSize;
- this._near = Math.max(0, +near);
- this._far = Math.max(0, +far);
- if (this._near >= this._far)
- throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
- this._aspect = screenSize.width / screenSize.height;
- this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
- this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
- }
- /**
- * A 4x4 projection matrix for WebGL
- */
- get projectionMatrix() {
- return this._projectionMatrix;
- }
- /**
- * Aspect ratio of the frustum
- */
- get aspect() {
- return this._aspect;
- }
- /**
- * Vertical field-of-view of the frustum, measured in radians
- */
- get fovy() {
- return 2 * Math.atan(this._tanOfHalfFovy);
- }
- /**
- * Distance of the near plane
- */
- get near() {
- return this._near;
- }
- /**
- * Distance of the far plane
- */
- get far() {
- return this._far;
- }
- /**
- * Compute a perspective projection matrix for WebGL
- * @param K camera intrinsics
- * @param near distance of the near plane
- * @param far distance of the far plane
- */
- static _computeProjectionMatrix(K, near, far) {
- // we assume that the principal point is at the center of the image
- const top = near * (K[V0] / K[FY]);
- const right = near * (K[U0] / K[FX]);
- const bottom = -top, left = -right; // symmetric frustum
- // a derivation of this projection matrix can be found at
- // https://www.songho.ca/opengl/gl_projectionmatrix.html
- // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
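- // note: for this symmetric frustum, (right+left)/(right-left) and
- // (top+bottom)/(top-bottom) vanish, and 2*near/(top-bottom) = K[FY]/K[V0] =
- // 1/tan(fovy/2), so this reduces to the familiar gluPerspective-style matrix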
- return speedy_vision_default().Matrix(4, 4, [
- 2 * near / (right - left), 0, 0, 0,
- 0, 2 * near / (top - bottom), 0, 0,
- (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
- 0, 0, -2 * far * near / (far - near), 0
- ]);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * viewer.ts
- * A viewer represents a virtual camera in 3D world space
- */
-
-
-
-
-
- /**
- * A viewer represents a virtual camera in 3D world space
- */
- class Viewer {
- /**
- * Constructor
- * @param camera camera model
- */
- constructor(camera) {
- this._pose = new ViewerPose(camera);
- this._views = [new PerspectiveView(camera)];
- }
- /**
- * The pose of this viewer
- */
- get pose() {
- return this._pose;
- }
- /**
- * The view of this viewer (only for monoscopic rendering)
- */
- get view() {
- /*
- if(this._views.length > 1)
- throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
- */
- return this._views[0];
- }
- /**
- * The views of this viewer
- */
- /*
- get views(): View[]
- {
- return this._views.concat([]);
- }
- */
- /**
- * Convert a pose from world space to viewer space
- * @param pose a pose in world space
- * @returns a pose in viewer space
- */
- convertToViewerSpace(pose) {
- const modelMatrix = pose.transform.matrix;
- const viewMatrix = this._pose.viewMatrix;
- const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
- const transform = new StandardTransform(modelViewMatrix);
- return new Pose(transform);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * tracking.ts
- * Tracking state of the Image Tracker
- */
-
-
-
-
-
-
-
-
-
-
-
- /** Whether or not we want to accelerate GPU-CPU transfers. Using turbo adds a slight delay to the tracking */
- const USE_TURBO = true;
- /** Number of PBOs; meaningful only when using turbo */
- const NUMBER_OF_PBOS = 2;
- /** Frame skipping; meaningful only when using turbo */
- const TURBO_SKIP = 2;
- /**
- * The tracking state of the Image Tracker tracks
- * keypoints of the image target and updates the
- * rectification matrix
- */
- class ImageTrackerTrackingState extends ImageTrackerState {
- /**
- * Constructor
- * @param imageTracker
- */
- constructor(imageTracker) {
- super('tracking', imageTracker);
- this._referenceImage = null;
- this._warpHomography = speedy_vision_default().Matrix.Eye(3);
- this._poseHomography = speedy_vision_default().Matrix.Eye(3);
- this._initialHomography = speedy_vision_default().Matrix.Eye(3);
- this._initialKeypoints = [];
- this._counter = 0;
- this._camera = new CameraModel();
- this._predictedKeypoints = [];
- this._lastPipelineOutput = { keypoints: [] };
- this._pipelineCounter = 0;
- this._lastOutput = {};
- this._lostCounter = 0;
- // we need at least 4 correspondences of points to compute a homography matrix
- Utils.assert(TRACK_MIN_MATCHES >= 4);
- }
- /**
- * Called as soon as this becomes the active state, just before update() runs for the first time
- * @param settings
- */
- onEnterState(settings) {
- const homography = settings.homography;
- const referenceImage = settings.referenceImage;
- const templateKeypoints = settings.templateKeypoints;
- const keypointPortalSink = settings.keypointPortalSink;
- const screenSize = settings.screenSize; // this.screenSize is not yet set
- const keypointPortalSource = this._pipeline.node('keypointPortalSource');
- // this shouldn't happen
- if (!referenceImage)
- throw new IllegalOperationError(`Can't track a null reference image`);
- // set attributes
- this._referenceImage = referenceImage;
- this._warpHomography = speedy_vision_default().Matrix(homography);
- this._poseHomography = speedy_vision_default().Matrix(homography);
- this._initialHomography = speedy_vision_default().Matrix(homography);
- this._initialKeypoints = templateKeypoints;
- this._counter = 0;
- this._predictedKeypoints = [];
- this._lastPipelineOutput = { keypoints: [] };
- this._pipelineCounter = 0;
- this._lastOutput = {};
- this._lostCounter = 0;
- // setup portals
- keypointPortalSource.source = keypointPortalSink;
- // setup camera
- this._camera.init(screenSize);
- // emit event
- const ev = new ImageTrackerEvent('targetfound', referenceImage);
- this._imageTracker.dispatchEvent(ev);
- // log
- Utils.log(`Tracking image "${referenceImage.name}"...`);
- }
- /**
- * Called when leaving the state
- */
- onLeaveState() {
- const referenceImage = this._referenceImage;
- // release the camera
- this._camera.release();
- // emit event
- const ev = new ImageTrackerEvent('targetlost', referenceImage);
- this._imageTracker.dispatchEvent(ev);
- }
- /**
- * Called just before the GPU processing
- * @returns promise
- */
- _beforeUpdate() {
- const imageRectifier = this._pipeline.node('imageRectifier');
- const borderClipper = this._pipeline.node('borderClipper');
- const keypointRectifier = this._pipeline.node('keypointRectifier');
- const screenSize = this.screenSize;
- /*
- // pause media (test)
- const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
- const media = source.media as SpeedyMedia;
- (media.source as HTMLVideoElement).pause();
- */
- // clip keypoints from the borders of the target image
- borderClipper.imageSize = screenSize;
- borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
- // rectify the image
- return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
- imageRectifier.transform = warp;
- });
- }
- /**
- * GPU processing
- * @returns promise with the pipeline results
- */
- _gpuUpdate() {
- //return super._gpuUpdate();
- // No turbo?
- if (!USE_TURBO || Settings.powerPreference == 'low-power')
- return super._gpuUpdate();
- // When using turbo, we reduce the GPU usage by skipping every other frame
- const counter = this._pipelineCounter;
- this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
- // Skip frame
- if (counter != 0) {
- if (this._lastPipelineOutput.keypoints !== undefined) {
- this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
- }
- else
- this._predictedKeypoints.length = 0;
- this._lastPipelineOutput.keypoints = this._predictedKeypoints;
- return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
- }
- // Run the pipeline and store the results
- return super._gpuUpdate().then(results => {
- this._lastPipelineOutput = results;
- return results;
- });
- }
- /**
- * Post processing that takes place just after the GPU processing
- * @param result pipeline results
- * @returns state output
- */
- _afterUpdate(result) {
- const imageRectifier = this._pipeline.node('imageRectifier');
- const keypoints = result.keypoints;
- const image = result.image;
- const referenceImage = this._referenceImage;
- // find the best keypoint matches
- return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
- // find motion models
- return speedy_vision_default().Promise.all([
- this._findAffineMotion(matches),
- this._findPerspectiveMotion(matches)
- ]);
- }).then(([affineMotion, perspectiveMotion]) => {
- const lowPower = (Settings.powerPreference == 'low-power');
- const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
- // update warp homography
- const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
- const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
- if (!USE_TURBO || this._counter % delay == remainder)
- this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
- // update pose homography
- if (!frozen)
- this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
- // update counter
- this._counter = (this._counter + 1) % delay;
- // update the camera
- if (!frozen)
- return this._camera.update(this._poseHomography, this.screenSize);
- else
- return this._camera.matrix;
- }).then(_ => {
- // find the inverse of the rectification matrix
- const rectificationMatrix = imageRectifier.transform;
- const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
- // move keypoints from rectified space back to image space
- const n = keypoints.length;
- const coords = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- coords[j] = keypoints[i].position.x;
- coords[j + 1] = keypoints[i].position.y;
- }
- return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
- /*
- // test image center
- const coords2: number[] = new Array(2 * n);
- for(let i = 0, j = 0; i < n; i++, j += 2) {
- coords2[j] = this._imageTracker.screenSize.width / 2;
- coords2[j+1] = this._imageTracker.screenSize.height / 2;
- if(i % 2 == 0) {
- coords2[j] = this._imageTracker.screenSize.width / 4;
- coords2[j+1] = this._imageTracker.screenSize.height / 4;
- }
- }
-
- return Speedy.Matrix.applyPerspectiveTransform(
- Speedy.Matrix.Zeros(2, n),
- Speedy.Matrix(2, n, coords2),
- this._poseHomography
- //this._warpHomography
- );
- */
- }).then(mat => {
- /*
-
- const n = keypoints.length;
- const coords = mat.read();
-
- // ** this will interfere with the calculations when frame skipping is on **
-
- // get keypoints in image space
- for(let i = 0, j = 0; i < n; i++, j += 2) {
- keypoints[i].position.x = coords[j];
- keypoints[i].position.y = coords[j+1];
- }
-
- */
- // find a polyline surrounding the target
- return this._findPolyline(this._poseHomography, this.screenSize);
- //return this._findPolyline(this._warpHomography, this.screenSize);
- }).then(polyline => {
- // we let the target object be at the origin of the world space
- // (identity transform). We also perform a change of coordinates,
- // so that we move out from pixel space and into normalized space
- const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
- const transform = new StandardTransform(modelMatrix);
- const pose = new Pose(transform);
- // given the current state of the camera model, we get a viewer
- // compatible with the pose of the target
- const viewer = new Viewer(this._camera);
- // the trackable object
- const trackable = {
- pose: pose,
- referenceImage: referenceImage
- };
- // the result generated by the image tracker
- const result = {
- tracker: this._imageTracker,
- trackables: [trackable],
- viewer: viewer
- };
- // build and save the output
- this._lastOutput = {
- exports: result,
- cameraMatrix: this._camera.matrix,
- homography: this._warpHomography,
- //keypoints: keypoints,
- screenSize: this.screenSize,
- image: image,
- polyline: polyline,
- };
- // we have successfully tracked the target in this frame
- this._lostCounter = 0;
- // done!
- return {
- nextState: 'tracking',
- trackerOutput: this._lastOutput
- };
- }).catch(err => {
- // give some tolerance to tracking errors
- if (err instanceof TrackingError) {
- if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
- //console.log("ABSORB",this._lostCounter,err.toString())
- // absorb the error
- return {
- nextState: 'tracking',
- trackerOutput: this._lastOutput
- };
- }
- }
- // lost tracking
- Utils.warning(`The target has been lost! ${err.toString()}`);
- this._camera.reset();
- // go back to the scanning state
- return {
- nextState: 'scanning',
- trackerOutput: {
- image: image,
- screenSize: this.screenSize,
- },
- };
- });
- }
- /**
- * Find quality matches between two sets of keypoints
- * @param currKeypoints keypoints of the current frame
- * @param prevKeypoints keypoints of the previous frame
- * @returns quality matches
- */
- _findQualityMatches(currKeypoints, prevKeypoints) {
- const result = [[], []];
- const n = currKeypoints.length;
- for (let i = 0; i < n; i++) {
- const currKeypoint = currKeypoints[i];
- if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
- const d1 = currKeypoint.matches[0].distance;
- const d2 = currKeypoint.matches[1].distance;
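- // ratio test: accept the match only if the best distance is significantly
- // smaller than the second-best distance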
- if (d1 <= TRACK_MATCH_RATIO * d2) {
- const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
- result[0].push(currKeypoint);
- result[1].push(prevKeypoint);
- }
- }
- }
- return result;
- }
- /**
- * Find a better spatial distribution of the input matches
- * @param matches quality matches
- * @returns refined quality matches
- */
- _refineQualityMatches(matches) {
- const currKeypoints = matches[0];
- const prevKeypoints = matches[1];
- // find a better spatial distribution of the keypoints
- const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
- const n = indices.length; // number of refined matches
- // assemble output
- const result = [new Array(n), new Array(n)];
- for (let i = 0; i < n; i++) {
- result[0][i] = currKeypoints[indices[i]];
- result[1][i] = prevKeypoints[indices[i]];
- }
- // done!
- return result;
- }
- /**
- * Spatially distribute keypoints over a grid
- * @param keypoints keypoints to be distributed
- * @param gridCells number of grid elements in each axis
- * @returns a list of indices of keypoints[]
- */
- _distributeKeypoints(keypoints, gridCells) {
- // get the coordinates of the keypoints
- const n = keypoints.length;
- const points = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- points[j] = keypoints[i].x;
- points[j + 1] = keypoints[i].y;
- }
- // normalize the coordinates to [0,1] x [0,1]
- this._normalizePoints(points);
- // distribute the keypoints over a grid
- const numberOfCells = gridCells * gridCells;
- const grid = (new Array(numberOfCells)).fill(-1);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- // find the grid location of the i-th point
- const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
- const yg = Math.floor(points[j + 1] * gridCells);
- // store the index of the i-th point in the grid
- grid[yg * gridCells + xg] = i;
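- // (at most one keypoint is kept per cell; a later keypoint overwrites an earlier one)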
- }
- // retrieve points of the grid
- const indices = [];
- for (let g = 0; g < numberOfCells; g++) {
- if (grid[g] >= 0) {
- const i = grid[g];
- indices.push(i);
- }
- }
- // done!
- return indices;
- }
- /**
- * Normalize points to [0,1)^2
- * @param points 2 x n matrix of points in column-major format
- * @returns points
- */
- _normalizePoints(points) {
- Utils.assert(points.length % 2 == 0);
- const n = points.length / 2;
- if (n == 0)
- return points;
- let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
- let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- const x = points[j], y = points[j + 1];
- xmin = x < xmin ? x : xmin;
- ymin = y < ymin ? y : ymin;
- xmax = x > xmax ? x : xmax;
- ymax = y > ymax ? y : ymax;
- }
- const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
- const ylen = ymax - ymin + 1;
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- points[j] = (points[j] - xmin) / xlen;
- points[j + 1] = (points[j + 1] - ymin) / ylen;
- }
- return points;
- }
- /**
- * Find a matrix with the coordinates of quality matches
- * @param matches n quality matches
- * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
- */
- _findMatrixOfMatches(matches) {
- const n = matches[0].length;
- Utils.assert(n > 0);
- // sets of keypoints
- const currKeypoints = matches[0];
- const prevKeypoints = matches[1];
- // get the coordinates of the keypoints of the set of refined matches
- const src = new Array(2 * n);
- const dst = new Array(2 * n);
- for (let i = 0, j = 0; i < n; i++, j += 2) {
- src[j] = prevKeypoints[i].x;
- src[j + 1] = prevKeypoints[i].y;
- dst[j] = currKeypoints[i].x;
- dst[j + 1] = currKeypoints[i].y;
- }
- // assemble the matrix
- return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
- }
- /**
- * Preprocess keypoint matches
- * @param currKeypoints keypoints of the current frame
- * @param prevKeypoints keypoints of the previous frame
- * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
- * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
- */
- _preprocessMatches(currKeypoints, prevKeypoints) {
- // find and refine quality matches
- const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
- const refinedMatches = this._refineQualityMatches(qualityMatches);
- // not enough matches?
- const n = refinedMatches[0].length;
- if (n < TRACK_MIN_MATCHES)
- return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
- // find matrix of matches
- const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
- // warp matrix of matches
- const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
- return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
- }
- /**
- * Find an affine motion model of the target image
- * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
- * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
- */
- _findAffineMotion(preprocessedMatches) {
- const model = speedy_vision_default().Matrix.Eye(3);
- const n = preprocessedMatches.columns / 2; // number of preprocessed matches
- // find motion model
- return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512,
- bundleSize: 128,
- }).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a perspective motion model of the target image
- * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
- * @returns a promise that resolves to a 3x3 perspective motion model
- */
- _findPerspectiveMotion(preprocessedMatches) {
- /*
-
- We can probably get more accurate motion estimates if we
- work in 3D rather than in 2D. We're currently estimating
- an affine transform in image space. What if we projected
- the keypoints into world space, estimated the camera motion
- (rotation and translation) that best describes the observed
- motion of the keypoints, and then projected things
- back to image space? Need to figure this out; we'll get a
- homography matrix.
-
- Note: keypoints are in rectified image space.
-
- Note: work with a 6 DoF perspective transform instead of 8.
-
- */
- const model = speedy_vision_default().Matrix.Zeros(3);
- const n = preprocessedMatches.columns / 2; // number of preprocessed matches
- // find motion model
- return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
- method: 'pransac',
- reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
- numberOfHypotheses: 512 * 2,
- bundleSize: 128 * 4, //*4
- }).then(_ => {
- // validate the model
- const a00 = model.at(0, 0);
- if (Number.isNaN(a00))
- throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
- // done!
- return model;
- });
- }
- /**
- * Find a rectification matrix to be applied to the target image
- * @param homography maps a reference image to the AR screen
- * @param media target
- * @param screenSize AR screen
- * @returns promise that resolves to a rectification matrix
- */
- _findImageWarp(homography, screenSize) {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const mat = speedy_vision_default().Matrix.Zeros(3);
- return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
- }
- /**
- * Find a warp to be applied to the keypoints
- * @returns affine transform
- */
- _findKeypointWarp() {
- const referenceImage = this._referenceImage;
- const media = this._imageTracker.database._findMedia(referenceImage.name);
- const screenSize = this.screenSize;
- const sw = screenSize.width, sh = screenSize.height;
- const mat = speedy_vision_default().Matrix.Eye(3, 3);
- // no rotation is needed
- if (!this._mustRotateWarpedImage(media, screenSize))
- return speedy_vision_default().Promise.resolve(mat);
- // rotate by 90 degrees clockwise and scale
- return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
- }
- /**
- * Predict the keypoints without actually looking at the image
- * @param curr keypoints at time t (will modify the contents)
- * @param initial keypoints at time t-1 (not just t = 0)
- * @returns keypoints at time t+1
- */
- _predictKeypoints(curr, initial) {
- // the target image is likely to be moving roughly in
- // the same manner as it was in the previous frame
- const next = [];
- const n = curr.length;
- for (let i = 0; i < n; i++) {
- const cur = curr[i];
- if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
- continue;
- /*
- else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
- continue;
- */
- const ini = initial[cur.matches[0].index];
- const dx = cur.position.x - ini.position.x;
- const dy = cur.position.y - ini.position.y;
- // a better mathematical model is needed
- const alpha = 0.8; //0.2;
- cur.position.x = ini.position.x + alpha * dx;
- cur.position.y = ini.position.y + alpha * dy;
- next.push(cur);
- }
- // done!
- return next;
- }
- /**
- * Create & setup the pipeline
- * @returns pipeline
- */
- _createPipeline() {
- const pipeline = speedy_vision_default().Pipeline();
- const source = speedy_vision_default().Image.Source('source');
- const screen = speedy_vision_default().Transform.Resize('screen');
- const greyscale = speedy_vision_default().Filter.Greyscale();
- const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
- const nightvision = speedy_vision_default().Filter.Nightvision();
- const nightvisionMux = speedy_vision_default().Image.Multiplexer();
- const blur = speedy_vision_default().Filter.GaussianBlur();
- const detector = speedy_vision_default().Keypoint.Detector.Harris();
- const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
- const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
- const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
- const denoiser = speedy_vision_default().Filter.GaussianBlur();
- const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
- const clipper = speedy_vision_default().Keypoint.Clipper();
- const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
- const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
- const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
- const imageSink = speedy_vision_default().Image.Sink('image');
- source.media = null;
- screen.size = speedy_vision_default().Size(0, 0);
- imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- nightvision.gain = NIGHTVISION_GAIN;
- nightvision.offset = NIGHTVISION_OFFSET;
- nightvision.decay = NIGHTVISION_DECAY;
- nightvision.quality = NIGHTVISION_QUALITY;
- nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
- blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
- blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
- denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
- denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
- detector.quality = TRACK_HARRIS_QUALITY;
- detector.capacity = TRACK_DETECTOR_CAPACITY;
- subpixel.method = SUBPIXEL_METHOD;
- clipper.size = TRACK_MAX_KEYPOINTS;
- borderClipper.imageSize = screen.size;
- borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
- keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
- matcher.k = 2;
- keypointPortalSource.source = null;
- keypointSink.turbo = USE_TURBO;
- // prepare input
- source.output().connectTo(screen.input());
- screen.output().connectTo(greyscale.input());
- // preprocess images
- greyscale.output().connectTo(imageRectifier.input());
- imageRectifier.output().connectTo(nightvisionMux.input('in0'));
- imageRectifier.output().connectTo(nightvision.input());
- nightvision.output().connectTo(nightvisionMux.input('in1'));
- // keypoint detection & clipping
- nightvisionMux.output().connectTo(detector.input());
- detector.output().connectTo(borderClipper.input());
- borderClipper.output().connectTo(clipper.input());
- // keypoint refinement
- imageRectifier.output().connectTo(denoiser.input());
- denoiser.output().connectTo(subpixel.input('image'));
- clipper.output().connectTo(subpixel.input('keypoints'));
- // keypoint description
- imageRectifier.output().connectTo(blur.input());
- blur.output().connectTo(descriptor.input('image'));
- subpixel.output().connectTo(descriptor.input('keypoints'));
- // keypoint matching
- keypointPortalSource.output().connectTo(matcher.input('database'));
- descriptor.output().connectTo(matcher.input('keypoints'));
- // prepare output
- descriptor.output().connectTo(keypointRectifier.input());
- //preMatcher.output().connectTo(keypointRectifier.input());
- keypointRectifier.output().connectTo(keypointSink.input());
- matcher.output().connectTo(keypointSink.input('matches'));
- //imageRectifier.output().connectTo(imageSink.input());
- // done!
- pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
- return pipeline;
- }
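- // Illustrative sketch (comment only, not executed): how a pipeline like the
- // one assembled above is typically driven, assuming speedy-vision's
- // pipeline.run() resolves to an object keyed by the sink names declared
- // above ('keypoints', and 'image' if the image sink is connected).
- // inputMedia, warp, sw and sh are placeholders:
- //
- //   source.media = inputMedia;                            // SpeedyMedia of the camera frame
- //   screen.size = speedy_vision_default().Size(sw, sh);   // AR screen size
- //   imageRectifier.transform = warp;                      // see _findImageWarp()
- //   pipeline.run().then(({ keypoints }) => {
- //       // matched keypoints, rectified by keypointRectifier
- //   });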
- }
-
- ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * image-tracker.ts
- * Image Tracker
- */
-
-
-
-
-
-
-
-
-
-
-
- /** A helper */
- const formatSize = (size) => `${size.width}x${size.height}`;
- /**
- * The ImageTracker tracks an image (one at a time)
- */
- class ImageTracker extends AREventTarget {
- /**
- * Constructor
- */
- constructor() {
- super();
- // the states
- this._state = {
- 'initial': new ImageTrackerInitialState(this),
- 'training': new ImageTrackerTrainingState(this),
- 'scanning': new ImageTrackerScanningState(this),
- 'pre-tracking': new ImageTrackerPreTrackingState(this),
- 'tracking': new ImageTrackerTrackingState(this),
- };
- // initial setup
- this._session = null;
- this._activeStateName = 'initial';
- this._lastOutput = {};
- this._database = new ReferenceImageDatabase();
- // user settings
- this._resolution = DEFAULT_TRACKING_RESOLUTION;
- }
- /**
- * The type of the tracker
- */
- get type() {
- return 'image-tracker';
- }
- /**
- * Current state name
- */
- get state() {
- return this._activeStateName;
- }
- /**
- * Reference Image Database
- * Must be configured before training the tracker
- */
- get database() {
- return this._database;
- }
- /**
- * Resolution of the AR screen space
- */
- get resolution() {
- return this._resolution;
- }
- /**
- * Resolution of the AR screen space
- */
- set resolution(resolution) {
- this._resolution = resolution;
- }
- /**
- * Size of the AR screen space, in pixels
- * @internal
- */
- get screenSize() {
- return this._state[this._activeStateName].screenSize;
- }
- /**
- * Last emitted output
- * @internal
- */
- get _output() {
- return this._lastOutput;
- }
- /**
- * Stats related to this tracker
- * @internal
- */
- get _stats() {
- return `${formatSize(this.screenSize)} ${this.state}`;
- }
- /**
- * Initialize this tracker
- * @param session
- * @returns promise that resolves after the tracker has been initialized
- * @internal
- */
- _init(session) {
- // store the session
- this._session = session;
- // initialize states
- for (const state of Object.values(this._state))
- state.init();
- // done!
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Release this tracker
- * @returns promise that resolves after the tracker has been released
- * @internal
- */
- _release() {
- // release states
- for (const state of Object.values(this._state))
- state.release();
- // unlink session
- this._session = null;
- // done!
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Update the tracker
- * @returns promise
- * @internal
- */
- _update() {
- // validate
- if (this._session == null)
- return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
- // compute the screen size for image processing purposes
- // note: this may change over time...!
- const media = this._session.media;
- const aspectRatio = media.width / media.height;
- const screenSize = Utils.resolution(this._resolution, aspectRatio);
- // run the active state
- const activeState = this._state[this._activeStateName];
- return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
- // update the output of the tracker
- this._lastOutput = trackerOutput;
- // need to change the state?
- if (this._activeStateName != nextState) {
- activeState.onLeaveState();
- this._activeStateName = nextState;
- this._state[nextState].onEnterState(nextStateSettings || {});
- }
- });
- }
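- // Note on the state transition above: each state's update() resolves to an
- // object of the form { trackerOutput, nextState, nextStateSettings }; when
- // nextState differs from the active state, onLeaveState() / onEnterState()
- // are called, which is how the tracker advances through the states declared
- // in the constructor ('initial', 'training', 'scanning', 'pre-tracking',
- // 'tracking').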
- /**
- * Get the reference image associated with a keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns the corresponding reference image
- * @internal
- */
- _referenceImageOfKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceImageOfKeypoint(keypointIndex);
- }
- /**
- * Get the index of the reference image associated with a keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns reference image index, or -1 if not found
- * @internal
- */
- _referenceImageIndexOfKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceImageIndexOfKeypoint(keypointIndex);
- }
- /**
- * Get a keypoint of the trained set
- * @param keypointIndex index of the keypoint
- * @returns the corresponding keypoint
- * @internal
- */
- _referenceKeypoint(keypointIndex) {
- const training = this._state.training;
- return training.referenceKeypoint(keypointIndex);
- }
- }
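- // Illustrative sketch (comment only, not executed): from application code,
- // the tracker is normally created via Martins.Tracker.ImageTracker() and fed
- // with reference images before the session starts. The database.add() call
- // and the 'sm' resolution token below are assumptions about this engine's
- // public API and may differ:
- //
- //   const tracker = Martins.Tracker.ImageTracker();
- //   tracker.database.add([{ name: 'my-target', image: myImageElement }]);
- //   tracker.resolution = 'sm';   // optional: resolution of the AR screen space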
-
- ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * tracker-factory.ts
- * Tracker factory
- */
-
- /**
- * Tracker factory
- */
- class TrackerFactory {
- /**
- * Create an Image Tracker
- */
- static ImageTracker() {
- return new ImageTracker();
- }
- }
-
- ;// CONCATENATED MODULE: ./src/sources/video-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * video-source.ts
- * HTMLVideoElement-based source of data
- */
-
-
-
- /** A message to be displayed if a video can't autoplay and user interaction is required */
- const ALERT_MESSAGE = 'Tap on the screen to start';
- /** Whether or not we have displayed the ALERT_MESSAGE */
- let displayedAlertMessage = false;
- /**
- * HTMLVideoElement-based source of data
- */
- class VideoSource {
- /**
- * Constructor
- */
- constructor(video) {
- Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
- this._video = video;
- this._media = null;
- }
- /**
- * A type-identifier of the source of data
- * @internal
- */
- get _type() {
- return 'video';
- }
- /**
- * Get media
- * @internal
- */
- get _data() {
- if (this._media == null)
- throw new IllegalOperationError(`The media of the source of data isn't loaded`);
- return this._media;
- }
- /**
- * Stats related to this source of data
- * @internal
- */
- get _stats() {
- const media = this._media;
- if (media != null)
- return `${media.width}x${media.height} video`;
- else
- return 'uninitialized video';
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- Utils.log(`Initializing ${this._type} source...`);
- // prepare the video before loading the SpeedyMedia!
- return this._prepareVideo(this._video).then(video => {
- Utils.log('The video is prepared');
- return speedy_vision_default().load(video).then(media => {
- Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
- this._media = media;
- });
- });
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- if (this._media)
- this._media.release();
- this._media = null;
- return speedy_vision_default().Promise.resolve();
- }
- /**
- * Handle browser-specific quirks for <video> elements
- * @param video a video element
- * @returns a promise that resolves to the input video
- */
- _prepareVideo(video) {
- // WebKit <video> policies for iOS:
- // https://webkit.org/blog/6784/new-video-policies-for-ios/
- // required on iOS; nice to have in all browsers
- video.setAttribute('playsinline', '');
- // handle autoplay
- return this._handleAutoPlay(video).then(video => {
- // handle WebKit quirks
- if (Utils.isWebKit()) {
- // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
- // on iOS 15.2-17.3, this hack doesn't seem necessary, but works okay
- if (video.hidden) {
- video.hidden = false;
- video.style.setProperty('opacity', '0');
- video.style.setProperty('position', 'fixed'); // make sure that it's visible on-screen
- video.style.setProperty('left', '0');
- video.style.setProperty('top', '0');
- //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
- //video.style.setProperty('visibility', 'hidden'); // doesn't work either
- }
- }
- // done
- return video;
- });
- }
- /**
- * Handle browser-specific quirks for videos marked with autoplay
- * @param video a <video> marked with autoplay
- * @returns a promise that resolves to the input video
- */
- _handleAutoPlay(video) {
- // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
- // Chrome policy: https://developer.chrome.com/blog/autoplay/
- // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
- // nothing to do?
- if (!video.autoplay)
- return speedy_vision_default().Promise.resolve(video);
- // videos marked with autoplay should be muted
- if (!video.muted) {
- Utils.warning('Videos marked with autoplay should be muted', video);
- video.muted = true;
- }
- // the browser may not honor the autoplay attribute if the video is not
- // visible on-screen. So, let's try to play the video in any case.
- return this._waitUntilPlayable(video).then(video => {
- // try to play the video
- const promise = video.play();
- // handle older browsers
- if (promise === undefined)
- return video;
- // resolve if successful
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- promise.then(() => resolve(video), error => {
- // can't play the video
- Utils.error(`Can't autoplay video!`, error, video);
- // autoplay is blocked for some reason
- if (error.name == 'NotAllowedError') {
- Utils.warning('Tip: allow manual playback');
- if (Utils.isIOS())
- Utils.warning('Is low power mode on?');
- // User interaction is required to play the video. We can
- // solve this here (easy and convenient to do) or at the
- // application layer (for a better user experience). If the
- // latter is preferred, just disable autoplay and play the
- // video programmatically.
- if (video.hidden || !video.controls || video.parentNode === null) {
- // this is added for convenience
- document.body.addEventListener('pointerdown', () => video.play());
- // ask only once for user interaction
- if (!displayedAlertMessage) {
- alert(ALERT_MESSAGE);
- displayedAlertMessage = true;
- }
- // XXX what if the Session mode is inline? In this
- // case, this convenience code may be undesirable.
- // A workaround is to disable autoplay.
- }
- /*else {
- // play the video after the first interaction with the page
- const polling = setInterval(() => {
- video.play().then(() => clearInterval(polling));
- }, 500);
- }*/
- }
- // unsupported media source
- else if (error.name == 'NotSupportedError') {
- reject(new NotSupportedError('Unsupported video format', error));
- return;
- }
- // done
- resolve(video);
- });
- });
- });
- }
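- // Illustrative sketch (comment only, not executed) of the application-layer
- // alternative mentioned above: omit the autoplay attribute and start
- // playback from a user gesture, so that no alert() or document-level
- // listener is needed (startButton and startARSession are hypothetical):
- //
- //   // <video muted playsinline src="scene.webm"></video>   (no autoplay)
- //   startButton.addEventListener('pointerdown', () => {
- //       video.play().then(startARSession);
- //   });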
- /**
- * Wait for the input video to be playable
- * @param video
- * @returns a promise that resolves to the input video when it can be played
- */
- _waitUntilPlayable(video) {
- const TIMEOUT = 15000, INTERVAL = 500;
- if (video.readyState >= 3)
- return speedy_vision_default().Promise.resolve(video);
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- let ms = 0, t = setInterval(() => {
- //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
- if (video.readyState >= 3) {
- clearInterval(t);
- resolve(video);
- }
- else if ((ms += INTERVAL) >= TIMEOUT) {
- clearInterval(t);
- reject(new TimeoutError('The video took too long to load'));
- }
- }, INTERVAL);
- });
- }
- }
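- // Illustrative sketch (comment only, not executed): feeding a <video> to the
- // engine. The attributes follow the requirements handled by _prepareVideo()
- // and _handleAutoPlay() above -- playsinline is set automatically and
- // autoplaying videos must be muted:
- //
- //   const video = document.createElement('video');
- //   video.src = 'my-scene.webm';   // hypothetical file
- //   video.muted = true;
- //   video.loop = true;
- //   const source = new VideoSource(video);   // or Martins.Source.Video(video)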
-
- ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * canvas-source.ts
- * HTMLCanvasElement-based source of data
- */
-
-
-
- /**
- * HTMLCanvasElement-based source of data
- */
- class CanvasSource {
- /**
- * Constructor
- */
- constructor(canvas) {
- Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
- this._canvas = canvas;
- this._media = null;
- }
- /**
- * A type-identifier of the source of data
- * @internal
- */
- get _type() {
- return 'canvas';
- }
- /**
- * Get media
- * @internal
- */
- get _data() {
- if (this._media == null)
- throw new IllegalOperationError(`The media of the source of data isn't loaded`);
- return this._media;
- }
- /**
- * Stats related to this source of data
- * @internal
- */
- get _stats() {
- const media = this._media;
- if (media != null)
- return `${media.width}x${media.height} canvas`;
- else
- return 'uninitialized canvas';
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- return speedy_vision_default().load(this._canvas).then(media => {
- Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
- this._media = media;
- });
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- if (this._media)
- this._media.release();
- this._media = null;
- return speedy_vision_default().Promise.resolve();
- }
- }
-
- ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * camera-source.ts
- * Webcam-based source of data
- */
-
-
-
-
- /** Default options for camera sources */
- const DEFAULT_CAMERA_OPTIONS = {
- resolution: 'md',
- aspectRatio: 16 / 9,
- constraints: { facingMode: 'environment' },
- };
- /**
- * Webcam-based source of data
- */
- class CameraSource extends VideoSource {
- /**
- * Constructor
- */
- constructor(options) {
- const video = document.createElement('video');
- super(video);
- this._cameraVideo = video;
- this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
- }
- /**
- * Camera resolution
- */
- get resolution() {
- return this._options.resolution;
- }
- /**
- * Initialize this source of data
- * @returns a promise that resolves as soon as this source of data is initialized
- * @internal
- */
- _init() {
- Utils.log('Accessing the webcam...');
- // validate
- if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
- throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
- // set up media constraints
- const options = this._options;
- const size = Utils.resolution(options.resolution, options.aspectRatio);
- const constraints = {
- audio: false,
- video: Object.assign({ width: size.width, height: size.height }, options.constraints)
- };
- // load camera stream
- return new (speedy_vision_default()).Promise((resolve, reject) => {
- navigator.mediaDevices.getUserMedia(constraints).then(stream => {
- const video = this._cameraVideo;
- video.onloadedmetadata = () => {
- const promise = video.play();
- const success = 'Access to the webcam has been granted.';
- // handle older browsers
- if (promise === undefined) {
- Utils.log(success);
- resolve(video);
- return;
- }
- // handle promise
- promise.then(() => {
- Utils.log(success);
- resolve(video);
- }).catch(error => {
- reject(new IllegalOperationError('Webcam error!', error));
- });
- };
- video.setAttribute('playsinline', '');
- video.setAttribute('autoplay', '');
- video.setAttribute('muted', '');
- video.srcObject = stream;
- }).catch(error => {
- reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
- });
- }).then(_ => super._init()); // this will call VideoSource._prepareVideo()
- }
- /**
- * Release this source of data
- * @returns a promise that resolves as soon as this source of data is released
- * @internal
- */
- _release() {
- const stream = this._cameraVideo.srcObject;
- const tracks = stream.getTracks();
- // stop camera feed
- tracks.forEach(track => track.stop());
- this._cameraVideo.onloadedmetadata = null;
- this._cameraVideo.srcObject = null;
- // release the media
- return super._release();
- }
- }
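- // Illustrative sketch (comment only, not executed): the options accepted by
- // CameraSource follow the shape of DEFAULT_CAMERA_OPTIONS above, and
- // `constraints` is merged into the video constraints passed to
- // getUserMedia():
- //
- //   const camera = new CameraSource({
- //       resolution: 'md',                      // default
- //       aspectRatio: 16 / 9,                   // default
- //       constraints: { facingMode: 'user' }    // e.g. front-facing camera
- //   });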
-
- ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * source-factory.ts
- * Factory of sources of data
- */
-
-
-
- /**
- * Factory of sources of data
- */
- class SourceFactory {
- /**
- * Create a <video>-based source of data
- * @param video video element
- */
- static Video(video) {
- return new VideoSource(video);
- }
- /**
- * Create a <canvas>-based source of data
- * @param canvas canvas element
- */
- static Canvas(canvas) {
- return new CanvasSource(canvas);
- }
- /**
- * Create a Webcam-based source of data
- * @param options optional options object
- */
- static Camera(options = {}) {
- return new CameraSource(options);
- }
- }
-
- ;// CONCATENATED MODULE: ./src/main.ts
- /*
- * MARTINS.js
- * GPU-accelerated Augmented Reality for the web
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published
- * by the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * main.ts
- * Entry point
- */
-
-
-
-
-
-
-
- /**
- * GPU-accelerated Augmented Reality for the web
- */
- class Martins {
- /**
- * Start a new session
- * @param options
- * @returns a promise that resolves to a new session
- */
- static startSession(options) {
- return Session.instantiate(options);
- }
- /**
- * Trackers
- */
- static get Tracker() {
- return TrackerFactory;
- }
- /**
- * Sources of data
- */
- static get Source() {
- return SourceFactory;
- }
- /**
- * Create a viewport
- * @param settings
- * @returns a new viewport with the specified settings
- */
- static Viewport(settings) {
- return new BaseViewport(settings);
- }
- /**
- * Global Settings
- */
- static get Settings() {
- return Settings;
- }
- /**
- * Engine version
- */
- static get version() {
- return "0.2.1-wip";
- }
- /**
- * Speedy Vision
- */
- static get Speedy() {
- return (speedy_vision_default());
- }
- /**
- * Checks if the engine can be run in the browser the client is using
- * @returns true if the engine is compatible with the browser
- */
- static isSupported() {
- return Session.isSupported();
- }
- }
- // Freeze the namespace
- Object.freeze(Martins);
- // Add Speedy Vision to global scope
- ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
- // Display a notice
- Utils.log(`MARTINS.js version ${Martins.version}. ` +
- `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
- "https://github.com/alemart/martins-js");
-
- })();
-
- __webpack_exports__ = __webpack_exports__["default"];
- /******/ return __webpack_exports__;
- /******/ })()
- ;
- });
|