
martins.js · 1.0 MB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
  1. /*!
  2. * MARTINS.js version 0.2.0
  3. * GPU-accelerated Augmented Reality for the web
  4. * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  5. * https://github.com/alemart/martins-js
  6. *
  7. * @license LGPL-3.0-or-later
  8. * Date: 2024-06-28T19:56:07.259Z
  9. */
  10. (function webpackUniversalModuleDefinition(root, factory) {
  11. if(typeof exports === 'object' && typeof module === 'object')
  12. module.exports = factory();
  13. else if(typeof define === 'function' && define.amd)
  14. define([], factory);
  15. else if(typeof exports === 'object')
  16. exports["Martins"] = factory();
  17. else
  18. root["Martins"] = factory();
  19. })(self, () => {
  20. return /******/ (() => { // webpackBootstrap
  21. /******/ var __webpack_modules__ = ({
  22. /***/ 774:
  23. /***/ ((module) => {
  24. /*!
  25. * Speedy Vision version 0.9.1
  26. * GPU-accelerated Computer Vision for JavaScript
  27. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  28. * https://github.com/alemart/speedy-vision
  29. *
  30. * @license Apache-2.0
  31. * Date: 2024-06-28T15:13:44.513Z
  32. */
  33. (function webpackUniversalModuleDefinition(root, factory) {
  34. if(true)
  35. module.exports = factory();
  36. else {}
  37. })(self, () => {
  38. return /******/ (() => { // webpackBootstrap
  39. /******/ var __webpack_modules__ = ({
  40. /***/ 5637:
  41. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
  42. "use strict";
  43. /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
  44. /* harmony export */ w: () => (/* binding */ Settings)
  45. /* harmony export */ });
  46. /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(416);
  47. /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1567);
  48. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(2191);
  49. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(5619);
  50. /*
  51. * speedy-vision.js
  52. * GPU-accelerated Computer Vision for JavaScript
  53. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  54. *
  55. * Licensed under the Apache License, Version 2.0 (the "License");
  56. * you may not use this file except in compliance with the License.
  57. * You may obtain a copy of the License at
  58. *
  59. * http://www.apache.org/licenses/LICENSE-2.0
  60. *
  61. * Unless required by applicable law or agreed to in writing, software
  62. * distributed under the License is distributed on an "AS IS" BASIS,
  63. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  64. * See the License for the specific language governing permissions and
  65. * limitations under the License.
  66. *
  67. * settings.js
  68. * Global settings
  69. */
  70. /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
  71. /** @typedef {"raf" | "asap"} GPUPollingMode */
  72. /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
  73. /** @type {GPUPollingMode} Default GPU polling mode */
  74. const DEFAULT_GPU_POLLING_MODE = 'raf';
  75. /** @type {GPUPollingMode} GPU polling mode */
  76. let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
  77. /** @type {LoggingMode} logging mode */
  78. let loggingMode = 'default';
  79. /**
  80. * Global settings
  81. */
  82. class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q
  83. {
  84. /**
  85. * Power preference of the WebGL context
  86. * @returns {PowerPreference}
  87. */
  88. static get powerPreference()
  89. {
  90. return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
  91. }
  92. /**
  93. * Power preference of the WebGL context
  94. * @param {PowerPreference} value
  95. */
  96. static set powerPreference(value)
  97. {
  98. _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
  99. }
  100. /**
  101. * GPU polling mode
  102. * @returns {GPUPollingMode}
  103. */
  104. static get gpuPollingMode()
  105. {
  106. return gpuPollingMode;
  107. }
  108. /**
  109. * GPU polling mode
  110. * @param {GPUPollingMode} value
  111. */
  112. static set gpuPollingMode(value)
  113. {
  114. if(value !== 'raf' && value !== 'asap')
  115. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
  116. gpuPollingMode = value;
  117. }
  118. /**
  119. * Logging mode
  120. * @returns {LoggingMode}
  121. */
  122. static get logging()
  123. {
  124. return loggingMode;
  125. }
  126. /**
  127. * Logging mode
  128. * @param {LoggingMode} mode
  129. */
  130. static set logging(mode)
  131. {
  132. if(mode !== 'default' && mode !== 'none' && mode !== 'diagnostic')
  133. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);
  134. else if(mode === 'diagnostic')
  135. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
  136. loggingMode = mode;
  137. }
  138. }
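/*
 * Editor's sketch — not part of the original bundle. It illustrates how the
 * static accessors of the Settings class above can be used. The function is
 * never called, so the bundle's behavior is unchanged; the exact set of valid
 * powerPreference strings is defined in speedy-gl.js (module 1567) and is
 * assumed here to follow the standard WebGL values.
 */
function __settingsUsageSketch()
{
    Settings.powerPreference = 'high-performance'; // assumed WebGL-style value; see speedy-gl.js
    Settings.gpuPollingMode = 'raf';               // 'raf' (default) or 'asap'
    Settings.logging = 'none';                     // 'default' | 'none' | 'diagnostic'
}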
  139. /***/ }),
  140. /***/ 4292:
  141. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4424__) => {
  142. "use strict";
  143. /* harmony export */ __nested_webpack_require_4424__.d(__nested_webpack_exports__, {
  144. /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
  145. /* harmony export */ });
  146. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4424__(4247);
  147. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4424__(2191);
  148. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4424__(5619);
  149. /*
  150. * speedy-vision.js
  151. * GPU-accelerated Computer Vision for JavaScript
  152. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  153. *
  154. * Licensed under the Apache License, Version 2.0 (the "License");
  155. * you may not use this file except in compliance with the License.
  156. * You may obtain a copy of the License at
  157. *
  158. * http://www.apache.org/licenses/LICENSE-2.0
  159. *
  160. * Unless required by applicable law or agreed to in writing, software
  161. * distributed under the License is distributed on an "AS IS" BASIS,
  162. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  163. * See the License for the specific language governing permissions and
  164. * limitations under the License.
  165. *
  166. * speedy-matrix-expr.js
  167. * Symbolic matrix expressions
  168. */
  169. /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
  170. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
  171. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
  172. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  173. /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
  174. /** @const {Dtype2BufferType} */
  175. const DTYPE_TO_BUFFER_TYPE = Object.freeze({
  176. 'float32': Float32Array
  177. });
  178. /**
  179. * @abstract Matrix expression
  180. * It's an opaque object representing an algebraic
  181. * expression. It has no data attached to it.
  182. */
  183. class SpeedyMatrixExpr
  184. {
  185. /**
  186. * Constructor
  187. * @param {number} rows
  188. * @param {number} columns
  189. * @param {SpeedyMatrixDtype} dtype
  190. */
  191. constructor(rows, columns, dtype)
  192. {
  193. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
  194. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
  195. /** @type {number} number of rows */
  196. this._rows = rows | 0;
  197. /** @type {number} number of columns */
  198. this._columns = columns | 0;
  199. /** @type {SpeedyMatrixDtype} data type */
  200. this._dtype = dtype;
  201. }
  202. /**
  203. * Number of rows
  204. * @returns {number}
  205. */
  206. get rows()
  207. {
  208. return this._rows;
  209. }
  210. /**
  211. * Number of columns
  212. * @returns {number}
  213. */
  214. get columns()
  215. {
  216. return this._columns;
  217. }
  218. /**
  219. * Data type
  220. * @returns {SpeedyMatrixDtype}
  221. */
  222. get dtype()
  223. {
  224. return this._dtype;
  225. }
  226. /**
  227. * Default data type
  228. * @returns {SpeedyMatrixDtype}
  229. */
  230. static get DEFAULT_DTYPE()
  231. {
  232. return 'float32';
  233. }
  234. /**
  235. * Buffer types
  236. * @returns {Dtype2BufferType}
  237. */
  238. static get BUFFER_TYPE()
  239. {
  240. return DTYPE_TO_BUFFER_TYPE;
  241. }
  242. /**
  243. * Matrix addition
  244. * @param {SpeedyMatrixExpr} expr
  245. * @returns {SpeedyMatrixExpr}
  246. */
  247. plus(expr)
  248. {
  249. return new SpeedyMatrixAddExpr(this, expr);
  250. }
  251. /**
  252. * Matrix subtraction
  253. * @param {SpeedyMatrixExpr} expr
  254. * @returns {SpeedyMatrixExpr}
  255. */
  256. minus(expr)
  257. {
  258. return new SpeedyMatrixSubtractExpr(this, expr);
  259. }
  260. /**
  261. * Matrix multiplication
  262. * @param {SpeedyMatrixExpr|number} expr
  263. * @returns {SpeedyMatrixExpr}
  264. */
  265. times(expr)
  266. {
  267. if(typeof expr === 'number')
  268. return new SpeedyMatrixScaleExpr(this, expr);
  269. else
  270. return new SpeedyMatrixMultiplyExpr(this, expr);
  271. }
  272. /**
  273. * Matrix transposition
  274. * @returns {SpeedyMatrixExpr}
  275. */
  276. transpose()
  277. {
  278. return new SpeedyMatrixTransposeExpr(this);
  279. }
  280. /**
  281. * Matrix inversion
  282. * @returns {SpeedyMatrixExpr}
  283. */
  284. inverse()
  285. {
  286. return new SpeedyMatrixInvertExpr(this);
  287. }
  288. /**
  289. * Component-wise multiplication
  290. * @param {SpeedyMatrixExpr} expr
  291. * @returns {SpeedyMatrixExpr}
  292. */
  293. compMult(expr)
  294. {
  295. return new SpeedyMatrixCompMultExpr(this, expr);
  296. }
  297. /**
  298. * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
  299. * @param {SpeedyMatrixExpr} expr
  300. * @returns {SpeedyMatrixExpr}
  301. */
  302. ldiv(expr)
  303. {
  304. return new SpeedyMatrixLdivExpr(this, expr);
  305. }
  306. /**
  307. * Returns a human-readable string representation of the matrix expression
  308. * @returns {string}
  309. */
  310. toString()
  311. {
  312. return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
  313. }
  314. /**
  315. * Evaluate this expression
  316. * @abstract
  317. * @param {WebAssembly.Instance} wasm
  318. * @param {SpeedyMatrixWASMMemory} memory
  319. * @returns {SpeedyMatrix}
  320. */
  321. _evaluate(wasm, memory)
  322. {
  323. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  324. }
  325. }
  326. const { SpeedyMatrix } = __nested_webpack_require_4424__(3286);
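/*
 * Editor's sketch — not part of the original source. It shows how the fluent
 * methods of SpeedyMatrixExpr above compose symbolic expressions: no numeric
 * work happens here; evaluation is performed later by the library through
 * _evaluate(), against the WASM routines. SpeedyMatrix.Create comes from
 * module 3286 (entries are given in column-major order). Never called.
 */
function __matrixExprSketch()
{
    const A = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]); // columns [1,2] and [3,4]
    const B = SpeedyMatrix.Create(2, 2, [5, 6, 7, 8]);
    const expr = A.plus(B).times(2).transpose();       // symbolic ((A + B) * 2)^T
    const Ainv = A.inverse();                          // symbolic A^(-1)
    return [ expr, Ainv ];                             // SpeedyMatrixExpr objects, not results
}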
  327. /**
  328. * @abstract operation storing a temporary matrix
  329. */
  330. class SpeedyMatrixTempExpr extends SpeedyMatrixExpr
  331. {
  332. /**
  333. * Constructor
  334. * @param {number} rows
  335. * @param {number} columns
  336. * @param {SpeedyMatrixDtype} dtype
  337. */
  338. constructor(rows, columns, dtype)
  339. {
  340. super(rows, columns, dtype);
  341. /** @type {SpeedyMatrix} holds the results of a computation */
  342. this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
  343. }
  344. }
  345. /**
  346. * @abstract unary operation
  347. */
  348. class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr
  349. {
  350. /**
  351. * Constructor
  352. * @param {number} rows rows of the output matrix
  353. * @param {number} columns columns of the output matrix
  354. * @param {SpeedyMatrixExpr} operand
  355. */
  356. constructor(rows, columns, operand)
  357. {
  358. super(rows, columns, operand.dtype);
  359. /** @type {SpeedyMatrixExpr} operand */
  360. this._operand = operand;
  361. }
  362. /**
  363. * Evaluate this expression
  364. * @param {WebAssembly.Instance} wasm
  365. * @param {SpeedyMatrixWASMMemory} memory
  366. * @returns {SpeedyMatrix}
  367. */
  368. _evaluate(wasm, memory)
  369. {
  370. const operand = this._operand._evaluate(wasm, memory);
  371. const result = this._tempMatrix;
  372. // allocate matrices
  373. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  374. const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
  375. // copy operand to WASM memory
  376. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
  377. // run the WASM routine
  378. this._compute(wasm, memory, resultptr, operandptr);
  379. // copy result from WASM memory
  380. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  381. // deallocate matrices
  382. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
  383. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  384. // done!
  385. return result;
  386. }
  387. /**
  388. * Compute the result of this operation
  389. * @abstract
  390. * @param {WebAssembly.Instance} wasm
  391. * @param {SpeedyMatrixWASMMemory} memory
  392. * @param {number} resultptr pointer to Mat32
  393. * @param {number} operandptr pointer to Mat32
  394. */
  395. _compute(wasm, memory, resultptr, operandptr)
  396. {
  397. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  398. }
  399. }
  400. /**
  401. * @abstract binary operation
  402. */
  403. class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr
  404. {
  405. /**
  406. * Constructor
  407. * @param {number} rows rows of the output matrix
  408. * @param {number} columns columns of the output matrix
  409. * @param {SpeedyMatrixExpr} left left operand
  410. * @param {SpeedyMatrixExpr} right right operand
  411. */
  412. constructor(rows, columns, left, right)
  413. {
  414. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
  415. super(rows, columns, left.dtype);
  416. /** @type {SpeedyMatrixExpr} left operand */
  417. this._left = left;
  418. /** @type {SpeedyMatrixExpr} right operand */
  419. this._right = right;
  420. }
  421. /**
  422. * Evaluate this expression
  423. * @param {WebAssembly.Instance} wasm
  424. * @param {SpeedyMatrixWASMMemory} memory
  425. * @returns {SpeedyMatrix}
  426. */
  427. _evaluate(wasm, memory)
  428. {
  429. const left = this._left._evaluate(wasm, memory);
  430. const right = this._right._evaluate(wasm, memory);
  431. const result = this._tempMatrix;
  432. // allocate matrices
  433. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  434. const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
  435. const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
  436. // copy input matrices to WASM memory
  437. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
  438. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
  439. // run the WASM routine
  440. this._compute(wasm, memory, resultptr, leftptr, rightptr);
  441. // copy output matrix from WASM memory
  442. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  443. // deallocate matrices
  444. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
  445. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
  446. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  447. // done!
  448. return result;
  449. }
  450. /**
  451. * Compute the result of this operation
  452. * @abstract
  453. * @param {WebAssembly.Instance} wasm
  454. * @param {SpeedyMatrixWASMMemory} memory
  455. * @param {number} resultptr pointer to Mat32
  456. * @param {number} leftptr pointer to Mat32
  457. * @param {number} rightptr pointer to Mat32
  458. */
  459. _compute(wasm, memory, resultptr, leftptr, rightptr)
  460. {
  461. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  462. }
  463. }
  464. /**
  465. * Transpose matrix
  466. */
  467. class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr
  468. {
  469. /**
  470. * Constructor
  471. * @param {SpeedyMatrixExpr} operand
  472. */
  473. constructor(operand)
  474. {
  475. super(operand.columns, operand.rows, operand);
  476. }
  477. /**
  478. * Compute result = operand^T
  479. * @param {WebAssembly.Instance} wasm
  480. * @param {SpeedyMatrixWASMMemory} memory
  481. * @param {number} resultptr pointer to Mat32
  482. * @param {number} operandptr pointer to Mat32
  483. */
  484. _compute(wasm, memory, resultptr, operandptr)
  485. {
  486. wasm.exports.Mat32_transpose(resultptr, operandptr);
  487. }
  488. }
  489. /**
  490. * Invert square matrix
  491. */
  492. class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr
  493. {
  494. /**
  495. * Constructor
  496. * @param {SpeedyMatrixExpr} operand
  497. */
  498. constructor(operand)
  499. {
  500. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
  501. super(operand.rows, operand.columns, operand);
  502. /** @type {number} size of the matrix */
  503. this._size = operand.rows;
  504. }
  505. /**
  506. * Compute result = operand ^ (-1)
  507. * @param {WebAssembly.Instance} wasm
  508. * @param {SpeedyMatrixWASMMemory} memory
  509. * @param {number} resultptr pointer to Mat32
  510. * @param {number} operandptr pointer to Mat32
  511. */
  512. _compute(wasm, memory, resultptr, operandptr)
  513. {
  514. switch(this._size) {
  515. case 0: break;
  516. case 1:
  517. wasm.exports.Mat32_inverse1(resultptr, operandptr);
  518. break;
  519. case 2:
  520. wasm.exports.Mat32_inverse2(resultptr, operandptr);
  521. break;
  522. case 3:
  523. wasm.exports.Mat32_inverse3(resultptr, operandptr);
  524. break;
  525. default:
  526. wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
  527. break;
  528. }
  529. }
  530. }
  531. /**
  532. * Multiply matrix by a scalar value
  533. */
  534. class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr
  535. {
  536. /**
  537. * Constructor
  538. * @param {SpeedyMatrixExpr} operand
  539. * @param {number} scalar
  540. */
  541. constructor(operand, scalar)
  542. {
  543. super(operand.rows, operand.columns, operand);
  544. /** @type {number} scalar value */
  545. this._scalar = +scalar;
  546. }
  547. /**
  548. * Compute result = scalar * operand
  549. * @param {WebAssembly.Instance} wasm
  550. * @param {SpeedyMatrixWASMMemory} memory
  551. * @param {number} resultptr pointer to Mat32
  552. * @param {number} operandptr pointer to Mat32
  553. */
  554. _compute(wasm, memory, resultptr, operandptr)
  555. {
  556. wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
  557. }
  558. }
  559. /**
  560. * Matrix addition
  561. */
  562. class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr
  563. {
  564. /**
  565. * Constructor
  566. * @param {SpeedyMatrixExpr} left left operand
  567. * @param {SpeedyMatrixExpr} right right operand
  568. */
  569. constructor(left, right)
  570. {
  571. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  572. super(left.rows, left.columns, left, right);
  573. }
  574. /**
  575. * Compute result = left + right
  576. * @param {WebAssembly.Instance} wasm
  577. * @param {SpeedyMatrixWASMMemory} memory
  578. * @param {number} resultptr pointer to Mat32
  579. * @param {number} leftptr pointer to Mat32
  580. * @param {number} rightptr pointer to Mat32
  581. */
  582. _compute(wasm, memory, resultptr, leftptr, rightptr)
  583. {
  584. wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
  585. }
  586. }
  587. /**
  588. * Matrix subtraction
  589. */
  590. class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr
  591. {
  592. /**
  593. * Constructor
  594. * @param {SpeedyMatrixExpr} left left operand
  595. * @param {SpeedyMatrixExpr} right right operand
  596. */
  597. constructor(left, right)
  598. {
  599. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  600. super(left.rows, left.columns, left, right);
  601. }
  602. /**
  603. * Compute result = left - right
  604. * @param {WebAssembly.Instance} wasm
  605. * @param {SpeedyMatrixWASMMemory} memory
  606. * @param {number} resultptr pointer to Mat32
  607. * @param {number} leftptr pointer to Mat32
  608. * @param {number} rightptr pointer to Mat32
  609. */
  610. _compute(wasm, memory, resultptr, leftptr, rightptr)
  611. {
  612. wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
  613. }
  614. }
  615. /**
  616. * Matrix multiplication
  617. */
  618. class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr
  619. {
  620. /**
  621. * Constructor
  622. * @param {SpeedyMatrixExpr} left left operand
  623. * @param {SpeedyMatrixExpr} right right operand
  624. */
  625. constructor(left, right)
  626. {
  627. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
  628. super(left.rows, right.columns, left, right);
  629. }
  630. /**
  631. * Compute result = left * right
  632. * @param {WebAssembly.Instance} wasm
  633. * @param {SpeedyMatrixWASMMemory} memory
  634. * @param {number} resultptr pointer to Mat32
  635. * @param {number} leftptr pointer to Mat32
  636. * @param {number} rightptr pointer to Mat32
  637. */
  638. _compute(wasm, memory, resultptr, leftptr, rightptr)
  639. {
  640. wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
  641. }
  642. }
  643. /**
  644. * Component-wise multiplication
  645. */
  646. class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr
  647. {
  648. /**
  649. * Constructor
  650. * @param {SpeedyMatrixExpr} left left operand
  651. * @param {SpeedyMatrixExpr} right right operand
  652. */
  653. constructor(left, right)
  654. {
  655. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  656. super(right.rows, right.columns, left, right);
  657. }
  658. /**
  659. * Compute result = left <compMult> right
  660. * @param {WebAssembly.Instance} wasm
  661. * @param {SpeedyMatrixWASMMemory} memory
  662. * @param {number} resultptr pointer to Mat32
  663. * @param {number} leftptr pointer to Mat32
  664. * @param {number} rightptr pointer to Mat32
  665. */
  666. _compute(wasm, memory, resultptr, leftptr, rightptr)
  667. {
  668. wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
  669. }
  670. }
  671. /**
  672. * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
  673. */
  674. class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr
  675. {
  676. /**
  677. * Constructor
  678. * @param {SpeedyMatrixExpr} left left operand
  679. * @param {SpeedyMatrixExpr} right right operand
  680. */
  681. constructor(left, right)
  682. {
  683. const m = left.rows, n = left.columns;
  684. // TODO right doesn't need to be a column vector
  685. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
  686. super(n, 1, left, right);
  687. }
  688. /**
  689. * Compute result = left \ right
  690. * @param {WebAssembly.Instance} wasm
  691. * @param {SpeedyMatrixWASMMemory} memory
  692. * @param {number} resultptr pointer to Mat32
  693. * @param {number} leftptr pointer to Mat32
  694. * @param {number} rightptr pointer to Mat32
  695. */
  696. _compute(wasm, memory, resultptr, leftptr, rightptr)
  697. {
  698. wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
  699. }
  700. }
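/*
 * Editor's sketch — not part of the original source. A.ldiv(b) builds a
 * least-squares expression for A x = b (A is m x n with m >= n, b is a
 * column vector), which the class above computes with Mat32_qr_ols.
 * Never called.
 */
function __leastSquaresSketch()
{
    const A = SpeedyMatrix.Create(3, 2, [1, 1, 1, 0, 1, 2]); // 3x2 design matrix, column-major
    const b = SpeedyMatrix.Create(3, 1, [1, 2, 4]);          // observations
    return A.ldiv(b); // symbolic 2x1 solution; evaluated later by the library
}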
  701. /***/ }),
  702. /***/ 4247:
  703. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_23026__) => {
  704. "use strict";
  705. /* harmony export */ __nested_webpack_require_23026__.d(__nested_webpack_exports__, {
  706. /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
  707. /* harmony export */ });
  708. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_23026__(8902);
  709. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_23026__(5619);
  710. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_23026__(2191);
  711. /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_23026__(1814);
  712. /*
  713. * speedy-vision.js
  714. * GPU-accelerated Computer Vision for JavaScript
  715. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  716. *
  717. * Licensed under the Apache License, Version 2.0 (the "License");
  718. * you may not use this file except in compliance with the License.
  719. * You may obtain a copy of the License at
  720. *
  721. * http://www.apache.org/licenses/LICENSE-2.0
  722. *
  723. * Unless required by applicable law or agreed to in writing, software
  724. * distributed under the License is distributed on an "AS IS" BASIS,
  725. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  726. * See the License for the specific language governing permissions and
  727. * limitations under the License.
  728. *
  729. * speedy-matrix-wasm.js
  730. * WebAssembly bridge
  731. */
  732. /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  733. /**
  734. * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
  735. * @property {object} as
  736. * @property {WebAssembly.Memory} as.object
  737. * @property {Uint8Array} as.uint8
  738. * @property {Int32Array} as.int32
  739. * @property {Uint32Array} as.uint32
  740. * @property {Float32Array} as.float32
  741. * @property {Float64Array} as.float64
  742. */
  743. /**
  744. * @typedef {object} SpeedyMatrixWASMHandle
  745. * @property {WebAssembly.Instance} wasm
  746. * @property {SpeedyMatrixWASMMemory} memory
  747. * @property {WebAssembly.Module} module
  748. */
  749. /** @type {Uint8Array} WebAssembly binary */
  750. const WASM_BINARY = __nested_webpack_require_23026__(3575);
  751. /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
  752. let _instance = null;
  753. /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
  754. let _module = null;
  755. /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
  756. const _memory = (mem => ({
  757. as: {
  758. object: mem,
  759. uint8: new Uint8Array(mem.buffer),
  760. int32: new Int32Array(mem.buffer),
  761. uint32: new Uint32Array(mem.buffer),
  762. float32: new Float32Array(mem.buffer),
  763. float64: new Float64Array(mem.buffer),
  764. },
  765. }))(new WebAssembly.Memory({
  766. initial: 16, // 1 MB
  767. maximum: 256
  768. }));
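/*
 * Editor's note — not part of the original source. _memory exposes typed-array
 * views over a single WebAssembly.Memory buffer, so the same bytes can be read
 * with different element types. The function below (never called) shows the
 * raw little-endian bytes of a float written through the float32 view.
 */
function __memoryViewsSketch()
{
    _memory.as.float32[0] = 1.0;
    return _memory.as.uint8.slice(0, 4); // [0, 0, 128, 63] — IEEE-754 bytes of 1.0
}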
  769. /**
  770. * WebAssembly utilities
  771. */
  772. class SpeedyMatrixWASM
  773. {
  774. /**
  775. * Gets you the WASM instance, augmented memory & module
  776. * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
  777. */
  778. static ready()
  779. {
  780. return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
  781. SpeedyMatrixWASM._ready(resolve, reject);
  782. });
  783. }
  784. /**
  785. * Synchronously gets you the WASM instance, augmented memory & module
  786. * @returns {SpeedyMatrixWASMHandle}
  787. */
  788. static get handle()
  789. {
  790. if(!_instance || !_module)
  791. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
  792. return {
  793. wasm: _instance,
  794. memory: _memory,
  795. module: _module,
  796. };
  797. }
  798. /**
  799. * Gets you the WASM imports bound to a memory object
  800. * @param {SpeedyMatrixWASMMemory} memory
  801. * @returns {Object<string,Function>}
  802. */
  803. static imports(memory)
  804. {
  805. const obj = new SpeedyMatrixWASMImports(memory);
  806. return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype)
  807. .filter(property => typeof obj[property] === 'function' && property !== 'constructor')
  808. .reduce(
  809. (imports, methodName) => ((imports[methodName] = obj[methodName]), imports),
  810. Object.create(null)
  811. );
  812. }
  813. /**
  814. * Allocate a Mat32 in WebAssembly memory without copying any data
  815. * @param {WebAssembly.Instance} wasm
  816. * @param {SpeedyMatrixWASMMemory} memory
  817. * @param {SpeedyMatrix} matrix
  818. * @returns {number} pointer to the new Mat32
  819. */
  820. static allocateMat32(wasm, memory, matrix)
  821. {
  822. const dataptr = wasm.exports.malloc(matrix.data.byteLength);
  823. const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
  824. return matptr;
  825. }
  826. /**
  827. * Deallocate a Mat32 in WebAssembly
  828. * @param {WebAssembly.Instance} wasm
  829. * @param {SpeedyMatrixWASMMemory} memory
  830. * @param {number} matptr pointer to the allocated Mat32
  831. * @returns {number} NULL
  832. */
  833. static deallocateMat32(wasm, memory, matptr)
  834. {
  835. const dataptr = wasm.exports.Mat32_data(matptr);
  836. wasm.exports.free(matptr);
  837. wasm.exports.free(dataptr);
  838. return 0;
  839. }
  840. /**
  841. * Copy the data of a matrix to a WebAssembly Mat32
  842. * @param {WebAssembly.Instance} wasm
  843. * @param {SpeedyMatrixWASMMemory} memory
  844. * @param {number} matptr pointer to a Mat32
  845. * @param {SpeedyMatrix} matrix
  846. * @returns {number} matptr
  847. */
  848. static copyToMat32(wasm, memory, matptr, matrix)
  849. {
  850. // We assume the following:
  851. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  852. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  853. // 3. the data type is float32
  854. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  855. //matrix.dtype === 'float32' &&
  856. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr)
  857. );
  858. const dataptr = wasm.exports.Mat32_data(matptr);
  859. memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
  860. return matptr;
  861. }
  862. /**
  863. * Copy the data of a WebAssembly Mat32 to a matrix
  864. * @param {WebAssembly.Instance} wasm
  865. * @param {SpeedyMatrixWASMMemory} memory
  866. * @param {number} matptr pointer to a Mat32
  867. * @param {SpeedyMatrix} matrix
  868. * @returns {number} matptr
  869. */
  870. static copyFromMat32(wasm, memory, matptr, matrix)
  871. {
  872. // We assume the following:
  873. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  874. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  875. // 3. the data type is float32
  876. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  877. //matrix.dtype === 'float32' &&
  878. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr)
  879. );
  880. const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
  881. for(let offset = matrix.data.length - 1; offset >= 0; offset--)
  882. matrix.data[offset] = memory.as.float32[base + offset];
  883. return matptr;
  884. }
  885. /**
  886. * Polls the WebAssembly instance until it's ready
  887. * @param {function(SpeedyMatrixWASMHandle): void} resolve
  888. * @param {function(Error): void} reject
  889. * @param {number} [counter]
  890. */
  891. static _ready(resolve, reject, counter = 1000)
  892. {
  893. if(_instance !== null && _module !== null)
  894. resolve({ wasm: _instance, memory: _memory, module: _module });
  895. else if(counter <= 0)
  896. reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));
  897. else
  898. setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
  899. }
  900. }
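/*
 * Editor's sketch — not part of the original source. It mirrors the round trip
 * used by the expression classes in module 4292: wait for the WASM routines,
 * allocate Mat32 headers, copy data in, call an exported routine, copy data
 * out and deallocate. `result` is assumed to be sized as the transpose of
 * `operand`; Mat32_transpose stands in for any exported Mat32_* routine.
 * Never called.
 */
function __wasmRoundTripSketch(result, operand)
{
    return SpeedyMatrixWASM.ready().then(({ wasm, memory }) => {
        const resultptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, result);
        const operandptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, operand);
        SpeedyMatrixWASM.copyToMat32(wasm, memory, operandptr, operand);
        wasm.exports.Mat32_transpose(resultptr, operandptr);
        SpeedyMatrixWASM.copyFromMat32(wasm, memory, resultptr, result);
        SpeedyMatrixWASM.deallocateMat32(wasm, memory, operandptr);
        SpeedyMatrixWASM.deallocateMat32(wasm, memory, resultptr);
        return result;
    });
}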
  901. /**
  902. * Methods called from WASM
  903. */
  904. class SpeedyMatrixWASMImports
  905. {
  906. /**
  907. * Constructor
  908. * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
  909. */
  910. constructor(memory)
  911. {
  912. // find all methods of this object
  913. const methodNames = Object.getOwnPropertyNames(this.constructor.prototype)
  914. .filter(property => typeof this[property] === 'function')
  915. .filter(property => property !== 'constructor');
  916. // bind all methods to this object
  917. methodNames.forEach(methodName => {
  918. this[methodName] = this[methodName].bind(this);
  919. });
  920. /** @type {SpeedyMatrixWASMMemory} WASM memory */
  921. this.memory = memory;
  922. /** @type {CStringUtils} utilities related to C strings */
  923. this.cstring = new CStringUtils(memory);
  924. // done!
  925. return Object.freeze(this);
  926. }
  927. /**
  928. * Prints a message
  929. * @param {number} ptr pointer to char
  930. */
  931. print(ptr)
  932. {
  933. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
  934. }
  935. /**
  936. * Throws an error
  937. * @param {number} ptr pointer to char
  938. */
  939. fatal(ptr)
  940. {
  941. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
  942. }
  943. /**
  944. * Fills a memory segment with a byte
  945. * @param {number} value byte
  946. * @param {number} start memory address, inclusive
  947. * @param {number} end memory address greater than start, exclusive
  948. */
  949. bytefill(value, start, end)
  950. {
  951. this.memory.as.uint8.fill(value, start, end);
  952. }
  953. /**
  954. * Copy a memory segment to another segment
  955. * @param {number} target memory address, where we'll start writing
  956. * @param {number} start memory address, where we'll start copying (inclusive)
  957. * @param {number} end memory address, where we'll end the copy (exclusive)
  958. */
  959. copyWithin(target, start, end)
  960. {
  961. this.memory.as.uint8.copyWithin(target, start, end);
  962. }
  963. }
  964. /**
  965. * Utilities related to C strings
  966. */
  967. class CStringUtils
  968. {
  969. /**
  970. * Constructor
  971. * @param {SpeedyMatrixWASMMemory} memory
  972. */
  973. constructor(memory)
  974. {
  975. /** @type {TextDecoder} */
  976. this._decoder = new TextDecoder('utf-8');
  977. /** @type {SpeedyMatrixWASMMemory} */
  978. this._memory = memory;
  979. }
  980. /**
  981. * Convert a C string to a JavaScript string
  982. * @param {number} ptr pointer to char
  983. * @returns {string}
  984. */
  985. get(ptr)
  986. {
  987. const byte = this._memory.as.uint8;
  988. const size = this._memory.as.uint8.byteLength;
  989. let p = ptr;
  990. while(p < size && 0 !== byte[p])
  991. ++p;
  992. return this._decoder.decode(byte.subarray(ptr, p));
  993. }
  994. }
  995. /**
  996. * WebAssembly loader
  997. * @param {SpeedyMatrixWASMMemory} memory
  998. */
  999. (function loadWASM(memory) {
  1000. const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
  1001. // Endianness check
  1002. if(!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN)
  1003. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`);
  1004. // Load the WASM binary
  1005. _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY)
  1006. .then(data => base64decode(data))
  1007. .then(bytes => WebAssembly.instantiate(bytes, {
  1008. env: {
  1009. memory: memory.as.object,
  1010. ...SpeedyMatrixWASM.imports(memory),
  1011. }
  1012. }))
  1013. .then(wasm => {
  1014. _instance = wasm.instance;
  1015. _module = wasm.module;
  1016. wasm.instance.exports.srand((Date.now() * 0.001) & 0xffffffff); // srand(time(NULL))
  1017. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
  1018. })
  1019. .catch(err => {
  1020. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
  1021. });
  1022. })(_memory);
  1023. /***/ }),
  1024. /***/ 3286:
  1025. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_34996__) => {
  1026. "use strict";
  1027. __nested_webpack_require_34996__.r(__nested_webpack_exports__);
  1028. /* harmony export */ __nested_webpack_require_34996__.d(__nested_webpack_exports__, {
  1029. /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
  1030. /* harmony export */ });
  1031. /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_34996__(4292);
  1032. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_34996__(4247);
  1033. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_34996__(8902);
  1034. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_34996__(2191);
  1035. /*
  1036. * speedy-vision.js
  1037. * GPU-accelerated Computer Vision for JavaScript
  1038. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1039. *
  1040. * Licensed under the Apache License, Version 2.0 (the "License");
  1041. * you may not use this file except in compliance with the License.
  1042. * You may obtain a copy of the License at
  1043. *
  1044. * http://www.apache.org/licenses/LICENSE-2.0
  1045. *
  1046. * Unless required by applicable law or agreed to in writing, software
  1047. * distributed under the License is distributed on an "AS IS" BASIS,
  1048. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1049. * See the License for the specific language governing permissions and
  1050. * limitations under the License.
  1051. *
  1052. * speedy-matrix.js
  1053. * Matrix class
  1054. */
  1055. /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
  1056. /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
  1057. /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
  1058. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  1059. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
  1060. /**
  1061. * Matrix class
  1062. */
  1063. class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r
  1064. {
  1065. /**
  1066. * @private
  1067. *
  1068. * Low-level constructor
  1069. * @param {number} rows number of rows
  1070. * @param {number} columns number of columns
  1071. * @param {number} step0 step size between two consecutive elements (e.g., 1)
  1072. * @param {number} step1 step size between two consecutive columns (e.g., rows)
  1073. * @param {SpeedyMatrixBufferType} data entries in column-major format
  1074. */
  1075. constructor(rows, columns, step0, step1, data)
  1076. {
  1077. super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
  1078. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
  1079. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
  1080. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(
  1081. data.length + rows * columns === 0 || // empty matrix and empty buffer, or
  1082. data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
  1083. );
  1084. /** @type {number} step size between two consecutive elements */
  1085. this._step0 = step0 | 0;
  1086. /** @type {number} step size between two consecutive columns */
  1087. this._step1 = step1 | 0;
  1088. /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
  1089. this._data = data;
  1090. }
  1091. /**
  1092. * Create a new matrix with the specified size and entries
  1093. * @param {number} rows number of rows
  1094. * @param {number} columns number of columns
  1095. * @param {number[]} entries in column-major format
  1096. * @param {SpeedyMatrixDtype} [dtype] data type
  1097. * @returns {SpeedyMatrix}
  1098. */
  1099. static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE)
  1100. {
  1101. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1102. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
  1103. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1104. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
  1105. }
  1106. /**
  1107. * Create a new matrix filled with zeros with the specified size
  1108. * @param {number} rows number of rows
  1109. * @param {number} [columns] number of columns
  1110. * @param {SpeedyMatrixDtype} [dtype] data type
  1111. * @returns {SpeedyMatrix}
  1112. */
  1113. static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE)
  1114. {
  1115. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1116. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1117. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
  1118. }
  1119. /**
  1120. * Create a new matrix filled with ones with the specified size
  1121. * @param {number} rows number of rows
  1122. * @param {number} [columns] number of columns
  1123. * @param {SpeedyMatrixDtype} [dtype] data type
  1124. * @returns {SpeedyMatrix}
  1125. */
  1126. static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE)
  1127. {
  1128. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1129. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1130. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
  1131. }
  1132. /**
  1133. * Create a new identity matrix with the specified size
  1134. * @param {number} rows number of rows
  1135. * @param {number} [columns] number of columns
  1136. * @param {SpeedyMatrixDtype} [dtype] data type
  1137. * @returns {SpeedyMatrix}
  1138. */
  1139. static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE)
  1140. {
  1141. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1142. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1143. const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
  1144. for(let j = Math.min(rows, columns) - 1; j >= 0; j--)
  1145. data[j * rows + j] = 1;
  1146. return new SpeedyMatrix(rows, columns, 1, rows, data);
  1147. }
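/*
 * Editor's usage sketch (not part of the bundle): the factory methods above
 * build matrices whose entries are stored in column-major order, e.g.:
 *
 *   const A = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]); // column-major: [[1, 3], [2, 4]]
 *   const Z = SpeedyMatrix.Zeros(4, 2);                // 4x2 matrix of zeros
 *   const O = SpeedyMatrix.Ones(2);                    // 2x2 matrix of ones
 *   const I = SpeedyMatrix.Eye(3);                     // 3x3 identity matrix
 */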
  1148. /**
  1149. * Evaluate an expression synchronously and store the result in a new matrix
  1150. * @param {SpeedyMatrixExpr} expr matrix expression
  1151. * @returns {SpeedyMatrix}
  1152. */
  1153. static From(expr)
  1154. {
  1155. return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
  1156. }
  1157. /**
1158. * Returns a promise that resolves immediately if the WebAssembly routines
1159. * are ready, or as soon as they become ready
  1160. * @returns {SpeedyPromise<void>}
  1161. */
  1162. static ready()
  1163. {
  1164. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void(0));
  1165. }
  1166. /**
  1167. * Get the underlying buffer
  1168. * @returns {SpeedyMatrixBufferType}
  1169. */
  1170. get data()
  1171. {
  1172. return this._data;
  1173. }
  1174. /**
  1175. * Row-step
  1176. * @returns {number} defaults to 1
  1177. */
  1178. get step0()
  1179. {
  1180. return this._step0;
  1181. }
  1182. /**
  1183. * Column-step
  1184. * @returns {number} defaults to this.rows
  1185. */
  1186. get step1()
  1187. {
  1188. return this._step1;
  1189. }
  1190. /**
1191. * Extract a block from this matrix. The returned block shares this matrix's underlying buffer
  1192. * @param {number} firstRow
  1193. * @param {number} lastRow
  1194. * @param {number} firstColumn
  1195. * @param {number} lastColumn
  1196. * @returns {SpeedyMatrix}
  1197. */
  1198. block(firstRow, lastRow, firstColumn, lastColumn)
  1199. {
  1200. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(
  1201. firstRow <= lastRow && firstColumn <= lastColumn,
  1202. `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`
  1203. );
  1204. // ensure that the indices are within bounds
  1205. firstRow = Math.max(firstRow, 0);
  1206. lastRow = Math.min(lastRow, this._rows - 1);
  1207. firstColumn = Math.max(firstColumn, 0);
  1208. lastColumn = Math.min(lastColumn, this._columns - 1);
  1209. // compute the dimensions of the new submatrix
  1210. const rows = lastRow - firstRow + 1;
  1211. const columns = lastColumn - firstColumn + 1;
  1212. // obtain the relevant portion of the data
  1213. const step0 = this._step0, step1 = this._step1;
  1214. const begin = firstRow * step0 + firstColumn * step1; // inclusive
  1215. const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
  1216. // create new matrix
  1217. return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
  1218. }
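/*
 * Editor's note (sketch, not part of the bundle): block(), row(), column() and
 * diagonal() return views backed by the same buffer, so writes to a view are
 * visible in the parent matrix:
 *
 *   const M = SpeedyMatrix.Zeros(3);
 *   M.column(1).fillSync(7);   // fills the middle column of M in place
 *   M.at(0, 1);                // 7
 */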
  1219. /**
  1220. * Extract a row from this matrix
  1221. * @param {number} index 0-based
  1222. * @returns {SpeedyMatrix}
  1223. */
  1224. row(index)
  1225. {
  1226. return this.block(index, index, 0, this._columns - 1);
  1227. }
  1228. /**
  1229. * Extract a column from this matrix
  1230. * @param {number} index 0-based
  1231. * @returns {SpeedyMatrix}
  1232. */
  1233. column(index)
  1234. {
  1235. return this.block(0, this._rows - 1, index, index);
  1236. }
  1237. /**
  1238. * Extract the main diagonal from this matrix
  1239. * @returns {SpeedyMatrix} as a column-vector
  1240. */
  1241. diagonal()
  1242. {
  1243. const diagsize = Math.min(this._rows, this._columns);
  1244. // compute the dimensions of the new submatrix
  1245. const rows = diagsize; // make it a column vector
  1246. const columns = 1;
  1247. // obtain the relevant portion of the data
  1248. const diagstep = this._step0 + this._step1; // jump a row and a column
  1249. const begin = 0; // inclusive
  1250. const end = 1 + (diagsize - 1) * diagstep; // exclusive
  1251. // create new matrix
  1252. return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
  1253. }
  1254. /**
  1255. * Read a single entry of this matrix
  1256. * @param {number} row 0-based index
  1257. * @param {number} column 0-based index
  1258. * @returns {number}
  1259. */
  1260. at(row, column)
  1261. {
  1262. if(row >= 0 && row < this._rows && column >= 0 && column < this._columns)
  1263. return this._data[this._step0 * row + this._step1 * column];
  1264. else
  1265. return Number.NaN;
  1266. }
  1267. /**
  1268. * Read the entries of the matrix in column-major format
  1269. * @returns {number[]}
  1270. */
  1271. read()
  1272. {
  1273. const entries = new Array(this._rows * this._columns);
  1274. const step0 = this._step0, step1 = this._step1;
  1275. let i = 0;
  1276. for(let column = 0; column < this._columns; column++) {
  1277. for(let row = 0; row < this._rows; row++)
  1278. entries[i++] = this._data[row * step0 + column * step1];
  1279. }
  1280. return entries;
  1281. }
  1282. /**
  1283. * Returns a human-readable string representation of the matrix
  1284. * @returns {string}
  1285. */
  1286. toString()
  1287. {
  1288. const DECIMALS = 5;
  1289. const rows = this.rows, columns = this.columns;
  1290. const entries = this.read();
  1291. const mat = /** @type {number[][]} */ ( new Array(rows) );
  1292. for(let i = 0; i < rows; i++) {
  1293. mat[i] = new Array(columns);
  1294. for(let j = 0; j < columns; j++)
  1295. mat[i][j] = entries[j * rows + i];
  1296. }
  1297. const fix = x => x.toFixed(DECIMALS);
  1298. const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
  1299. const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
  1300. return str;
  1301. }
  1302. /**
  1303. * Set the contents of this matrix to the result of an expression
  1304. * @param {SpeedyMatrixExpr} expr matrix expression
  1305. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1306. */
  1307. setTo(expr)
  1308. {
  1309. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
  1310. // TODO: add support for WebWorkers
  1311. return this.setToSync(expr);
  1312. });
  1313. }
  1314. /**
  1315. * Synchronously set the contents of this matrix to the result of an expression
  1316. * @param {SpeedyMatrixExpr} expr matrix expression
  1317. * @returns {SpeedyMatrix} this
  1318. */
  1319. setToSync(expr)
  1320. {
  1321. const { wasm, memory } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
  1322. // evaluate the expression
  1323. const result = expr._evaluate(wasm, memory);
  1324. /*
  1325. // shallow copy the results to this matrix
  1326. // limitation: can't handle blocks properly
  1327. // (a tree-like structure could be useful)
  1328. this._rows = result.rows;
  1329. this._columns = result.columns;
  1330. //this._dtype = result.dtype;
  1331. this._data = result.data;
  1332. this._step0 = result.step0;
  1333. this._step1 = result.step1;
  1334. */
  1335. // validate shape
  1336. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(
  1337. this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype,
  1338. `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`
  1339. );
  1340. // deep copy
  1341. const step0 = this._step0, step1 = this._step1, rstep0 = result._step0, rstep1 = result._step1;
  1342. if(step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
  1343. // fast copy
  1344. this._data.set(result._data);
  1345. }
  1346. else {
  1347. // copy each element
  1348. for(let column = this._columns - 1; column >= 0; column--) {
  1349. for(let row = this._rows - 1; row >= 0; row--)
  1350. this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
  1351. }
  1352. }
  1353. // done!
  1354. return this;
  1355. }
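/*
 * Editor's sketch of typical usage (hedged; `expr` stands for any SpeedyMatrixExpr
 * built elsewhere in the library): setToSync() assumes the WASM routines are
 * already loaded, so either wait on SpeedyMatrix.ready() first or use setTo(),
 * which does the waiting internally:
 *
 *   SpeedyMatrix.ready().then(() => {
 *       const C = SpeedyMatrix.Zeros(3);
 *       C.setToSync(expr); // expr: a 3x3 SpeedyMatrixExpr
 *   });
 */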
  1356. /**
  1357. * Fill this matrix with a scalar value
  1358. * @param {number} value
  1359. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1360. */
  1361. fill(value)
  1362. {
  1363. this.fillSync(value);
  1364. return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
  1365. }
  1366. /**
  1367. * Synchronously fill this matrix with a scalar value
  1368. * @param {number} value
  1369. * @returns {SpeedyMatrix} this
  1370. */
  1371. fillSync(value)
  1372. {
  1373. value = +value;
  1374. if(this._rows * this._columns === this._data.length) {
  1375. this._data.fill(value);
  1376. return this;
  1377. }
  1378. for(let column = 0; column < this._columns; column++) {
  1379. for(let row = 0; row < this._rows; row++) {
  1380. this._data[row * this._step0 + column * this._step1] = value;
  1381. }
  1382. }
  1383. return this;
  1384. }
  1385. /**
  1386. * Evaluate this expression
  1387. * @param {WebAssembly.Instance} wasm
  1388. * @param {SpeedyMatrixWASMMemory} memory
  1389. * @returns {SpeedyMatrix}
  1390. */
  1391. _evaluate(wasm, memory)
  1392. {
  1393. return this;
  1394. }
  1395. }
  1396. /***/ }),
  1397. /***/ 416:
  1398. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_51381__) => {
  1399. "use strict";
  1400. /* harmony export */ __nested_webpack_require_51381__.d(__nested_webpack_exports__, {
  1401. /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
  1402. /* harmony export */ });
  1403. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_51381__(5619);
  1404. /*
  1405. * speedy-vision.js
  1406. * GPU-accelerated Computer Vision for JavaScript
  1407. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1408. *
  1409. * Licensed under the Apache License, Version 2.0 (the "License");
  1410. * you may not use this file except in compliance with the License.
  1411. * You may obtain a copy of the License at
  1412. *
  1413. * http://www.apache.org/licenses/LICENSE-2.0
  1414. *
  1415. * Unless required by applicable law or agreed to in writing, software
  1416. * distributed under the License is distributed on an "AS IS" BASIS,
  1417. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1418. * See the License for the specific language governing permissions and
  1419. * limitations under the License.
  1420. *
  1421. * speedy-namespace.js
  1422. * Symbolizes a namespace
  1423. */
  1424. /**
  1425. * An abstract namespace
  1426. * @abstract
  1427. */
  1428. class SpeedyNamespace
  1429. {
  1430. /**
  1431. * Namespaces can't be instantiated.
  1432. * Only static methods are allowed.
  1433. * @abstract
  1434. * @throws SpeedyError
  1435. */
  1436. constructor()
  1437. {
  1438. // only static methods are allowed
  1439. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
  1440. }
  1441. }
  1442. /***/ }),
  1443. /***/ 8902:
  1444. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_52921__) => {
  1445. "use strict";
  1446. /* harmony export */ __nested_webpack_require_52921__.d(__nested_webpack_exports__, {
  1447. /* harmony export */ i: () => (/* binding */ SpeedyPromise)
  1448. /* harmony export */ });
  1449. /*
  1450. * speedy-vision.js
  1451. * GPU-accelerated Computer Vision for JavaScript
  1452. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1453. *
  1454. * Licensed under the Apache License, Version 2.0 (the "License");
  1455. * you may not use this file except in compliance with the License.
  1456. * You may obtain a copy of the License at
  1457. *
  1458. * http://www.apache.org/licenses/LICENSE-2.0
  1459. *
  1460. * Unless required by applicable law or agreed to in writing, software
  1461. * distributed under the License is distributed on an "AS IS" BASIS,
  1462. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1463. * See the License for the specific language governing permissions and
  1464. * limitations under the License.
  1465. *
  1466. * speedy-promise.js
  1467. * Speedy Promises: a fast implementation of Promises
  1468. */
  1469. const PENDING = 0;
  1470. const FULFILLED = 1;
  1471. const REJECTED = 2;
  1472. const SUSPEND_ASYNC = 1;
  1473. const asap = (typeof queueMicrotask !== 'undefined' && queueMicrotask) || // browsers
  1474. (typeof process !== 'undefined' && process.nextTick) || // node.js
  1475. (f => Promise.resolve().then(() => f())); // most compatible
  1476. /**
  1477. * SpeedyPromise: Super Fast Promises. SpeedyPromises can
  1478. * interoperate with ES6 Promises. This implementation is
  1479. * based on the Promises/A+ specification.
  1480. * @template T
  1481. */
  1482. class SpeedyPromise
  1483. {
  1484. /**
  1485. * Constructor
  1486. * @param {function(function(T=): void, function(Error): void): void} callback
  1487. */
  1488. constructor(callback)
  1489. {
  1490. this._state = PENDING;
  1491. this._value = undefined;
  1492. this._onFulfillment = null;
  1493. this._onRejection = null;
  1494. this._children = 0;
  1495. this[0] = this;
  1496. this._parent = undefined;
  1497. this._flags = 0;
  1498. this._fulfill = this._fulfill.bind(this);
  1499. this._reject = this._reject.bind(this);
  1500. this._resolve = this._resolve.bind(this);
  1501. this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
  1502. callback(this._fulfill, this._reject);
  1503. }
  1504. /**
1505. * Set up handlers
  1506. * @template U, V=never
  1507. * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
  1508. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1509. * @returns {SpeedyPromise<U>}
  1510. */
  1511. then(onFulfillment, onRejection = null)
  1512. {
  1513. const child = new SpeedyPromise(this._nop);
  1514. child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
  1515. child._onRejection = typeof onRejection === 'function' && onRejection;
  1516. child._parent = this;
  1517. this[this._children++] = child; // attach child
  1518. this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
  1519. this._notify();
  1520. return child;
  1521. }
  1522. /**
1523. * Set up a rejection handler
  1524. * @template U, V=never
  1525. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1526. * @returns {SpeedyPromise<V>}
  1527. */
  1528. catch(onRejection)
  1529. {
  1530. return this.then(null, onRejection);
  1531. }
  1532. /**
  1533. * Execute a callback when the promise is settled
  1534. * (i.e., fulfilled or rejected)
  1535. * @param {function(): void} onFinally
  1536. * @returns {SpeedyPromise<T>}
  1537. */
  1538. finally(onFinally)
  1539. {
  1540. const fn = val => { onFinally(); return val; };
  1541. return this.then(fn, fn);
  1542. }
  1543. /**
  1544. * Start the computation immediately, synchronously.
  1545. * Can't afford to spend any time at all waiting for micro-tasks, etc.
  1546. * @returns {SpeedyPromise<T>} this
  1547. */
  1548. turbocharge()
  1549. {
  1550. let my = this;
  1551. // suspend the async behavior
  1552. this._flags |= SUSPEND_ASYNC;
  1553. while(my._parent !== undefined) {
  1554. my = my._parent;
  1555. my._flags |= SUSPEND_ASYNC;
  1556. }
  1557. // notify the children of the root
  1558. my._notify(); // will be synchronous
  1559. // return this SpeedyPromise
  1560. return this;
  1561. }
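/*
 * Editor's sketch (not part of the bundle): turbocharge() suppresses the
 * microtask scheduling done by _notify(), so handlers of already-settled links
 * in the chain run synchronously:
 *
 *   const p = SpeedyPromise.resolve(42).then(x => x + 1);
 *   p.turbocharge(); // the then() handler runs now, not on the next microtask
 */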
  1562. /**
  1563. * Convert to string
  1564. * @returns {string}
  1565. */
  1566. toString()
  1567. {
  1568. switch(this._state) {
  1569. case PENDING:
  1570. return `SpeedyPromise { <pending> }`;
  1571. case FULFILLED:
  1572. return `SpeedyPromise { <fulfilled> ${this._value} }`;
  1573. case REJECTED:
  1574. return `SpeedyPromise { <rejected> ${this._value} }`;
  1575. default:
  1576. return '';
  1577. }
  1578. }
  1579. /**
  1580. * Symbol.toStringTag
  1581. * @returns {string}
  1582. */
  1583. get [Symbol.toStringTag]()
  1584. {
  1585. return 'SpeedyPromise';
  1586. }
  1587. /**
  1588. * Creates a resolved SpeedyPromise
  1589. * @template U
  1590. * @param {U} [value]
  1591. * @returns {SpeedyPromise<U>}
  1592. */
  1593. static resolve(value)
  1594. {
  1595. const promise = new SpeedyPromise(this._snop);
  1596. if((typeof value === 'object' && value !== null && 'then' in value) || (typeof value === 'function' && 'then' in value)) {
  1597. // resolve asynchronously
  1598. promise._resolve(value);
  1599. }
  1600. else {
  1601. // fulfill synchronously
  1602. promise._value = value;
  1603. promise._state = FULFILLED;
  1604. }
  1605. return promise;
  1606. }
  1607. /**
  1608. * Creates a rejected SpeedyPromise
  1609. * @template U
  1610. * @param {Error} reason
  1611. * @returns {SpeedyPromise<U>}
  1612. */
  1613. static reject(reason)
  1614. {
  1615. const promise = new SpeedyPromise(this._snop);
  1616. promise._value = reason;
  1617. promise._state = REJECTED;
  1618. return promise;
  1619. }
  1620. /**
  1621. * Returns a SpeedyPromise that resolves to an array
  1622. * containing the results of the input promises/values,
  1623. * in their given order. The returned SpeedyPromise will
  1624. * resolve if all input promises resolve, or reject if
  1625. * any input promise rejects.
  1626. * @template U
  1627. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1628. * @returns {SpeedyPromise<U[]>}
  1629. *
  1630. * FIXME iterables need not be all <U>
  1631. */
  1632. static all(iterable)
  1633. {
  1634. return new SpeedyPromise((resolve, reject) => {
  1635. const input = [];
  1636. // get elements
  1637. for(const element of iterable)
  1638. input.push(element);
  1639. // resolve synchronously if there are no elements
  1640. const length = input.length;
  1641. if(length == 0) {
  1642. resolve([]);
  1643. return;
  1644. }
  1645. // resolve asynchronously
  1646. let counter = length;
  1647. const output = new Array(length);
  1648. const partialResolve = i => (val => { output[i] = val; if(0 == --counter) resolve(output); });
  1649. for(let i = 0; i < length; i++) {
  1650. const element = input[i];
  1651. if(element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype)
  1652. element.then(partialResolve(i), reject);
  1653. else
  1654. SpeedyPromise.resolve(element).then(partialResolve(i), reject);
  1655. }
  1656. });
  1657. }
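/*
 * Editor's sketch: all() accepts a mix of SpeedyPromises, native Promises and
 * plain values, and resolves with the results in input order:
 *
 *   SpeedyPromise.all([ SpeedyPromise.resolve(1), Promise.resolve(2), 3 ])
 *       .then(values => console.log(values)); // [1, 2, 3]
 */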
  1658. /**
  1659. * Returns a promise that gets fulfilled or rejected as soon
  1660. * as the first promise in the iterable gets fulfilled or
  1661. * rejected (with its value/reason).
  1662. * @template U
  1663. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1664. * @returns {SpeedyPromise<U>}
  1665. */
  1666. static race(iterable)
  1667. {
  1668. return new SpeedyPromise((resolve, reject) => {
  1669. const input = [];
  1670. // get elements
  1671. for(const element of iterable)
  1672. input.push(element);
  1673. // if the iterable is empty, the promise
  1674. // will be pending forever...
  1675. // resolve asynchronously
  1676. const length = input.length;
  1677. for(let i = 0; i < length; i++) {
  1678. const element = input[i];
  1679. if(element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype)
  1680. element.then(resolve, reject);
  1681. else
  1682. SpeedyPromise.resolve(element).then(resolve, reject);
  1683. }
  1684. });
  1685. }
  1686. /**
  1687. * Fulfill this promise with a value
  1688. * @param {T} value
  1689. */
  1690. _fulfill(value)
  1691. {
  1692. this._setState(FULFILLED, value);
  1693. }
  1694. /**
  1695. * Reject this promise with a reason
  1696. * @param {Error} reason
  1697. */
  1698. _reject(reason)
  1699. {
  1700. this._setState(REJECTED, reason);
  1701. }
  1702. /**
  1703. * Set the state and the value of this promise
  1704. * @param {number} state
  1705. * @param {T|Error} value
  1706. */
  1707. _setState(state, value)
  1708. {
  1709. // the promise is already fulfilled or rejected
  1710. if(this._state != PENDING)
  1711. return;
  1712. // set the new state
  1713. this._state = state;
  1714. this._value = value;
  1715. this._notify();
  1716. }
  1717. /**
  1718. * Notify my children that this promise is no
  1719. * longer pending. This is an async operation:
1720. * my children will be notified "as soon
  1721. * as possible" (it will be scheduled).
  1722. * We may force this to be synchronous, though
  1723. */
  1724. _notify()
  1725. {
  1726. // nothing to do
  1727. if(this._state == PENDING)
  1728. return;
  1729. // have we turbocharged this promise?
  1730. if(this._flags & SUSPEND_ASYNC) {
  1731. this._broadcast(); // execute synchronously
  1732. return;
  1733. }
  1734. // install a timer (default behavior)
  1735. asap(this._broadcastIfAsync);
  1736. }
  1737. /**
  1738. * Helper method
  1739. */
  1740. _broadcastIfAsync()
  1741. {
  1742. // we may have installed a timer at some
  1743. // point, but turbocharged the promise later
  1744. if(!(this._flags & SUSPEND_ASYNC))
  1745. this._broadcast();
  1746. }
  1747. /**
  1748. * Tell my children that this promise
  1749. * is either fulfilled or rejected.
  1750. * This is a synchronous operation
  1751. */
  1752. _broadcast()
  1753. {
  1754. const children = this._children;
  1755. const state = this._state;
  1756. if(state === FULFILLED) {
  1757. for(let i = 0; i < children; i++) {
  1758. const child = this[i];
  1759. const callback = child._onFulfillment;
  1760. try {
  1761. if(callback) {
  1762. if(callback !== child._nop) {
  1763. child._resolve(callback(this._value)); // promise resolution procedure
  1764. child._onFulfillment = child._nop; // will not be called again
  1765. }
  1766. }
  1767. else
  1768. child._fulfill(this._value);
  1769. }
  1770. catch(e) {
  1771. child._reject(e);
  1772. }
  1773. }
  1774. }
  1775. else if(state === REJECTED) {
  1776. for(let i = 0; i < children; i++) {
  1777. const child = this[i];
  1778. const callback = child._onRejection;
  1779. try {
  1780. if(callback) {
  1781. if(callback !== child._nop) {
  1782. child._resolve(callback(this._value)); // promise resolution procedure
  1783. child._onRejection = child._nop; // will not be called again
  1784. }
  1785. }
  1786. else
  1787. child._reject(this._value);
  1788. }
  1789. catch(e) {
  1790. child._reject(e);
  1791. }
  1792. }
  1793. }
  1794. }
  1795. /**
  1796. * Promise Resolution Procedure
  1797. * based on the Promises/A+ spec
  1798. * @param {T} x
  1799. */
  1800. _resolve(x)
  1801. {
  1802. if((typeof x !== 'object' && typeof x !== 'function') || (x === null)) { // if(x !== Object(x))
  1803. this._fulfill(x);
  1804. return;
  1805. }
  1806. if(x === this)
  1807. throw new TypeError(); // Circular reference
  1808. if(x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
  1809. x.then(this._resolve, this._reject);
  1810. return;
  1811. }
  1812. try {
  1813. const then = x.then;
  1814. if(typeof then === 'function') {
  1815. let resolve = this._resolve, reject = this._reject;
  1816. try {
  1817. then.call(x,
  1818. y => { resolve(y); resolve = reject = this._nop; },
  1819. r => { reject(r); resolve = reject = this._nop; }
  1820. );
  1821. }
  1822. catch(e) {
  1823. if(resolve !== this._nop && reject !== this._nop)
  1824. this._reject(e);
  1825. }
  1826. }
  1827. else {
  1828. this._fulfill(x);
  1829. }
  1830. }
  1831. catch(e) {
  1832. this._reject(e);
  1833. }
  1834. }
  1835. /**
  1836. * No-operation
  1837. */
  1838. _nop()
  1839. {
  1840. }
  1841. /**
  1842. * Static no-operation
  1843. */
  1844. static _snop()
  1845. {
  1846. }
  1847. }
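/*
 * Editor's sketch of basic usage (not part of the bundle): SpeedyPromise
 * follows the familiar thenable contract and interoperates with native
 * Promises:
 *
 *   new SpeedyPromise((resolve, reject) => resolve('ok'))
 *       .then(value => value.toUpperCase())
 *       .catch(err => console.error(err))
 *       .finally(() => console.log('settled'));
 */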
  1848. //module.exports = { SpeedyPromise };
  1849. /*
  1850. // Uncomment to test performance with regular Promises
  1851. module.exports = { SpeedyPromise: Promise };
  1852. Promise.prototype.turbocharge = function() { return this };
  1853. */
  1854. /***/ }),
  1855. /***/ 3112:
  1856. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_66386__) => {
  1857. "use strict";
  1858. // EXPORTS
  1859. __nested_webpack_require_66386__.d(__nested_webpack_exports__, {
  1860. gx: () => (/* binding */ createShader),
  1861. bf: () => (/* binding */ importShader)
  1862. });
  1863. // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
  1864. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  1865. var speedy_gl = __nested_webpack_require_66386__(1567);
  1866. // EXTERNAL MODULE: ./src/utils/utils.js
  1867. var utils = __nested_webpack_require_66386__(2191);
  1868. // EXTERNAL MODULE: ./src/utils/types.js
  1869. var types = __nested_webpack_require_66386__(6467);
  1870. // EXTERNAL MODULE: ./src/utils/errors.js
  1871. var errors = __nested_webpack_require_66386__(5619);
  1872. ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
  1873. /*
  1874. * speedy-vision.js
  1875. * GPU-accelerated Computer Vision for JavaScript
  1876. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1877. *
  1878. * Licensed under the Apache License, Version 2.0 (the "License");
  1879. * you may not use this file except in compliance with the License.
  1880. * You may obtain a copy of the License at
  1881. *
  1882. * http://www.apache.org/licenses/LICENSE-2.0
  1883. *
  1884. * Unless required by applicable law or agreed to in writing, software
  1885. * distributed under the License is distributed on an "AS IS" BASIS,
  1886. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1887. * See the License for the specific language governing permissions and
  1888. * limitations under the License.
  1889. *
  1890. * shader-preprocessor.js
  1891. * Custom preprocessor for shaders
  1892. */
  1893. /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
  1894. /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
  1895. // Import numeric globals
  1896. const globals = __nested_webpack_require_66386__(1814);
  1897. const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */ (
  1898. Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce(
  1899. (obj, key) => ((obj[key] = globals[key]), obj),
  1900. {}
  1901. )
  1902. );
  1903. /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
  1904. const basicConstants = Object.freeze({
  1905. // numeric globals
  1906. ...numericGlobals,
  1907. // fragment shader
  1908. 'FS_USE_CUSTOM_PRECISION': 0, // use default precision settings
  1909. 'FS_OUTPUT_TYPE': 0, // normalized RGBA
  1910. // colors
  1911. 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
  1912. 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
  1913. 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
  1914. 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA,
  1915. });
  1916. /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
  1917. const platformConstants = (platform, glRenderer) => Object.freeze({
  1918. 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0, // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
  1919. 'APPLE_GPU': /Apple/.test(glRenderer) | 0, // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
  1920. 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0, // Intel[(R)] ... [HD] Graphics xyz ...
  1921. });
  1922. // Regular Expressions
  1923. const commentsRegex = [ /\/\*(.|\s)*?\*\//g , /\/\/.*$/gm ];
  1924. const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
  1925. const constantRegex = /@(\w+)@/g;
  1926. const unrollRegex = [
  1927. /@\s*unroll\s+?for\s*\(\s*(int|)\s*(?<counter>\w+)\s*=\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*(<=?)\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g,
  1928. /@\s*unroll\s+?for\s*\(\s*(int|)\s*(?<counter>\w+)\s*=\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*(<=?)\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*\+=\s*(-?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g,
  1929. ];
  1930. /**
  1931. * Custom preprocessor for the shaders
  1932. */
  1933. class ShaderPreprocessor
  1934. {
  1935. /**
  1936. * Runs the preprocessor and generates GLSL code
  1937. * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
  1938. * @param {string} infix annotated GLSL code
  1939. * @param {string} [prefix]
  1940. * @param {string} [suffix]
  1941. * @returns {string} preprocessed GLSL code
  1942. */
  1943. static generateGLSL(defines, infix, prefix = null, suffix = null)
  1944. {
  1945. //
  1946. // The preprocessor will remove comments from GLSL code,
  1947. // include requested GLSL files and import global constants
  1948. // defined for all shaders (see above)
  1949. //
  1950. const errors = []; // compile-time errors
  1951. const constants = generateConstants(defines);
  1952. const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
  1953. return unrollLoops(
  1954. annotatedGLSL
  1955. .replace(commentsRegex[0], '')
  1956. .replace(commentsRegex[1], '')
  1957. .replace(constantRegex, (_, name) => String(
  1958. // Replace preprocessor @CONSTANTS@ by their numeric values
  1959. constants.has(name) ? Number(constants.get(name)) : (
  1960. errors.push(`Undefined constant ${name}`), 0
  1961. )
  1962. ))
  1963. .replace(includeRegex, (_, filename) =>
  1964. // Included files may include other files.
  1965. // XXX no cycle detection!
  1966. ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))
  1967. ),
  1968. defines
  1969. ) + errors.map(msg => `\n#error ${msg}\n`).join('');
  1970. }
  1971. }
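/*
 * Editor's sketch (hedged; the input below is illustrative, not taken from the
 * library sources): generateGLSL() strips comments, replaces @NAME@ tokens by
 * the numeric constants collected above and inlines @include'd files, so an
 * annotated fragment such as
 *
 *   @include "global.glsl"
 *   void main() { color = vec4(float(@PIXELCOMPONENT_RED@)); }
 *
 * is expanded into plain GLSL (PIXELCOMPONENT_RED replaced by its numeric
 * value, global.glsl pasted in) before compilation.
 */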
  1972. /**
  1973. * Generate GLSL code based on the input arguments
  1974. * @param {ShaderPreprocessorConstants} defines
  1975. * @param {string} infix
  1976. * @param {string} [prefix]
  1977. * @param {string} [suffix]
  1978. * @returns {string} GLSL code
  1979. */
  1980. function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null)
  1981. {
  1982. const parts = [];
  1983. if(prefix !== null)
  1984. parts.push(prefix);
  1985. for(const [key, value] of defines)
  1986. parts.push(`#define ${key} ${Number(value)}`);
  1987. parts.push(infix);
  1988. if(suffix !== null)
  1989. parts.push(suffix);
  1990. return parts.join('\n');
  1991. }
  1992. /**
  1993. * Generate pre-processor constants. Constants provided by the
  1994. * user have higher priority than globally available constants.
  1995. * @param {ShaderPreprocessorConstants} defines user-provided
  1996. * @returns {ShaderPreprocessorConstants}
  1997. */
  1998. function generateConstants(defines)
  1999. {
  2000. utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
  2001. const myConstants = /** @type {ShaderPreprocessorConstants} */ ( new Map() );
  2002. const globalConstants = Object.assign(Object.create(null),
  2003. basicConstants,
  2004. platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer)
  2005. );
  2006. // globally available constants have lower priority
  2007. for(const key in globalConstants) {
  2008. //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
  2009. myConstants.set(key, globalConstants[key]);
  2010. }
  2011. // user-defined constants have higher priority
  2012. for(const [key, value] of defines)
  2013. myConstants.set(key, value);
  2014. // done!
  2015. return myConstants;
  2016. }
  2017. /**
  2018. * Reads a shader from the shaders/include/ folder
  2019. * @param {string} filename
  2020. * @returns {string}
  2021. */
  2022. function readfileSync(filename)
  2023. {
  2024. if(String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/))
  2025. return __nested_webpack_require_66386__(5235)("./" + filename);
  2026. throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
  2027. }
  2028. /**
  2029. * Unroll for loops in our own preprocessor
  2030. * @param {string} code
  2031. * @param {ShaderPreprocessorConstants} defines
  2032. * @returns {string}
  2033. */
  2034. function unrollLoops(code, defines)
  2035. {
  2036. //
  2037. // Currently, only integer for loops with positive step values
  2038. // can be unrolled. (TODO: negative step values?)
  2039. //
  2040. // The current implementation does not support curly braces
  2041. // inside unrolled loops. You may define macros to get around
  2042. // this, but do you actually need to unroll such loops?
  2043. //
  2044. // Loops that don't fit the supported pattern will crash
  2045. // the preprocessor if you try to unroll them.
  2046. //
  2047. const fn = unroll.bind(defines); // CRAZY!
  2048. const n = unrollRegex.length;
  2049. for(let i = 0; i < n; i++)
  2050. code = code.replace(unrollRegex[i], fn);
  2051. return code;
  2052. }
  2053. /**
  2054. * Unroll a loop pattern (regexp)
  2055. * @param {string} match the matched for loop
  2056. * @param {string} type
  2057. * @param {string} counter
  2058. * @param {string} start
  2059. * @param {string} cmp
  2060. * @param {string} end
  2061. * @param {string} step
  2062. * @param {string} loopcode
  2063. * @returns {string} unrolled loop
  2064. */
  2065. function unroll(match, type, counter, start, cmp, end, step, loopcode)
  2066. {
  2067. const defines = /** @type {ShaderPreprocessorConstants} */ ( this );
  2068. // check if the loop limits are numeric constants or #defined numbers from the outside
  2069. const hasStart = Number.isFinite(+start) || defines.has(start);
  2070. const hasEnd = Number.isFinite(+end) || defines.has(end);
  2071. if(!hasStart || !hasEnd) {
  2072. if(defines.size > 0)
  2073. throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);
  2074. else
  2075. return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
  2076. }
  2077. // parse and validate limits & step
  2078. let istart = defines.has(start) ? defines.get(start) : parseInt(start);
  2079. let iend = defines.has(end) ? defines.get(end) : parseInt(end);
  2080. let istep = (step.length == 0) ? 1 : parseInt(step);
  2081. utils/* Utils */.A.assert(istart <= iend && istep > 0);
  2082. /*
  2083. // debug
2084. console.log(`Found "${match}"`);
  2085. console.log(`type="${type}"`);
  2086. console.log(`counter="${counter}"`);
  2087. console.log(`start="${start}"`);
  2088. console.log(`cmp="${cmp}"`);
  2089. console.log(`end="${end}"`);
  2090. console.log(`step="${step}"`);
  2091. console.log(`loopcode="${loopcode}"`)
  2092. console.log('Defines:', defines);
  2093. */
  2094. // continue statements are not supported inside unrolled loops
  2095. // and will generate a compiler error. Using break is ok.
  2096. const hasBreak = (loopcode.match(/\bbreak\s*;/) !== null);
  2097. // create a new scope
  2098. let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
  2099. // declare counter
  2100. unrolledCode += `${type} ${counter};\n`;
  2101. // unroll loop
  2102. iend += (cmp == '<=') ? 1 : 0;
  2103. for(let i = istart; i < iend; i += istep)
  2104. unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
  2105. // close scope
  2106. unrolledCode += '}\n';
  2107. //console.log('Unrolled code:\n\n' + unrolledCode);
  2108. // done!
  2109. return unrolledCode;
  2110. }
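/*
 * Editor's sketch of what the unroller produces (whitespace condensed; foo is
 * an illustrative GLSL function, not part of this library). An annotated loop
 * such as
 *
 *   @unroll for(int i = 0; i < 3; i++) { sum += foo(i); }
 *
 * becomes a fresh scope that declares the counter and repeats the body once
 * per iteration, assigning the counter each time:
 *
 *   { int i; { i = 0; sum += foo(i); } { i = 1; sum += foo(i); } { i = 2; sum += foo(i); } }
 */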
  2111. ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
  2112. /*
  2113. * speedy-vision.js
  2114. * GPU-accelerated Computer Vision for JavaScript
  2115. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2116. *
  2117. * Licensed under the Apache License, Version 2.0 (the "License");
  2118. * you may not use this file except in compliance with the License.
  2119. * You may obtain a copy of the License at
  2120. *
  2121. * http://www.apache.org/licenses/LICENSE-2.0
  2122. *
  2123. * Unless required by applicable law or agreed to in writing, software
  2124. * distributed under the License is distributed on an "AS IS" BASIS,
  2125. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2126. * See the License for the specific language governing permissions and
  2127. * limitations under the License.
  2128. *
  2129. * shader-declaration.js
  2130. * Encapsulates a shader declaration
  2131. */
  2132. const DEFAULT_ATTRIBUTES = Object.freeze({
  2133. position: 'a_position',
  2134. texCoord: 'a_texCoord'
  2135. });
  2136. const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
  2137. position: 0, // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  2138. texCoord: 1,
  2139. });
  2140. const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
  2141. precision highp float;
  2142. precision highp int;
  2143. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
  2144. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
  2145. out highp vec2 texCoord;
  2146. uniform highp vec2 texSize;
  2147. #define vsinit() \
  2148. gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
  2149. texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
  2150. \n\n`;
  2151. const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
  2152. const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
  2153. const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
  2154. #if @FS_USE_CUSTOM_PRECISION@ == 0
  2155. precision mediump float; // ~float16
  2156. precision mediump sampler2D;
  2157. precision highp int; // int32
  2158. #endif
  2159. #if @FS_OUTPUT_TYPE@ == 0
  2160. #define OUT_TYPE mediump vec4
  2161. #elif @FS_OUTPUT_TYPE@ == 1
  2162. #define OUT_TYPE mediump ivec4
  2163. #elif @FS_OUTPUT_TYPE@ == 2
  2164. #define OUT_TYPE mediump uvec4
  2165. #else
  2166. #error Unknown FS_OUTPUT_TYPE
  2167. #endif
  2168. out OUT_TYPE color;
  2169. in highp vec2 texCoord;
  2170. uniform highp vec2 texSize;
  2171. @include "global.glsl"\n\n`;
  2172. const PRIVATE_TOKEN = Symbol();
  2173. /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
  2174. /** @typedef {string[]} ShaderDeclarationArgumentList */
  2175. /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
  2176. /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
  2177. /**
  2178. * Shader Declaration
  2179. * @abstract
  2180. */
  2181. class ShaderDeclaration
  2182. {
  2183. /**
  2184. * @private Constructor
  2185. * @param {Symbol} privateToken
  2186. * @param {ShaderDeclarationArgumentList} argumentList
  2187. * @param {ShaderDeclarationPreprocessorConstants} defines
  2188. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2189. * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
  2190. */
  2191. constructor(privateToken, argumentList, defines, fsSource, vsSource)
  2192. {
  2193. // private constructor!
  2194. if(privateToken !== PRIVATE_TOKEN)
  2195. throw new errors/* IllegalOperationError */.Er();
  2196. /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
  2197. this._arguments = [...argumentList];
  2198. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2199. this._defines = new Map(defines);
  2200. /** @type {string} preprocessed source code of the fragment shader */
  2201. this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
  2202. /** @type {string} preprocessed source code of the vertex shader */
  2203. this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
  2204. /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
  2205. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2206. // validate arguments
  2207. this._validateArguments(this._arguments, this._uniforms);
  2208. }
  2209. /**
  2210. * Return the preprocessed GLSL source code of the fragment shader
  2211. * @returns {string}
  2212. */
  2213. get fragmentSource()
  2214. {
  2215. return this._fragmentSource;
  2216. }
  2217. /**
  2218. * Return the preprocessed GLSL source code of the vertex shader
  2219. * @returns {string}
  2220. */
  2221. get vertexSource()
  2222. {
  2223. return this._vertexSource;
  2224. }
  2225. /**
  2226. * Get the names of the vertex shader attributes
  2227. * @returns {typeof DEFAULT_ATTRIBUTES}
  2228. */
  2229. get attributes()
  2230. {
  2231. return DEFAULT_ATTRIBUTES;
  2232. }
  2233. /**
  2234. * Get the pre-defined locations of the vertex shader attributes
  2235. * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
  2236. */
  2237. get locationOfAttributes()
  2238. {
  2239. return DEFAULT_ATTRIBUTES_LOCATION;
  2240. }
  2241. /**
  2242. * Names of the arguments that will be passed to the Shader,
  2243. * corresponding to GLSL uniforms, in the order they will be passed
  2244. * @returns {string[]}
  2245. */
  2246. get arguments()
  2247. {
  2248. return [].concat(this._arguments);
  2249. }
  2250. /**
  2251. * Names of the uniforms declared in the shader
  2252. * @returns {string[]}
  2253. */
  2254. get uniforms()
  2255. {
  2256. return Array.from(this._uniforms.keys());
  2257. }
  2258. /**
  2259. * The GLSL type of a uniform variable declared in the shader
  2260. * @param {string} name
  2261. * @returns {string}
  2262. */
  2263. uniformType(name)
  2264. {
  2265. if(!this._uniforms.has(name))
  2266. throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
  2267. return this._uniforms.get(name);
  2268. }
  2269. /**
  2270. * The value of an externally defined constant, i.e., via withDefines()
  2271. * @param {string} name
  2272. * @returns {number}
  2273. */
  2274. definedConstant(name)
  2275. {
  2276. if(!this._defines.has(name))
  2277. throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
  2278. return this._defines.get(name);
  2279. }
  2280. /**
  2281. * Parses a GLSL source and detects the uniform variables,
  2282. * as well as their types
  2283. * @param {string} preprocessedSource
  2284. * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
  2285. */
  2286. _autodetectUniforms(preprocessedSource)
  2287. {
  2288. const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
  2289. const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
  2290. const uniforms = /** @type {ShaderDeclarationUniformTypes} */ ( new Map() );
  2291. let match;
  2292. while((match = regex.exec(sourceWithoutComments)) !== null) {
  2293. const type = match[2];
  2294. const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
  2295. for(const name of names) {
  2296. if(name.endsWith(']')) {
  2297. // is it an array?
  2298. if(!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/)))
  2299. throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
  2300. // read array name & size
  2301. const [ array, size ] = [ match[1], Number(match[2]) ];
  2302. // register uniforms
  2303. for(let i = 0; i < size; i++)
  2304. uniforms.set(`${array}[${i}]`, type);
  2305. }
  2306. else {
  2307. // register a regular uniform
  2308. if(!uniforms.has(name) || uniforms.get(name) === type)
  2309. uniforms.set(name, type);
  2310. else
  2311. throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
  2312. }
  2313. }
  2314. }
  2315. return uniforms;
  2316. }
  2317. /**
  2318. * Checks if all the arguments of the shader declaration are backed by a
  2319. * uniform variable in GLSL code
  2320. * @param {ShaderDeclarationArgumentList} argumentList
  2321. * @param {ShaderDeclarationUniformTypes} uniforms
  2322. * @throws {IllegalArgumentError}
  2323. */
  2324. _validateArguments(argumentList, uniforms)
  2325. {
  2326. for(const argname of argumentList) {
  2327. if(!uniforms.has(argname)) {
  2328. if(!uniforms.has(argname + '[0]'))
  2329. throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
  2330. }
  2331. }
  2332. }
  2333. }
  2334. /**
  2335. * A ShaderDeclaration that has its GLSL code stored in-memory
  2336. */
  2337. class MemoryShaderDeclaration extends ShaderDeclaration
  2338. {
  2339. /**
  2340. * @private Constructor
  2341. * @param {Symbol} privateToken
  2342. * @param {ShaderDeclarationArgumentList} argumentList
  2343. * @param {ShaderDeclarationPreprocessorConstants} defines
  2344. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2345. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
  2346. */
  2347. constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER)
  2348. {
  2349. super(privateToken, argumentList, defines, fsSource, vsSource);
  2350. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
  2351. this._fsUnprocessedSource = String(fsSource);
  2352. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
  2353. this._vsUnprocessedSource = String(vsSource);
  2354. }
  2355. }
  2356. /**
  2357. * A ShaderDeclaration that has its GLSL code stored in a file
  2358. */
  2359. class FileShaderDeclaration extends ShaderDeclaration
  2360. {
  2361. /**
  2362. * @private Constructor
  2363. * @param {Symbol} privateToken
  2364. * @param {ShaderDeclarationArgumentList} argumentList
  2365. * @param {ShaderDeclarationPreprocessorConstants} defines
  2366. * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
  2367. * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
  2368. */
  2369. constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '')
  2370. {
  2371. // validate paths
  2372. if(!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/))
  2373. throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);
  2374. else if(vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/))
  2375. throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
  2376. // import files
  2377. const fsSource = __nested_webpack_require_66386__(4606)("./" + String(fsFilepath));
  2378. const vsSource = vsFilepath != '' ? __nested_webpack_require_66386__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
  2379. // super class
  2380. super(privateToken, argumentList, defines, fsSource, vsSource);
  2381. /** @type {string} filepath of the fragment shader */
  2382. this._fsFilepath = String(fsFilepath);
  2383. /** @type {string} filepath of the vertex shader */
  2384. this._vsFilepath = String(vsFilepath);
  2385. }
  2386. }
  2387. /**
  2388. * A builder of a ShaderDeclaration
  2389. * @abstract
  2390. */
  2391. class ShaderDeclarationBuilder
  2392. {
  2393. /**
  2394. * @private Constructor
  2395. * @param {Symbol} privateToken
  2396. */
  2397. constructor(privateToken)
  2398. {
  2399. if(privateToken !== PRIVATE_TOKEN)
  2400. throw new errors/* IllegalOperationError */.Er(); // private constructor!
  2401. /** @type {string[]} ordered list of uniform names */
  2402. this._arguments = [];
  2403. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2404. this._defines = new Map();
  2405. }
  2406. /**
  2407. * Specify the list & order of arguments to be
  2408. * passed to the shader
  2409. * @param {string[]} args argument names
  2410. * @returns {this}
  2411. */
  2412. withArguments(...args)
  2413. {
  2414. // the list of arguments may be declared only once
  2415. if(this._arguments.length > 0)
  2416. throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
  2417. // get arguments
  2418. for(let j = 0; j < args.length; j++)
  2419. this._arguments.push(String(args[j]));
  2420. // done!
  2421. return this;
  2422. }
  2423. /**
  2424. * Specify a set of #defines to be prepended to the shader
  2425. * @param {Object<string,number>} defines key-value pairs
  2426. * @returns {this}
  2427. */
  2428. withDefines(defines)
  2429. {
  2430. // the list of #defines may be defined only once
  2431. if(this._defines.size > 0)
  2432. throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
  2433. // store and write the #defines
  2434. const keys = Object.keys(defines);
  2435. for(const key of keys) {
  2436. const value = Number(defines[key]); // force numeric values (just in case)
  2437. this._defines.set(key, value);
  2438. }
  2439. // done!
  2440. return this;
  2441. }
  2442. /**
  2443. * Build a ShaderDeclaration
  2444. * @returns {ShaderDeclaration}
  2445. */
  2446. build()
  2447. {
  2448. throw new errors/* AbstractMethodError */.aQ();
  2449. }
  2450. }
  2451. /**
  2452. * A builder of a MemoryShaderDeclaration
  2453. */
  2454. class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder
  2455. {
  2456. /**
  2457. * @private Constructor
  2458. * @param {Symbol} privateToken
  2459. * @param {ShaderDeclarationUnprocessedGLSL} fsSource
  2460. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
  2461. */
  2462. constructor(privateToken, fsSource, vsSource)
  2463. {
  2464. super(privateToken);
  2465. /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
  2466. this._fsSource = String(fsSource);
  2467. /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
  2468. this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
  2469. }
  2470. /**
  2471. * Build a MemoryShaderDeclaration
  2472. * @returns {ShaderDeclaration}
  2473. */
  2474. build()
  2475. {
  2476. return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
  2477. }
  2478. }
  2479. /**
  2480. * A builder of a FileShaderDeclaration
  2481. */
  2482. class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder
  2483. {
  2484. /**
  2485. * @private Constructor
  2486. * @param {Symbol} privateToken
  2487. * @param {string} fsFilepath
  2488. * @param {string} [vsFilepath]
  2489. */
  2490. constructor(privateToken, fsFilepath, vsFilepath)
  2491. {
  2492. super(privateToken);
  2493. /** @type {string} path to the unprocessed GLSL code of the fragment shader */
  2494. this._fsFilepath = String(fsFilepath);
  2495. /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
  2496. this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
  2497. }
  2498. /**
  2499. * Build a FileShaderDeclaration
  2500. * @returns {ShaderDeclaration}
  2501. */
  2502. build()
  2503. {
  2504. return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
  2505. }
  2506. }
  2507. /**
  2508. * Import a ShaderDeclaration from a GLSL file
  2509. * @param {string} filepath relative to the shaders/ folder (a .glsl file)
  2510. * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
* @returns {ShaderDeclarationBuilder}
  2512. */
  2513. function importShader(filepath, vsfilepath = undefined)
  2514. {
  2515. return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
  2516. }
  2517. /**
  2518. * Create a ShaderDeclaration from a GLSL source code
  2519. * @param {string} source fragment shader
  2520. * @param {string} [vssource] optional vertex shader
* @returns {ShaderDeclarationBuilder}
  2522. */
  2523. function createShader(source, vssource = undefined)
  2524. {
  2525. return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
  2526. }
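/*
 * Example (illustrative sketch, not part of the original bundle; kept inside a
 * comment so module evaluation is unchanged). It shows how the builder API above
 * is chained. The fragment shader body is a made-up placeholder.
 *
 *   const fs = 'uniform sampler2D image;\nvoid main() { color = threadPixel(image); }';
 *   const decl = createShader(fs)         // MemoryShaderDeclarationBuilder
 *       .withArguments('image')           // may be declared only once (else IllegalOperationError)
 *       .withDefines({ KERNEL_SIZE: 3 })  // values are coerced with Number() and prepended as #defines
 *       .build();                         // -> MemoryShaderDeclaration
 */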
  2527. /***/ }),
  2528. /***/ 5282:
  2529. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_92836__) => {
  2530. "use strict";
  2531. __nested_webpack_require_92836__.r(__nested_webpack_exports__);
  2532. /* harmony export */ __nested_webpack_require_92836__.d(__nested_webpack_exports__, {
  2533. /* harmony export */ conv2D: () => (/* binding */ conv2D),
  2534. /* harmony export */ convX: () => (/* binding */ convX),
  2535. /* harmony export */ convY: () => (/* binding */ convY)
  2536. /* harmony export */ });
  2537. /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_92836__(3112);
  2538. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_92836__(2191);
  2539. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_92836__(5619);
  2540. /*
  2541. * speedy-vision.js
  2542. * GPU-accelerated Computer Vision for JavaScript
  2543. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2544. *
  2545. * Licensed under the Apache License, Version 2.0 (the "License");
  2546. * you may not use this file except in compliance with the License.
  2547. * You may obtain a copy of the License at
  2548. *
  2549. * http://www.apache.org/licenses/LICENSE-2.0
  2550. *
  2551. * Unless required by applicable law or agreed to in writing, software
  2552. * distributed under the License is distributed on an "AS IS" BASIS,
  2553. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2554. * See the License for the specific language governing permissions and
  2555. * limitations under the License.
  2556. *
  2557. * convolution.js
  2558. * Convolution shader generators
  2559. */
  2560. /**
  2561. * Generate a 2D convolution with a square kernel
  2562. * @param {number[]} kernel convolution kernel
  2563. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2564. * @returns {ShaderDeclarationBuilder}
  2565. */
  2566. function conv2D(kernel, normalizationConstant = 1.0)
  2567. {
  2568. const kernel32 = new Float32Array(kernel.map(x => (+x) * (+normalizationConstant)));
  2569. const kSize = Math.sqrt(kernel32.length) | 0;
  2570. const N = kSize >> 1; // idiv 2
  2571. // validate input
  2572. if(kSize < 1 || kSize % 2 == 0)
  2573. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);
  2574. else if(kSize * kSize != kernel32.length)
  2575. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
  2576. // select the appropriate pixel function
  2577. const pixelAtOffset = (N <= 7) ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2578. // code generator
  2579. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(
  2580. cur => fn(
  2581. kernel32[(cur[0] + N) * kSize + (cur[1] + N)],
  2582. cur[0], cur[1]
  2583. )
  2584. ).join('\n');
  2585. const generateCode = (k, dy, dx) => `
  2586. result += ${pixelAtOffset}(image, ivec2(${(-dx) | 0}, ${(-dy) | 0})) * float(${+k});
  2587. `;
  2588. // shader
  2589. const source = `
  2590. uniform sampler2D image;
  2591. void main()
  2592. {
  2593. float alpha = threadPixel(image).a;
  2594. vec4 result = vec4(0.0f);
  2595. ${foreachKernelElement(generateCode)}
  2596. color = vec4(result.rgb, alpha);
  2597. }
  2598. `;
  2599. // done!
  2600. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2601. }
  2602. /**
  2603. * Generate a 1D convolution function on the x-axis
  2604. * @param {number[]} kernel convolution kernel
  2605. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2606. * @returns {ShaderDeclarationBuilder}
  2607. */
  2608. function convX(kernel, normalizationConstant = 1.0)
  2609. {
  2610. return conv1D('x', kernel, normalizationConstant);
  2611. }
  2612. /**
  2613. * Generate a 1D convolution function on the y-axis
  2614. * @param {number[]} kernel convolution kernel
  2615. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2616. * @returns {ShaderDeclarationBuilder}
  2617. */
  2618. function convY(kernel, normalizationConstant = 1.0)
  2619. {
  2620. return conv1D('y', kernel, normalizationConstant);
  2621. }
  2622. /**
  2623. * 1D convolution function generator
  2624. * @param {string} axis either "x" or "y"
  2625. * @param {number[]} kernel convolution kernel
  2626. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2627. * @returns {ShaderDeclarationBuilder}
  2628. */
  2629. function conv1D(axis, kernel, normalizationConstant = 1.0)
  2630. {
  2631. const kernel32 = new Float32Array(kernel.map(x => (+x) * (+normalizationConstant)));
  2632. const kSize = kernel32.length;
  2633. const N = kSize >> 1; // idiv 2
  2634. // validate input
  2635. if(kSize < 1 || kSize % 2 == 0)
  2636. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);
  2637. else if(axis != 'x' && axis != 'y')
  2638. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
  2639. // select the appropriate pixel function
  2640. const pixelAtOffset = (N <= 7) ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2641. // code generator
  2642. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce(
  2643. (acc, cur) => acc + fn(kernel32[cur + N], cur),
  2644. '');
  2645. const generateCode = (k, i) => ((axis == 'x') ? `
  2646. pixel += ${pixelAtOffset}(image, ivec2(${(-i) | 0}, 0)) * float(${+k});
  2647. ` : `
  2648. pixel += ${pixelAtOffset}(image, ivec2(0, ${(-i) | 0})) * float(${+k});
  2649. `);
  2650. // shader
  2651. const source = `
  2652. uniform sampler2D image;
  2653. void main()
  2654. {
  2655. float alpha = threadPixel(image).a;
  2656. vec4 pixel = vec4(0.0f);
  2657. ${foreachKernelElement(generateCode)}
  2658. color = vec4(pixel.rgb, alpha);
  2659. }
  2660. `;
  2661. // done!
  2662. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2663. }
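/*
 * Example (illustrative, commented out): a separable Gaussian blur can be built
 * from the generators above by pairing convX and convY with a 1D kernel. In the
 * unbundled source, Utils.gaussianKernel and Utils.kernel2d are the helpers of
 * utils.js (module 2191 below); chaining the two shader declarations into a
 * pipeline happens outside this module.
 *
 *   const kernel = Utils.gaussianKernel(1.0, 5);  // 5-tap kernel, sigma = 1
 *   const blurX = convX(kernel).build();          // horizontal pass
 *   const blurY = convY(kernel).build();          // vertical pass
 *   // running blurX followed by blurY is roughly equivalent to conv2D(Utils.kernel2d(kernel))
 */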
  2664. /***/ }),
  2665. /***/ 5235:
  2666. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_98884__) => {
  2667. var map = {
  2668. "./colors.glsl": 8609,
  2669. "./filters.glsl": 4672,
  2670. "./fixed-point.glsl": 9778,
  2671. "./float16.glsl": 8710,
  2672. "./global.glsl": 2434,
  2673. "./int32.glsl": 439,
  2674. "./keypoint-descriptors.glsl": 8545,
  2675. "./keypoint-matches.glsl": 6762,
  2676. "./keypoints.glsl": 7639,
  2677. "./math.glsl": 431,
  2678. "./platform.glsl": 6822,
  2679. "./pyramids.glsl": 2728,
  2680. "./subpixel.glsl": 6823
  2681. };
  2682. function webpackContext(req) {
  2683. var id = webpackContextResolve(req);
  2684. return __nested_webpack_require_98884__(id);
  2685. }
  2686. function webpackContextResolve(req) {
  2687. if(!__nested_webpack_require_98884__.o(map, req)) {
  2688. var e = new Error("Cannot find module '" + req + "'");
  2689. e.code = 'MODULE_NOT_FOUND';
  2690. throw e;
  2691. }
  2692. return map[req];
  2693. }
  2694. webpackContext.keys = function webpackContextKeys() {
  2695. return Object.keys(map);
  2696. };
  2697. webpackContext.resolve = webpackContextResolve;
  2698. module.exports = webpackContext;
  2699. webpackContext.id = 5235;
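/*
 * Example (illustrative): this is webpack's require.context() runtime for the
 * include/ shaders. A request is first mapped to a numeric module id, then loaded:
 *
 *   webpackContextResolve('./math.glsl');   // -> 431
 *   webpackContext('./math.glsl');          // -> __nested_webpack_require_98884__(431)
 *   webpackContext.keys();                  // -> ['./colors.glsl', './filters.glsl', ...]
 *   webpackContext('./missing.glsl');       // throws Error with code 'MODULE_NOT_FOUND'
 */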
  2700. /***/ }),
  2701. /***/ 4606:
  2702. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_99834__) => {
  2703. var map = {
  2704. "./filters/convolution": 5282,
  2705. "./filters/convolution.js": 5282,
  2706. "./filters/convolution1d.glsl": 8211,
  2707. "./filters/convolution2d.glsl": 7360,
  2708. "./filters/fast-median.glsl": 8191,
  2709. "./filters/nightvision.glsl": 4438,
  2710. "./filters/normalize-image.glsl": 5867,
  2711. "./filters/rgb2grey.glsl": 9252,
  2712. "./include/colors.glsl": 8609,
  2713. "./include/filters.glsl": 4672,
  2714. "./include/fixed-point.glsl": 9778,
  2715. "./include/float16.glsl": 8710,
  2716. "./include/global.glsl": 2434,
  2717. "./include/int32.glsl": 439,
  2718. "./include/keypoint-descriptors.glsl": 8545,
  2719. "./include/keypoint-matches.glsl": 6762,
  2720. "./include/keypoints.glsl": 7639,
  2721. "./include/math.glsl": 431,
  2722. "./include/platform.glsl": 6822,
  2723. "./include/pyramids.glsl": 2728,
  2724. "./include/subpixel.glsl": 6823,
  2725. "./keypoints/allocate-descriptors.glsl": 1341,
  2726. "./keypoints/allocate-extra.glsl": 7833,
  2727. "./keypoints/apply-homography.glsl": 2352,
  2728. "./keypoints/bf-knn.glsl": 7541,
  2729. "./keypoints/clip-border.glsl": 4868,
  2730. "./keypoints/clip.glsl": 5591,
  2731. "./keypoints/distance-filter.glsl": 191,
  2732. "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
  2733. "./keypoints/encode-keypoint-offsets.glsl": 336,
  2734. "./keypoints/encode-keypoint-positions.glsl": 8968,
  2735. "./keypoints/encode-keypoint-properties.glsl": 1733,
  2736. "./keypoints/encode-keypoints.glsl": 9674,
  2737. "./keypoints/encode-null-keypoints.glsl": 2090,
  2738. "./keypoints/fast.glsl": 1855,
  2739. "./keypoints/fast.vs.glsl": 4824,
  2740. "./keypoints/hamming-distance-filter.glsl": 2381,
  2741. "./keypoints/harris-cutoff.glsl": 6060,
  2742. "./keypoints/harris.glsl": 9974,
  2743. "./keypoints/knn-init.glsl": 3047,
  2744. "./keypoints/knn-transfer.glsl": 3266,
  2745. "./keypoints/laplacian.glsl": 8018,
  2746. "./keypoints/lk.glsl": 3168,
  2747. "./keypoints/lookup-of-locations.glsl": 3890,
  2748. "./keypoints/lookup-of-locations.vs.glsl": 8647,
  2749. "./keypoints/lsh-knn.glsl": 4776,
  2750. "./keypoints/mix-keypoints.glsl": 2648,
  2751. "./keypoints/nonmax-scale.glsl": 8825,
  2752. "./keypoints/nonmax-space.glsl": 5693,
  2753. "./keypoints/nonmax-suppression.glsl": 9280,
  2754. "./keypoints/orb-descriptor.glsl": 9108,
  2755. "./keypoints/orb-orientation.glsl": 7137,
  2756. "./keypoints/refine-scale.glsl": 9739,
  2757. "./keypoints/score-findmax.glsl": 8231,
  2758. "./keypoints/shuffle.glsl": 2518,
  2759. "./keypoints/sort-keypoints.glsl": 8096,
  2760. "./keypoints/subpixel-refinement.glsl": 5795,
  2761. "./keypoints/transfer-flow.glsl": 3169,
  2762. "./keypoints/transfer-orientation.glsl": 1337,
  2763. "./keypoints/transfer-to-extra.glsl": 6187,
  2764. "./keypoints/upload-keypoints.glsl": 477,
  2765. "./pyramids/downsample2.glsl": 4050,
  2766. "./pyramids/upsample2.glsl": 5545,
  2767. "./transforms/additive-mix.glsl": 7113,
  2768. "./transforms/resize.glsl": 1202,
  2769. "./transforms/warp-perspective.glsl": 7971,
  2770. "./utils/copy-components.glsl": 6122,
  2771. "./utils/copy-raster.glsl": 371,
  2772. "./utils/copy.glsl": 7307,
  2773. "./utils/fill-components.glsl": 8614,
  2774. "./utils/fill.glsl": 6271,
  2775. "./utils/flip-y.vs.glsl": 3016,
  2776. "./utils/scan-minmax2d.glsl": 3630,
  2777. "./utils/sobel-derivatives.glsl": 8508,
  2778. "./utils/sobel-derivatives.vs.glsl": 8073
  2779. };
  2780. function webpackContext(req) {
  2781. var id = webpackContextResolve(req);
  2782. return __nested_webpack_require_99834__(id);
  2783. }
  2784. function webpackContextResolve(req) {
  2785. if(!__nested_webpack_require_99834__.o(map, req)) {
  2786. var e = new Error("Cannot find module '" + req + "'");
  2787. e.code = 'MODULE_NOT_FOUND';
  2788. throw e;
  2789. }
  2790. return map[req];
  2791. }
  2792. webpackContext.keys = function webpackContextKeys() {
  2793. return Object.keys(map);
  2794. };
  2795. webpackContext.resolve = webpackContextResolve;
  2796. module.exports = webpackContext;
  2797. webpackContext.id = 4606;
  2798. /***/ }),
  2799. /***/ 1567:
  2800. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_103401__) => {
  2801. "use strict";
  2802. /* harmony export */ __nested_webpack_require_103401__.d(__nested_webpack_exports__, {
  2803. /* harmony export */ c: () => (/* binding */ SpeedyGL)
  2804. /* harmony export */ });
  2805. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_103401__(2191);
  2806. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_103401__(5637);
  2807. /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_103401__(4109);
  2808. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_103401__(8902);
  2809. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_103401__(5619);
  2810. /*
  2811. * speedy-vision.js
  2812. * GPU-accelerated Computer Vision for JavaScript
  2813. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2814. *
  2815. * Licensed under the Apache License, Version 2.0 (the "License");
  2816. * you may not use this file except in compliance with the License.
  2817. * You may obtain a copy of the License at
  2818. *
  2819. * http://www.apache.org/licenses/LICENSE-2.0
  2820. *
  2821. * Unless required by applicable law or agreed to in writing, software
  2822. * distributed under the License is distributed on an "AS IS" BASIS,
  2823. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2824. * See the License for the specific language governing permissions and
  2825. * limitations under the License.
  2826. *
  2827. * speedy-gl.js
  2828. * A wrapper around the WebGL Rendering Context
  2829. */
  2830. /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
  2831. // Constants
  2832. const SINGLETON_KEY = Symbol();
  2833. const DEFAULT_POWER_PREFERENCE = 'default';
  2834. //
  2835. // We use a small canvas to improve the performance
  2836. // of createImageBitmap() on Firefox.
  2837. //
  2838. // A large canvas (2048x2048) causes a FPS drop, even
  2839. // if we only extract a small region of it (this is
  2840. // unlike Chrome, which is fast).
  2841. //
  2842. // Note: we automatically increase the size of the
  2843. // canvas (as needed) when rendering to it.
  2844. //
  2845. const CANVAS_WIDTH = 16, CANVAS_HEIGHT = 16;
  2846. /** @type {SpeedyGL} Singleton */
  2847. let instance = null;
  2848. /** @type {PowerPreference} power preference */
  2849. let powerPreference = DEFAULT_POWER_PREFERENCE;
  2850. /**
  2851. * A wrapper around a WebGL Rendering Context
  2852. */
  2853. class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c
  2854. {
  2855. /**
  2856. * Constructor
  2857. * @param {Symbol} key
  2858. * @private
  2859. */
  2860. constructor(key)
  2861. {
  2862. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
  2863. super();
  2864. /** @type {boolean} internal flag */
  2865. this._reinitializeOnContextLoss = true;
  2866. /** @type {HTMLCanvasElement} internal canvas */
  2867. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2868. /** @type {WebGL2RenderingContext} WebGL rendering context */
  2869. this._gl = this._createContext(this._canvas);
  2870. /** @type {string} vendor string of the video driver */
  2871. this._vendor = '';
  2872. /** @type {string} renderer string of the video driver */
  2873. this._renderer = '';
  2874. // read driver info
  2875. this._readDriverInfo();
  2876. // log driver info
  2877. if(_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic')
  2878. this._logDriverInfo();
  2879. }
  2880. /**
  2881. * Get Singleton
  2882. * @returns {SpeedyGL}
  2883. */
  2884. static get instance()
  2885. {
  2886. return instance || (instance = new SpeedyGL(SINGLETON_KEY));
  2887. }
  2888. /**
  2889. * The WebGL Rendering Context
  2890. * Be careful not to cache this rendering context, as it may be lost!
  2891. * @returns {WebGL2RenderingContext}
  2892. */
  2893. get gl()
  2894. {
  2895. return this._gl;
  2896. }
  2897. /**
  2898. * The internal canvas
  2899. * @returns {HTMLCanvasElement}
  2900. */
  2901. get canvas()
  2902. {
  2903. return this._canvas;
  2904. }
  2905. /**
  2906. * Renderer string of the video driver
  2907. * @returns {string}
  2908. */
  2909. get renderer()
  2910. {
  2911. return this._renderer;
  2912. }
  2913. /**
  2914. * Vendor string of the video driver
  2915. * @returns {string}
  2916. */
  2917. get vendor()
  2918. {
  2919. return this._vendor;
  2920. }
  2921. /**
  2922. * Create a WebGL-capable canvas
  2923. * @param {Function} reinitialize to be called if we get a WebGL context loss event
  2924. * @returns {HTMLCanvasElement}
  2925. */
  2926. _createCanvas(reinitialize)
  2927. {
  2928. const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
  2929. canvas.addEventListener('webglcontextlost', ev => {
  2930. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
  2931. setTimeout(reinitialize, 0);
  2932. ev.preventDefault();
  2933. }, false);
  2934. /*canvas.addEventListener('webglcontextrestored', ev => {
  2935. Utils.warning(`Restored WebGL2 context`);
  2936. ev.preventDefault();
  2937. }, false);*/
  2938. return canvas;
  2939. }
  2940. /**
  2941. * Create a WebGL2 Rendering Context
  2942. * @param {HTMLCanvasElement} canvas
  2943. * @returns {WebGL2RenderingContext}
  2944. */
  2945. _createContext(canvas)
  2946. {
  2947. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
  2948. // does the browser support WebGL2?
  2949. if(typeof WebGL2RenderingContext === 'undefined')
  2950. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please use a different browser.`);
  2951. const gl = canvas.getContext('webgl2', {
  2952. premultipliedAlpha: false,
  2953. preserveDrawingBuffer: false,
  2954. powerPreference: powerPreference,
  2955. alpha: true, // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  2956. antialias: false,
  2957. depth: false,
  2958. stencil: false,
  2959. desynchronized: true,
  2960. });
  2961. if(!gl)
  2962. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
  2963. return gl;
  2964. }
  2965. /**
  2966. * Reinitialize WebGL
  2967. */
  2968. _reinitialize()
  2969. {
  2970. // disable reinitialization?
  2971. if(!this._reinitializeOnContextLoss)
  2972. return;
  2973. // warning
  2974. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
  2975. // create new canvas
  2976. this._canvas.remove();
  2977. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2978. // create new context
  2979. this._gl = this._createContext(this._canvas);
  2980. // is this needed?
  2981. this._readDriverInfo();
  2982. // notify observers: we have a new context!
  2983. // we need to recreate all textures...
  2984. this._notify();
  2985. }
  2986. /**
  2987. * Read debugging information about the video driver of the user
  2988. */
  2989. _readDriverInfo()
  2990. {
  2991. // Depending on the privacy settings of the browser, this information
  2992. // may be unavailable. When available, it may not be entirely correct.
  2993. // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
  2994. const gl = this._gl;
  2995. let debugInfo = null;
  2996. if(navigator.userAgent.includes('Firefox')) {
  2997. this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
  2998. this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
  2999. }
  3000. else if(null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
  3001. this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
  3002. this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
  3003. }
  3004. else {
  3005. this._vendor = ''; // unavailable information
  3006. this._renderer = '';
  3007. }
  3008. }
  3009. /**
  3010. * Log debugging information about the video driver and the platform
  3011. */
  3012. _logDriverInfo()
  3013. {
  3014. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
  3015. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
  3016. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
  3017. }
  3018. /**
  3019. * Lose the WebGL context. This is used to manually
  3020. * free resources, and also for purposes of testing
  3021. * @returns {WEBGL_lose_context}
  3022. */
  3023. loseContext()
  3024. {
  3025. const gl = this._gl;
  3026. // find the appropriate extension
  3027. const ext = gl.getExtension('WEBGL_lose_context');
  3028. if(!ext)
  3029. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
  3030. // nothing to do?
  3031. if(gl.isContextLost())
  3032. return ext;
  3033. // disable reinitialization
  3034. this._reinitializeOnContextLoss = false;
  3035. // lose context
  3036. ext.loseContext();
  3037. // done!
  3038. return ext;
  3039. }
  3040. /**
  3041. * Lose & restore the WebGL context
  3042. * @param {number} [secondsToRestore]
  3043. * @return {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
  3044. */
  3045. loseAndRestoreContext(secondsToRestore = 1)
  3046. {
  3047. const ms = Math.max(secondsToRestore, 0) * 1000;
  3048. const ext = this.loseContext();
  3049. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
  3050. setTimeout(() => {
  3051. //ext.restoreContext();
  3052. this._reinitializeOnContextLoss = true;
  3053. this._reinitialize();
  3054. setTimeout(() => resolve(ext), 0); // next frame
  3055. }, ms);
  3056. });
  3057. }
  3058. /**
  3059. * Power preference for the WebGL context
  3060. * @returns {PowerPreference}
  3061. */
  3062. static get powerPreference()
  3063. {
  3064. return powerPreference;
  3065. }
  3066. /**
  3067. * Power preference for the WebGL context
  3068. * @param {PowerPreference} value
  3069. */
  3070. static set powerPreference(value)
  3071. {
  3072. // validate
  3073. if(!(value === 'default' || value === 'low-power' || value === 'high-performance'))
  3074. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
  3075. // the power preference should be set before we create the WebGL context
  3076. if(instance == null || powerPreference !== value) {
  3077. powerPreference = value;
  3078. // recreate the context if it already exists. Experimental.
  3079. if(instance != null)
  3080. instance.loseAndRestoreContext();
  3081. }
  3082. }
  3083. /**
  3084. * Check if an instance of SpeedyGL has already been created
  3085. * @returns {boolean}
  3086. */
  3087. static isInitialized()
  3088. {
  3089. return instance != null;
  3090. }
  3091. }
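/*
 * Example (illustrative, commented out so that no WebGL context is created here):
 * typical use of the singleton above. Setting the power preference after the
 * instance exists triggers an experimental lose-and-restore cycle, so it is best
 * done first. Observers are notified via _notify() whenever the context is rebuilt.
 *
 *   SpeedyGL.powerPreference = 'low-power';   // validated; set before first use
 *   const speedyGL = SpeedyGL.instance;       // lazily creates the canvas + WebGL2 context
 *   speedyGL.subscribe(() => {
 *       // the WebGL2 context was reinitialized: recreate textures, programs, etc.
 *   });
 *   speedyGL.loseAndRestoreContext(1).then(ext => {
 *       // resolves about 1 second later, after reinitialization
 *   });
 */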
  3092. /***/ }),
  3093. /***/ 5619:
  3094. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_114412__) => {
  3095. "use strict";
  3096. /* harmony export */ __nested_webpack_require_114412__.d(__nested_webpack_exports__, {
  3097. /* harmony export */ EM: () => (/* binding */ NotSupportedError),
  3098. /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
  3099. /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
  3100. /* harmony export */ MU: () => (/* binding */ TimeoutError),
  3101. /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
  3102. /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
  3103. /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
  3104. /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
  3105. /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
  3106. /* harmony export */ mB: () => (/* binding */ ParseError),
  3107. /* harmony export */ pf: () => (/* binding */ AssertionError),
  3108. /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
  3109. /* harmony export */ wB: () => (/* binding */ GLError),
  3110. /* harmony export */ xB: () => (/* binding */ SpeedyError)
  3111. /* harmony export */ });
  3112. /* unused harmony export NotImplementedError */
  3113. /*
  3114. * speedy-vision.js
  3115. * GPU-accelerated Computer Vision for JavaScript
  3116. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3117. *
  3118. * Licensed under the Apache License, Version 2.0 (the "License");
  3119. * you may not use this file except in compliance with the License.
  3120. * You may obtain a copy of the License at
  3121. *
  3122. * http://www.apache.org/licenses/LICENSE-2.0
  3123. *
  3124. * Unless required by applicable law or agreed to in writing, software
  3125. * distributed under the License is distributed on an "AS IS" BASIS,
  3126. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3127. * See the License for the specific language governing permissions and
  3128. * limitations under the License.
  3129. *
  3130. * errors.js
  3131. * Error classes
  3132. */
  3133. /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
  3134. /**
  3135. * Generic error class for Speedy
  3136. */
  3137. class SpeedyError extends Error
  3138. {
  3139. /**
  3140. * Class constructor
  3141. * @param {string} message message text
  3142. * @param {SpeedyErrorCause} [cause] cause of the error
  3143. */
  3144. constructor(message, cause = null)
  3145. {
  3146. super([
  3147. message,
  3148. cause ? cause.toString() : '[speedy-vision.js]'
  3149. ].join('\n-> '));
  3150. /** @type {SpeedyErrorCause} cause of the error */
  3151. this._cause = cause;
  3152. }
  3153. /**
  3154. * Error name
  3155. * @returns {string}
  3156. */
  3157. get name()
  3158. {
  3159. return this.constructor.name;
  3160. }
  3161. /**
  3162. * Set error name (ignored)
  3163. * @param {string} _ ignored
  3164. */
  3165. set name(_)
  3166. {
  3167. void(0);
  3168. }
  3169. /**
  3170. * Get the cause of the error. Available if
  3171. * it has been specified in the constructor
  3172. * @returns {SpeedyErrorCause}
  3173. */
  3174. get cause()
  3175. {
  3176. return this._cause;
  3177. }
  3178. }
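/*
 * Example (illustrative): the constructor above joins the message with the cause
 * chain using '\n-> ', so nested errors read as a trace:
 *
 *   const cause = new SpeedyError('low-level failure');
 *   const err = new SpeedyError('high-level failure', cause);
 *   err.name;     // 'SpeedyError' (derived from the constructor)
 *   err.cause;    // the SpeedyError passed above
 *   err.message;  // 'high-level failure\n-> SpeedyError: low-level failure\n-> [speedy-vision.js]'
 */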
  3179. /**
  3180. * Unsupported operation error
  3181. * The requested operation is not supported
  3182. */
  3183. class NotSupportedError extends SpeedyError
  3184. {
  3185. /**
  3186. * Class constructor
  3187. * @param {string} [message] additional text
  3188. * @param {SpeedyErrorCause} [cause] cause of the error
  3189. */
  3190. constructor(message = '', cause = null)
  3191. {
  3192. super(`Unsupported operation. ${message}`, cause);
  3193. }
  3194. }
  3195. /**
  3196. * Not implemented error
  3197. * The called method is not implemented
  3198. */
  3199. class NotImplementedError extends SpeedyError
  3200. {
  3201. /**
  3202. * Class constructor
  3203. * @param {string} [message] additional text
  3204. * @param {SpeedyErrorCause} [cause] cause of the error
  3205. */
  3206. constructor(message = '', cause = null)
  3207. {
  3208. super(`Method not implemented. ${message}`, cause);
  3209. }
  3210. }
  3211. /**
  3212. * WebGL error
  3213. */
  3214. class GLError extends SpeedyError
  3215. {
  3216. /**
  3217. * Class constructor
  3218. * @param {string} [message] additional text
  3219. * @param {SpeedyErrorCause} [cause] cause of the error
  3220. */
  3221. constructor(message = '', cause = null)
  3222. {
  3223. super(`WebGL error. ${message}`, cause);
  3224. }
  3225. /**
  3226. * Get an error object describing the latest WebGL error
  3227. * @param {WebGL2RenderingContext} gl
  3228. * @returns {GLError}
  3229. */
  3230. static from(gl)
  3231. {
  3232. const recognizedErrors = [
  3233. 'NO_ERROR',
  3234. 'INVALID_ENUM',
  3235. 'INVALID_VALUE',
  3236. 'INVALID_OPERATION',
  3237. 'INVALID_FRAMEBUFFER_OPERATION',
  3238. 'OUT_OF_MEMORY',
  3239. 'CONTEXT_LOST_WEBGL',
  3240. ];
  3241. const glError = gl.getError();
  3242. const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
  3243. return new GLError(message);
  3244. }
  3245. }
  3246. /**
  3247. * AbstractMethodError
  3248. * Thrown when one tries to call an abstract method
  3249. */
  3250. class AbstractMethodError extends SpeedyError
  3251. {
  3252. /**
  3253. * Class constructor
  3254. * @param {string} [message] additional text
  3255. * @param {SpeedyErrorCause} [cause] cause of the error
  3256. */
  3257. constructor(message = '', cause = null)
  3258. {
  3259. super(`Can't call abstract method. ${message}`, cause);
  3260. }
  3261. }
  3262. /**
  3263. * Illegal argument error
  3264. * A method has received one or more illegal arguments
  3265. */
  3266. class IllegalArgumentError extends SpeedyError
  3267. {
  3268. /**
  3269. * Class constructor
  3270. * @param {string} [message] additional text
  3271. * @param {SpeedyErrorCause} [cause] cause of the error
  3272. */
  3273. constructor(message = '', cause = null)
  3274. {
  3275. super(`Illegal argument. ${message}`, cause);
  3276. }
  3277. }
  3278. /**
  3279. * Illegal operation error
  3280. * The method arguments are valid, but the method can't
* be called due to the current state of the object
  3282. */
  3283. class IllegalOperationError extends SpeedyError
  3284. {
  3285. /**
  3286. * Class constructor
  3287. * @param {string} [message] additional text
  3288. * @param {SpeedyErrorCause} [cause] cause of the error
  3289. */
  3290. constructor(message = '', cause = null)
  3291. {
  3292. super(`Illegal operation. ${message}`, cause);
  3293. }
  3294. }
  3295. /**
  3296. * Out of memory
  3297. */
  3298. class OutOfMemoryError extends SpeedyError
  3299. {
  3300. /**
  3301. * Class constructor
  3302. * @param {string} [message] additional text
  3303. * @param {SpeedyErrorCause} [cause] cause of the error
  3304. */
  3305. constructor(message = '', cause = null)
  3306. {
  3307. super(`Out of memory. ${message}`, cause);
  3308. }
  3309. }
  3310. /**
  3311. * File not found error
  3312. */
  3313. class FileNotFoundError extends SpeedyError
  3314. {
  3315. /**
  3316. * Class constructor
  3317. * @param {string} [message] additional text
  3318. * @param {SpeedyErrorCause} [cause] cause of the error
  3319. */
  3320. constructor(message = '', cause = null)
  3321. {
  3322. super(`File not found. ${message}`, cause);
  3323. }
  3324. }
  3325. /**
  3326. * Resource not loaded error
  3327. */
  3328. class ResourceNotLoadedError extends SpeedyError
  3329. {
  3330. /**
  3331. * Class constructor
  3332. * @param {string} [message] additional text
  3333. * @param {SpeedyErrorCause} [cause] cause of the error
  3334. */
  3335. constructor(message = '', cause = null)
  3336. {
  3337. super(`Resource not loaded. ${message}`, cause);
  3338. }
  3339. }
  3340. /**
  3341. * Timeout error
  3342. */
  3343. class TimeoutError extends SpeedyError
  3344. {
  3345. /**
  3346. * Class constructor
  3347. * @param {string} [message] additional text
  3348. * @param {SpeedyErrorCause} [cause] cause of the error
  3349. */
  3350. constructor(message = '', cause = null)
  3351. {
  3352. super(`Timeout error. ${message}`, cause);
  3353. }
  3354. }
  3355. /**
  3356. * Parse error
  3357. */
  3358. class ParseError extends SpeedyError
  3359. {
  3360. /**
  3361. * Class constructor
  3362. * @param {string} [message] additional text
  3363. * @param {SpeedyErrorCause} [cause] cause of the error
  3364. */
  3365. constructor(message = '', cause = null)
  3366. {
  3367. super(`Parse error. ${message}`, cause);
  3368. }
  3369. }
  3370. /**
  3371. * Assertion error
  3372. */
  3373. class AssertionError extends SpeedyError
  3374. {
  3375. /**
  3376. * Class constructor
  3377. * @param {string} [message] additional text
  3378. * @param {SpeedyErrorCause} [cause] cause of the error
  3379. */
  3380. constructor(message = '', cause = null)
  3381. {
  3382. super(`Assertion failed. ${message}`, cause);
  3383. }
  3384. }
  3385. /**
  3386. * Access denied
  3387. */
  3388. class AccessDeniedError extends SpeedyError
  3389. {
  3390. /**
  3391. * Class constructor
  3392. * @param {string} [message] additional text
  3393. * @param {SpeedyErrorCause} [cause] cause of the error
  3394. */
  3395. constructor(message = '', cause = null)
  3396. {
  3397. super(`Access denied. ${message}`, cause);
  3398. }
  3399. }
  3400. /**
  3401. * WebAssembly error
  3402. */
  3403. class WebAssemblyError extends SpeedyError
  3404. {
  3405. /**
  3406. * Class constructor
  3407. * @param {string} [message] additional text
  3408. * @param {SpeedyErrorCause} [cause] cause of the error
  3409. */
  3410. constructor(message = '', cause = null)
  3411. {
  3412. super(`WebAssembly error. ${message}`, cause);
  3413. }
  3414. }
  3415. /***/ }),
  3416. /***/ 1814:
  3417. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_122991__) => {
  3418. "use strict";
  3419. __nested_webpack_require_122991__.r(__nested_webpack_exports__);
  3420. /* harmony export */ __nested_webpack_require_122991__.d(__nested_webpack_exports__, {
  3421. /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
  3422. /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
  3423. /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
  3424. /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
  3425. /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
  3426. /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
  3427. /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
  3428. /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
  3429. /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
  3430. /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
  3431. /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
  3432. /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
  3433. /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
  3434. /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
  3435. /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
  3436. /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
  3437. /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
  3438. /* harmony export */ });
  3439. /*
  3440. * speedy-vision.js
  3441. * GPU-accelerated Computer Vision for JavaScript
  3442. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3443. *
  3444. * Licensed under the Apache License, Version 2.0 (the "License");
  3445. * you may not use this file except in compliance with the License.
  3446. * You may obtain a copy of the License at
  3447. *
  3448. * http://www.apache.org/licenses/LICENSE-2.0
  3449. *
  3450. * Unless required by applicable law or agreed to in writing, software
  3451. * distributed under the License is distributed on an "AS IS" BASIS,
  3452. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3453. * See the License for the specific language governing permissions and
  3454. * limitations under the License.
  3455. *
  3456. * globals.js
  3457. * Global constants
  3458. */
  3459. // -----------------------------------------------------------------
  3460. // IMAGE PYRAMIDS & SCALE-SPACE
  3461. // -----------------------------------------------------------------
  3462. /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
  3463. const PYRAMID_MAX_LEVELS = 8;
  3464. /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
  3465. const LOG2_PYRAMID_MAX_SCALE = 0;
  3466. /** @type {number} The maximum supported scale for a pyramid level */
  3467. const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
  3468. // -----------------------------------------------------------------
  3469. // FIXED-POINT MATH
  3470. // -----------------------------------------------------------------
  3471. /** @type {number} How many bits do we use to store fractional data? */
  3472. const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
  3473. /** @type {number} Fixed-point resolution */
  3474. const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
  3475. // -----------------------------------------------------------------
  3476. // TEXTURE LIMITS
  3477. // -----------------------------------------------------------------
  3478. /** @type {number} Maximum texture length (width, height) */
  3479. const MAX_TEXTURE_LENGTH = (1 << (16 - FIX_BITS)) - 1; // must be 2^n - 1 due to keypoint encoding
  3480. // -----------------------------------------------------------------
  3481. // KEYPOINTS
  3482. // -----------------------------------------------------------------
  3483. /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
  3484. const MIN_KEYPOINT_SIZE = 8;
  3485. /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
  3486. const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
  3487. /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
  3488. const MAX_ENCODER_CAPACITY = 8192;
  3489. /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
  3490. const DEFAULT_ENCODER_CAPACITY = 2048;
  3491. /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
  3492. const LOG2_MAX_DESCRIPTOR_SIZE = 6;
  3493. /** @type {number} maximum size of a keypoint descriptor, in bytes */
  3494. const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
  3495. /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
  3496. const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
  3497. /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
  3498. const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
  3499. /** @type {number} Maximum size of the database of keypoints for matching */
  3500. const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
  3501. /** @type {number} The maximum distance that can be stored in a match */
  3502. const MATCH_MAX_DISTANCE = (1 << (32 - MATCH_INDEX_BITS)) - 1;
  3503. // -----------------------------------------------------------------
  3504. // MISC
  3505. // -----------------------------------------------------------------
  3506. /** @type {boolean} Are we in a little-endian machine? */
  3507. const LITTLE_ENDIAN = (function() {
  3508. return 0xCAFE === (new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer))[0];
  3509. })();
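/*
 * Worked example (illustrative) of the constants above:
 *
 *   FIX_BITS = 3         -> FIX_RESOLUTION = 8, i.e. positions are stored in 1/8-pixel steps
 *   MAX_TEXTURE_LENGTH   = 2^13 - 1 = 8191
 *   MATCH_INDEX_BITS     = 32 - (6 + 3) = 23
 *   MATCH_INDEX_MASK     = 2^23 - 1 = 0x7FFFFF (= MATCH_MAX_INDEX)
 *   MATCH_MAX_DISTANCE   = 2^9 - 1 = 511
 *
 * so an encoded match packs a 23-bit keypoint index and a 9-bit distance into 32 bits.
 */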
  3510. /***/ }),
  3511. /***/ 4109:
  3512. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_128591__) => {
  3513. "use strict";
  3514. /* harmony export */ __nested_webpack_require_128591__.d(__nested_webpack_exports__, {
  3515. /* harmony export */ c: () => (/* binding */ Observable)
  3516. /* harmony export */ });
  3517. /*
  3518. * speedy-vision.js
  3519. * GPU-accelerated Computer Vision for JavaScript
  3520. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3521. *
  3522. * Licensed under the Apache License, Version 2.0 (the "License");
  3523. * you may not use this file except in compliance with the License.
  3524. * You may obtain a copy of the License at
  3525. *
  3526. * http://www.apache.org/licenses/LICENSE-2.0
  3527. *
  3528. * Unless required by applicable law or agreed to in writing, software
  3529. * distributed under the License is distributed on an "AS IS" BASIS,
  3530. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3531. * See the License for the specific language governing permissions and
  3532. * limitations under the License.
  3533. *
  3534. * observable.js
  3535. * Observer design pattern
  3536. */
  3537. /**
  3538. * Implementation of the Observer design pattern
  3539. * @abstract
  3540. */
  3541. class Observable
  3542. {
  3543. /**
  3544. * Constructor
  3545. */
  3546. constructor()
  3547. {
  3548. /** @type {Function[]} subscribers / callbacks */
  3549. this._subscribers = [];
  3550. /** @type {object[]} "this" pointers */
  3551. this._thisptr = [];
  3552. /** @type {Array<any[]>} function arguments */
  3553. this._args = [];
  3554. }
  3555. /**
  3556. * Add subscriber
  3557. * @param {Function} fn callback
  3558. * @param {object} [thisptr] "this" pointer to be used when invoking the callback
  3559. * @param {...any} args arguments to be passed to the callback
  3560. */
  3561. subscribe(fn, thisptr, ...args)
  3562. {
  3563. this._subscribers.push(fn);
  3564. this._thisptr.push(thisptr);
  3565. this._args.push(args);
  3566. }
  3567. /**
  3568. * Remove subscriber
  3569. * @param {Function} fn previously added callback
  3570. * @param {object} [thisptr] "this" pointer
  3571. */
  3572. unsubscribe(fn, thisptr)
  3573. {
  3574. for(let j = this._subscribers.length - 1; j >= 0; j--) {
  3575. if(this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
  3576. this._subscribers.splice(j, 1);
  3577. this._thisptr.splice(j, 1);
  3578. this._args.splice(j, 1);
  3579. break;
  3580. }
  3581. }
  3582. }
  3583. /**
  3584. * Notify all subscribers about a state change
  3585. * @protected
  3586. */
  3587. _notify()
  3588. {
  3589. for(let i = 0; i < this._subscribers.length; i++)
  3590. this._subscribers[i].apply(this._thisptr[i], this._args[i]);
  3591. }
  3592. }
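/*
 * Example (illustrative): how the Observer pattern above is used by subclasses
 * such as SpeedyGL (module 1567). Extra subscribe() arguments are stored and
 * passed back to the callback on every _notify().
 *
 *   class Counter extends Observable {
 *       increment() { this._notify(); }
 *   }
 *   const counter = new Counter();
 *   const onChange = tag => console.log('changed', tag);
 *   counter.subscribe(onChange, null, 'my-tag');  // callback, "this" pointer, ...args
 *   counter.increment();                          // logs: changed my-tag
 *   counter.unsubscribe(onChange, null);
 */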
  3593. /***/ }),
  3594. /***/ 6467:
  3595. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_131144__) => {
  3596. "use strict";
  3597. /* harmony export */ __nested_webpack_require_131144__.d(__nested_webpack_exports__, {
  3598. /* harmony export */ f5: () => (/* binding */ ImageFormat),
  3599. /* harmony export */ kQ: () => (/* binding */ PixelComponent),
  3600. /* harmony export */ kg: () => (/* binding */ ColorComponentId),
  3601. /* harmony export */ zu: () => (/* binding */ MediaType)
  3602. /* harmony export */ });
  3603. /*
  3604. * speedy-vision.js
  3605. * GPU-accelerated Computer Vision for JavaScript
  3606. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3607. *
  3608. * Licensed under the Apache License, Version 2.0 (the "License");
  3609. * you may not use this file except in compliance with the License.
  3610. * You may obtain a copy of the License at
  3611. *
  3612. * http://www.apache.org/licenses/LICENSE-2.0
  3613. *
  3614. * Unless required by applicable law or agreed to in writing, software
  3615. * distributed under the License is distributed on an "AS IS" BASIS,
  3616. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3617. * See the License for the specific language governing permissions and
  3618. * limitations under the License.
  3619. *
  3620. * types.js
  3621. * Types & formats
  3622. */
  3623. /**
  3624. * Media types
  3625. * @enum {Symbol}
  3626. */
  3627. const MediaType = Object.freeze({
  3628. Image: Symbol('Image'),
  3629. Video: Symbol('Video'),
  3630. Canvas: Symbol('Canvas'),
  3631. OffscreenCanvas: Symbol('OffscreenCanvas'),
  3632. Bitmap: Symbol('Bitmap'),
  3633. Data: Symbol('Data')
  3634. });
  3635. /**
  3636. * Image formats
  3637. * @enum {Symbol}
  3638. */
  3639. const ImageFormat = Object.freeze({
  3640. RGBA: Symbol('RGBA'),
  3641. GREY: Symbol('GREY'),
  3642. });
  3643. /**
  3644. * Pixel component (bitwise flags)
  3645. * @typedef {number} PixelComponent
  3646. */
  3647. const PixelComponent = Object.freeze({
  3648. RED: 1,
  3649. GREEN: 2,
  3650. BLUE: 4,
  3651. ALPHA: 8,
  3652. ALL: 15 // = RED | GREEN | BLUE | ALPHA
  3653. });
  3654. /**
  3655. * Component ID utility
  3656. */
  3657. const ColorComponentId = Object.freeze({
  3658. [PixelComponent.RED]: 0,
  3659. [PixelComponent.GREEN]: 1,
  3660. [PixelComponent.BLUE]: 2,
  3661. [PixelComponent.ALPHA]: 3
  3662. });
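/*
 * Example (illustrative): PixelComponent values are bitwise flags, whereas
 * ColorComponentId maps a single flag to its channel offset (0-3):
 *
 *   const components = PixelComponent.RED | PixelComponent.ALPHA;  // 9
 *   (components & PixelComponent.GREEN) != 0;                      // false
 *   ColorComponentId[PixelComponent.BLUE];                         // 2
 */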
  3663. /***/ }),
  3664. /***/ 2191:
  3665. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_133172__) => {
  3666. "use strict";
  3667. /* harmony export */ __nested_webpack_require_133172__.d(__nested_webpack_exports__, {
  3668. /* harmony export */ A: () => (/* binding */ Utils)
  3669. /* harmony export */ });
  3670. /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_133172__(5619);
  3671. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_133172__(8902);
  3672. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_133172__(5637);
  3673. /*
  3674. * speedy-vision.js
  3675. * GPU-accelerated Computer Vision for JavaScript
  3676. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3677. *
  3678. * Licensed under the Apache License, Version 2.0 (the "License");
  3679. * you may not use this file except in compliance with the License.
  3680. * You may obtain a copy of the License at
  3681. *
  3682. * http://www.apache.org/licenses/LICENSE-2.0
  3683. *
  3684. * Unless required by applicable law or agreed to in writing, software
  3685. * distributed under the License is distributed on an "AS IS" BASIS,
  3686. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3687. * See the License for the specific language governing permissions and
  3688. * limitations under the License.
  3689. *
  3690. * utils.js
  3691. * Generic utilities
  3692. */
  3693. /**
  3694. * Generic utilities
  3695. */
  3696. class Utils
  3697. {
  3698. /**
  3699. * Generates a warning
  3700. * @param {string} text message text
  3701. * @param {...string} args optional text
  3702. */
  3703. static warning(text, ...args)
  3704. {
  3705. //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
  3706. if(_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none')
  3707. console.warn('[speedy-vision] ' + text, ...args);
  3708. }
  3709. /**
  3710. * Logs a message
  3711. * @param {string} text message text
  3712. * @param {...string} args optional text
  3713. */
  3714. static log(text, ...args)
  3715. {
  3716. if(_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none')
  3717. console.log('[speedy-vision] ' + text, ...args);
  3718. }
  3719. /**
  3720. * Assertion
  3721. * @param {boolean} expr expression
  3722. * @param {string} [text] error message
  3723. * @throws {AssertionError}
  3724. */
  3725. static assert(expr, text = '')
  3726. {
  3727. if(!expr)
  3728. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
  3729. }
  3730. /**
  3731. * Gets the names of the arguments of the specified function
  3732. * @param {Function} fun
  3733. * @returns {string[]}
  3734. */
  3735. static functionArguments(fun)
  3736. {
  3737. const code = fun.toString();
  3738. const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' :
  3739. (code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>');
  3740. const match = new RegExp(regex).exec(code);
  3741. if(match !== null) {
  3742. const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
  3743. return args.split(',').map(argname =>
  3744. argname.replace(/=.*$/, '').trim() // remove default params & trim
  3745. ).filter(argname =>
  3746. argname // handle trailing commas
  3747. );
  3748. }
  3749. else
  3750. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
  3751. }
  3752. /**
  3753. * Get all property descriptors from an object,
  3754. * traversing its entire prototype chain
  3755. * @param {object} obj
  3756. * @returns {object}
  3757. */
  3758. static getAllPropertyDescriptors(obj)
  3759. {
  3760. if(obj) {
  3761. const proto = Object.getPrototypeOf(obj);
  3762. return {
  3763. ...(Utils.getAllPropertyDescriptors(proto)),
  3764. ...Object.getOwnPropertyDescriptors(obj)
  3765. };
  3766. }
  3767. else
  3768. return Object.create(null);
  3769. }
  3770. /**
  3771. * Creates a HTMLCanvasElement with the given dimensions
  3772. * @param {number} width in pixels
  3773. * @param {number} height in pixels
  3774. * @returns {HTMLCanvasElement}
  3775. */
  3776. static createCanvas(width, height)
  3777. {
  3778. const canvas = document.createElement('canvas');
  3779. canvas.width = width;
  3780. canvas.height = height;
  3781. return canvas;
  3782. }
  3783. /**
  3784. * Generate a 1D gaussian kernel with custom sigma
  3785. * Tip: use kernelSize >= (5 * sigma), kernelSize odd
  3786. * @param {number} sigma gaussian sigma
  3787. * @param {number} [kernelSize] kernel size, odd number
  3788. * @param {boolean} [normalized] normalize entries so that their sum is 1
  3789. * @returns {number[]}
  3790. */
  3791. static gaussianKernel(sigma, kernelSize = 0, normalized = true)
  3792. {
  3793. /*
  3794. * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
  3795. *
  3796. * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
  3797. *
  3798. * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
  3799. *
  3800. * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
  3801. * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
  3802. *
  3803. * Setting a constant c := sqrt(2) * sigma, it follows that:
  3804. *
  3805. * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
  3806. */
  3807. // default kernel size
  3808. if(kernelSize == 0) {
  3809. kernelSize = Math.ceil(5.0 * sigma) | 0;
  3810. kernelSize += 1 - (kernelSize % 2);
  3811. }
  3812. // validate input
  3813. kernelSize |= 0;
  3814. if(kernelSize < 1 || kernelSize % 2 == 0)
  3815. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);
  3816. else if(sigma <= 0.0)
  3817. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
  3818. // function erf(x) = -erf(-x) can be approximated numerically. See:
  3819. // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
  3820. const kernel = new Array(kernelSize);
  3821. // set constants
  3822. const N = kernelSize >> 1; // integer (floor, div 2)
  3823. const c = (+sigma) * 1.4142135623730951; // sigma * sqrt(2)
  3824. const m = 0.3275911;
  3825. const a1 = 0.254829592;
  3826. const a2 = -0.284496736;
  3827. const a3 = 1.421413741;
  3828. const a4 = -1.453152027;
  3829. const a5 = 1.061405429;
  3830. // compute the kernel
  3831. let sum = 0.0;
  3832. for(let j = 0; j < kernelSize; j++) {
  3833. let xa = (j - N + 0.5) / c;
  3834. let xb = (j - N - 0.5) / c;
  3835. let sa = 1.0, sb = 1.0;
  3836. if(xa < 0.0) { sa = -1.0; xa = -xa; }
  3837. if(xb < 0.0) { sb = -1.0; xb = -xb; }
  3838. const ta = 1.0 / (1.0 + m * xa);
  3839. const tb = 1.0 / (1.0 + m * xb);
  3840. const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
  3841. const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
  3842. const ya = 1.0 - pa * Math.exp(-xa * xa);
  3843. const yb = 1.0 - pb * Math.exp(-xb * xb);
  3844. const erfa = sa * ya;
  3845. const erfb = sb * yb;
  3846. const fp = (erfa - erfb) / (2.0 * c);
  3847. kernel[j] = fp;
  3848. sum += fp;
  3849. }
  3850. // normalize the kernel
  3851. if(normalized) {
  3852. for(let j = 0; j < kernelSize; j++)
  3853. kernel[j] /= sum;
  3854. }
  3855. // done!
  3856. return kernel;
  3857. }
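/*
 * Example (illustrative): for sigma = 1 the default kernel size is ceil(5 * 1) = 5,
 * and the normalized kernel approximates the integral of the Gaussian over each
 * pixel's [-0.5, +0.5] neighborhood:
 *
 *   Utils.gaussianKernel(1.0);
 *   // -> approximately [ 0.061, 0.245, 0.388, 0.245, 0.061 ] (sums to 1)
 */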
  3858. /**
  3859. * Generate a 2D kernel in column-major format using two separable 1D kernels
  3860. * @param {number[]} ka 1D kernel
  3861. * @param {number[]} [kb]
  3862. * @returns {number[]}
  3863. */
  3864. static kernel2d(ka, kb = ka)
  3865. {
  3866. const ksize = ka.length;
Utils.assert(ka.length == kb.length);
  3868. Utils.assert(ksize >= 1 && ksize % 2 == 1);
  3869. // compute the outer product ka x kb
  3870. let kernel2d = new Array(ksize * ksize), k = 0;
  3871. for(let col = 0; col < ksize; col++) {
  3872. for(let row = 0; row < ksize; row++)
  3873. kernel2d[k++] = ka[row] * kb[col];
  3874. }
  3875. return kernel2d;
  3876. }
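/*
 * Example (illustrative): kernel2d() takes the outer product of two 1D kernels in
 * column-major order, which is how separable 2D kernels are assembled:
 *
 *   Utils.kernel2d([1, 2, 1]);
 *   // -> [ 1, 2, 1,   2, 4, 2,   1, 2, 1 ]  (3x3, columns laid out consecutively)
 */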
  3877. /**
  3878. * Cartesian product a x b: [ [ai, bj] for all i, j ]
  3879. * @param {number[]} a
  3880. * @param {number[]} b
  3881. * @returns {Array<[number,number]>}
  3882. */
  3883. static cartesian(a, b)
  3884. {
  3885. return [].concat(...a.map(a => b.map(b => [a, b])));
  3886. }
  3887. /**
  3888. * Symmetric range
  3889. * @param {number} n non-negative integer
  3890. * @returns {number[]} [ -n, ..., n ]
  3891. */
  3892. static symmetricRange(n)
  3893. {
  3894. if((n |= 0) < 0)
  3895. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
  3896. return [...(Array(2*n + 1).keys())].map(x => x - n);
  3897. }
  3898. /**
  3899. * Compute the [0, n) range of integers
  3900. * @param {number} n positive integer
  3901. * @returns {number[]} [ 0, 1, ..., n-1 ]
  3902. */
  3903. static range(n)
  3904. {
  3905. if((n |= 0) <= 0)
  3906. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
  3907. return [...(Array(n).keys())];
  3908. }
  3909. /**
  3910. * Shuffle in-place
  3911. * @template T
  3912. * @param {T[]} arr
  3913. * @returns {T[]} arr
  3914. */
  3915. static shuffle(arr)
  3916. {
  3917. const len = arr.length;
  3918. const m = len - 1;
// Fisher-Yates shuffle
  3920. for(let i = 0; i < m; i++) {
  3921. const j = i + ((Math.random() * (len - i)) | 0); // i <= j < arr.length
  3922. if(i !== j) {
  3923. const t = arr[i];
  3924. arr[i] = arr[j];
  3925. arr[j] = t;
  3926. }
  3927. }
  3928. return arr;
  3929. }
  3930. /**
  3931. * Flatten an array (1 level only)
  3932. * @template U
  3933. * @param {U[]} array
  3934. * @returns {U[]}
  3935. */
  3936. static flatten(array)
  3937. {
  3938. //return array.flat();
  3939. //return array.reduce((arr, val) => arr.concat(val), []);
  3940. const flat = [];
  3941. for(let i = 0, n = array.length; i < n; i++) {
  3942. const entry = array[i];
  3943. if(Array.isArray(entry)) {
  3944. for(let j = 0, m = entry.length; j < m; j++)
  3945. flat.push(entry[j]);
  3946. }
  3947. else
  3948. flat.push(entry);
  3949. }
  3950. return flat;
  3951. }
  3952. /**
3953. * Decode a 16-bit float from an
3954. * unsigned 16-bit integer
  3955. * @param {number} uint16
  3956. * @returns {number}
  3957. */
  3958. static decodeFloat16(uint16)
  3959. {
  3960. // decode according to sec 2.1.2
  3961. // 16-Bit Floating Point Numbers
  3962. // of the OpenGL ES 3 spec
  3963. const s = (uint16 & 0xFFFF) >> 15; // sign bit
  3964. const e = (uint16 & 0x7FFF) >> 10; // exponent
  3965. const m = (uint16 & 0x3FF); // mantissa
  3966. const sign = 1 - 2 * s; // (-1)^s
  3967. if(e == 0)
  3968. return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
  3969. else if(e == 31)
  3970. return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
  3971. const f = e >= 15 ? (1 << (e-15)) : 1.0 / (1 << (15-e)); // 2^(e-15)
  3972. return sign * f * (1.0 + m * 0.0009765625); // normal
  3973. }
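// Worked examples (added, not part of the original bundle): a few well-known half-float
// bit patterns and the values this routine yields for them:
//
//   Utils.decodeFloat16(0x3C00);   // => 1.0         (sign 0, exponent 15, mantissa 0)
//   Utils.decodeFloat16(0xC000);   // => -2.0
//   Utils.decodeFloat16(0x7C00);   // => +Infinity   (exponent 31, mantissa 0)
//   Utils.decodeFloat16(0x0001);   // => 5.960464477539063e-8  (smallest subnormal)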
  3974. /**
  3975. * Wrapper around getUserMedia()
  3976. * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
  3977. * @returns {SpeedyPromise<HTMLVideoElement>}
  3978. */
  3979. static requestCameraStream(constraints = { audio: false, video: true })
  3980. {
  3981. Utils.log('Accessing the webcam...');
  3982. if(!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  3983. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
  3984. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
  3985. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  3986. const video = document.createElement('video');
  3987. video.onloadedmetadata = () => {
  3988. video.play();
  3989. Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
  3990. resolve(video);
  3991. };
  3992. video.setAttribute('playsinline', '');
  3993. video.setAttribute('autoplay', '');
  3994. if(constraints.audio === false || constraints.audio === undefined)
  3995. video.setAttribute('muted', '');
  3996. video.srcObject = stream;
  3997. })
  3998. .catch(err => {
  3999. if(err.name === 'NotAllowedError') {
  4000. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(
  4001. `Please give access to the camera and reload the page.`,
  4002. err
  4003. ));
  4004. }
  4005. else if(err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
  4006. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(
  4007. `Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`,
  4008. err
  4009. ));
  4010. }
  4011. else {
  4012. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(
  4013. `Can't access the webcam.`,
  4014. err
  4015. ));
  4016. }
  4017. });
  4018. });
  4019. }
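// Usage sketch (added, not part of the original bundle; assumes SpeedyPromise exposes the
// usual then/catch interface): the promise resolves to a playing <video> element once its
// metadata is available.
//
//   Utils.requestCameraStream({ audio: false, video: { facingMode: 'environment' } })
//        .then(video => document.body.appendChild(video))
//        .catch(err => console.error(err));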
  4020. /**
  4021. * Format binary data as a string with hex values
  4022. * @param {ArrayBuffer} bytes
  4023. * @returns {string}
  4024. */
  4025. static formatBinaryData(bytes)
  4026. {
  4027. const uint8 = new Uint8Array(bytes);
  4028. const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
  4029. return array.join(' ');
  4030. }
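// Example (added, not part of the original bundle):
//
//   Utils.formatBinaryData(new Uint8Array([0, 15, 255]).buffer);   // => "00 0f ff"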
  4031. /**
  4032. * Returns a string containing platform brand information
  4033. * @returns {string}
  4034. */
  4035. static platformString()
  4036. {
  4037. // navigator.userAgent is easily and often spoofed, and thus is unreliable
  4038. // use the NavigatorUAData interface if available
  4039. if(typeof navigator.userAgentData === 'object') {
  4040. // use only low entropy data, so we don't need to ask the permission
  4041. // of the user to read this string
  4042. return navigator.userAgentData.platform;
  4043. }
4044. // navigator.platform is deprecated. It can be spoofed on Firefox, but,
4045. // at the time of this writing, there is apparently no alternative.
  4046. return navigator.platform;
  4047. }
  4048. }
  4049. /***/ }),
  4050. /***/ 8211:
  4051. /***/ ((module) => {
  4052. module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  4053. /***/ }),
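// Note (added sketch, not part of the original bundle): the shader above is one pass of a
// separable convolution along a single axis (AXIS 0 = x, AXIS 1 = y); offsets run from -R
// to +R while kernel indices run in reverse, i.e. the kernel is flipped (true convolution).
// A CPU reference for one output sample, with clamped borders for illustration only:
//
//   function convolve1dAt(src, x, kernel) {          // kernel.length = 2R + 1
//       const R = (kernel.length - 1) >> 1;
//       let sum = 0;
//       for(let o = -R; o <= R; o++) {
//           const xi = Math.min(Math.max(x + o, 0), src.length - 1);
//           sum += src[xi] * kernel[R - o];           // reversed index, as in S(x, k)
//       }
//       return sum;
//   }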
  4054. /***/ 7360:
  4055. /***/ ((module) => {
  4056. module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  4057. /***/ }),
  4058. /***/ 8191:
  4059. /***/ ((module) => {
  4060. module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
  4061. /***/ }),
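// Note (added sketch, not part of the original bundle): the X(i,j) macros above form a fixed
// compare-exchange network that partially sorts the neighborhood just enough to leave the
// median at its final slot (p[4], p[12] or p[24]). A plain CPU reference for the 3x3 case:
//
//   function median3x3(window9) {                    // window9: the 9 grey values
//       return window9.slice().sort((a, b) => a - b)[4];
//   }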
  4062. /***/ 4438:
  4063. /***/ ((module) => {
  4064. module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
  4065. /***/ }),
  4066. /***/ 5867:
  4067. /***/ ((module) => {
  4068. module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
  4069. /***/ }),
  4070. /***/ 9252:
  4071. /***/ ((module) => {
  4072. module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
  4073. /***/ }),
  4074. /***/ 8609:
  4075. /***/ ((module) => {
  4076. module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
  4077. /***/ }),
  4078. /***/ 4672:
  4079. /***/ ((module) => {
  4080. module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
  4081. /***/ }),
  4082. /***/ 9778:
  4083. /***/ ((module) => {
  4084. module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((x) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
  4085. /***/ }),
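// Note (added sketch, not part of the original bundle): positions are stored as fixed-point
// integers with FIX_BITS fractional bits, so FIX_RESOLUTION is presumably 2^FIX_BITS.
// A JS sketch of the round-trip performed by ftofix()/fixtof() above, assuming FIX_BITS = 3
// purely for illustration:
//
//   const FIX_BITS = 3, FIX_RESOLUTION = 1 << FIX_BITS;        // 8
//   const ftofix = x => Math.trunc(x * FIX_RESOLUTION + 0.5);  // 2.4 -> 19  (19.7 truncated)
//   const fixtof = f => f / FIX_RESOLUTION;                    // 19  -> 2.375 (1/8-pixel steps)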
  4086. /***/ 8710:
  4087. /***/ ((module) => {
  4088. module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
  4089. /***/ }),
  4090. /***/ 2434:
  4091. /***/ ((module) => {
  4092. module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
  4093. /***/ }),
  4094. /***/ 439:
  4095. /***/ ((module) => {
  4096. module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
  4097. /***/ }),
  4098. /***/ 8545:
  4099. /***/ ((module) => {
  4100. module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
  4101. /***/ }),
  4102. /***/ 6762:
  4103. /***/ ((module) => {
  4104. module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
  4105. /***/ }),
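// Note (added sketch, not part of the original bundle): a match is packed into 32 bits as
// index | (distance << MATCH_INDEX_BITS) and then spread over an RGBA pixel by encodeUint32().
// A JS sketch of the packing, assuming MATCH_INDEX_BITS = 16 purely for illustration:
//
//   const MATCH_INDEX_BITS = 16, MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
//   const encodeMatch = (index, dist) =>
//       (((index & MATCH_INDEX_MASK) | (dist << MATCH_INDEX_BITS)) >>> 0);
//   const decodeMatch = u32 =>
//       ({ index: u32 & MATCH_INDEX_MASK, dist: u32 >>> MATCH_INDEX_BITS });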
  4106. /***/ 7639:
  4107. /***/ ((module) => {
  4108. module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
  4109. /***/ }),
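// Note (added sketch, not part of the original bundle): each encoded keypoint begins with a
// header whose first RGBA pixel stores the (x, y) position as two little-endian 16-bit
// fixed-point values, exactly as decodeKeypoint() above reads them. A JS sketch of that
// position decoding, assuming FIX_RESOLUTION = 8 purely for illustration:
//
//   function decodePosition([r, g, b, a], FIX_RESOLUTION = 8) {   // 4 bytes of pixel #0
//       const fx = r | (g << 8), fy = b | (a << 8);                // 16-bit fixed-point x, y
//       return { x: fx / FIX_RESOLUTION, y: fy / FIX_RESOLUTION };
//   }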
  4110. /***/ 431:
  4111. /***/ ((module) => {
  4112. module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
  4113. /***/ }),
  4114. /***/ 6822:
  4115. /***/ ((module) => {
  4116. module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
  4117. /***/ }),
  4118. /***/ 2728:
  4119. /***/ ((module) => {
  4120. module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
  4121. /***/ }),
  4122. /***/ 6823:
  4123. /***/ ((module) => {
  4124. module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
  4125. /***/ }),
  4126. /***/ 1341:
  4127. /***/ ((module) => {
  4128. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  4129. /***/ }),
  4130. /***/ 7833:
  4131. /***/ ((module) => {
  4132. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  4133. /***/ }),
  4134. /***/ 2352:
  4135. /***/ ((module) => {
  4136. module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
  4137. /***/ }),
  4138. /***/ 7541:
  4139. /***/ ((module) => {
  4140. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
  4141. /***/ }),
  4142. /***/ 4868:
  4143. /***/ ((module) => {
  4144. module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
  4145. /***/ }),
  4146. /***/ 5591:
  4147. /***/ ((module) => {
  4148. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
  4149. /***/ }),
  4150. /***/ 191:
  4151. /***/ ((module) => {
  4152. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  4153. /***/ }),
  4154. /***/ 5467:
  4155. /***/ ((module) => {
  4156. module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
  4157. /***/ }),
  4158. /***/ 336:
  4159. /***/ ((module) => {
  4160. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
  4161. /***/ }),
  4162. /***/ 8968:
  4163. /***/ ((module) => {
  4164. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
  4165. /***/ }),
  4166. /***/ 1733:
  4167. /***/ ((module) => {
  4168. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
  4169. /***/ }),
  4170. /***/ 9674:
  4171. /***/ ((module) => {
  4172. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
  4173. /***/ }),
  4174. /***/ 2090:
  4175. /***/ ((module) => {
  4176. module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
  4177. /***/ }),
  4178. /***/ 1855:
  4179. /***/ ((module) => {
  4180. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
  4181. /***/ }),
  4182. /***/ 4824:
  4183. /***/ ((module) => {
  4184. module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
  4185. /***/ }),
  4186. /***/ 2381:
  4187. /***/ ((module) => {
  4188. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  4189. /***/ }),
  4190. /***/ 6060:
  4191. /***/ ((module) => {
  4192. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
  4193. /***/ }),
  4194. /***/ 9974:
  4195. /***/ ((module) => {
  4196. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
/***/ }),
/***/ 3047:
/***/ ((module) => {
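// Initializer for an encoded-matches texture: every entry starts as
// (MATCH_MAX_INDEX, 0) when ENCODE_FILTERS is set, or (MATCH_MAX_INDEX,
// MATCH_MAX_DISTANCE) otherwise.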
  4200. module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
/***/ }),
/***/ 3266:
/***/ ((module) => {
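// k-NN assembly: for each keypoint, this appears to copy its k-th best match from
// encodedKthMatches into slot number kthMatch of the output, which stores
// numberOfMatchesPerKeypoint consecutive matches per keypoint.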
  4204. module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
/***/ }),
/***/ 8018:
/***/ ((module) => {
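// Evaluates the Laplacian of the pyramid at lod - lodStep and lod + lodStep (both
// shifted by lodOffset) for each corner and packs the two values as a pair of
// float16s, presumably for scale selection further down the pipeline.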
  4208. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
/***/ }),
/***/ 3168:
/***/ ((module) => {
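// Pyramidal Lucas-Kanade optical flow for keypoints: reads a (WINDOW_SIZE+2)^2
// neighborhood from the previous and next pyramid levels, builds the 2x2 spatial
// gradient matrix G, then iteratively refines the displacement with
// delta = G^-1 * mismatch, discarding keypoints whose matrix is poorly conditioned
// or whose flow leaves the image at level 0.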
  4212. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
/***/ }),
/***/ 3890:
/***/ ((module) => {
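// Lookup table of corner locations (multi-stage): stage 1 writes the (x, y) of each
// detected corner, or a null 0xFFFF entry; stages > 1 appear to merge 2x2 blocks of
// the table, packing valid entries together and carrying the per-block count in the
// .z channel; the remaining branch looks like a debug visualization.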
  4216. module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8647:
/***/ ((module) => {
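// Companion vertex shader for the lookup-table pass above: precomputes the texture
// coordinates of the 8 neighbors at +-blockSize offsets as varyings, so the fragment
// shader can sample them without recomputing the offsets.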
  4220. module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
/***/ }),
/***/ 4776:
/***/ ((module) => {
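// LSH-based descriptor matching (multi-probe): hashes the keypoint descriptor with
// one of the precomputed bit sequences, then probes the bucket of that hash and of
// every hash obtained by flipping up to LEVEL of its bits (the SWAP tables). It keeps
// the best candidate that is still worse than the current "filter" match, apparently
// so that repeated passes can enumerate the k nearest neighbors.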
  4224. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
/***/ }),
/***/ 2648:
/***/ ((module) => {
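// Keypoint mixing helper (multi-stage): stage 1 concatenates two encoded keypoint
// textures; stage 2 tags each keypoint index with a validity bit; stage 3 looks like
// a block-doubling compaction that packs valid indices together while keeping their
// count in the upper 16 bits; stage 4 gathers the keypoint data in the new order and
// stage 5 is a debug visualization.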
  4228. module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8825:
/***/ ((module) => {
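// Non-maximum suppression across scales: compares each corner's strength against its
// 3x3 neighbors at lod - lodStep and lod + lodStep (Laplacian strengths when
// USE_LAPLACIAN is set, raw scores otherwise) and keeps the score only if no neighbor
// at a matching level of detail exceeds it.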
  4232. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
/***/ }),
/***/ 5693:
/***/ ((module) => {
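// Non-maximum suppression within a single level of detail: samples the 3x3
// neighborhood at the keypoint's own scale and zeroes the score unless it is the
// local maximum; the #if 1 block appears to first discard corners that are not near
// the center of their pot x pot grid cell (a decimation step).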
  4236. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
/***/ }),
/***/ 9280:
/***/ ((module) => {
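// Non-maximum suppression over concentric rings of neighbors: always the inner 3x3
// ring, plus a middle ring when MULTISCALE is enabled; only neighbors at the same or
// an adjacent encoded level of detail take part, and the corner keeps its score only
// if none of them beats it.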
  4240. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
/***/ }),
/***/ 9108:
/***/ ((module) => {
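// ORB-style binary descriptor: rotates the 256 BRIEF point pairs of pat31 by the
// keypoint orientation (quantized to 12-degree steps), compares image intensities at
// each rotated pair scaled by the keypoint's octave, and packs the resulting bits
// into a 32-byte descriptor, 4 bytes per output pixel.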
  4244. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-
8,-13),\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
/***/ }),
/***/ 7137:
/***/ ((module) => {
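// Keypoint orientation by intensity centroid (ORB style): accumulates the moments
// m = sum(offset * intensity) over a disk whose sampling radius shrinks with the
// keypoint's octave (DEFAULT_PATCH_RADIUS = 15 at the base level), and writes
// atan2(m.y, m.x) back as the encoded orientation.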
  4248. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(
10,-5),P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
/***/ }),
/***/ 9739:
/***/ ((module) => {
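// Subpixel scale refinement: evaluates a corner-strength measure (Laplacian when
// METHOD == 0, a FAST-like score when METHOD == 1) at lod - lodStep, lod and
// lod + lodStep, fits a parabola through the three samples and nudges the keypoint's
// encoded level of detail toward the interpolated maximum.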
  4252. module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
/***/ }),
/***/ 8231:
/***/ ((module) => {
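// Parallel max-reduction of corner scores: at each iteration every thread compares
// its own score with three partners inside a cluster of side 2^(iterationNumber+1)
// and keeps the maximum, so after enough passes each cluster holds its top score.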
  4256. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
  4257. /***/ }),
  4258. /***/ 2518:
  4259. /***/ ((module) => {
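/* The GLSL below appears to shuffle encoded keypoints: a permutation is supplied as a std140
   uniform block of ivec4s and applied block-wise within chunks of PERMUTATION_MAXLEN indices;
   each output pixel copies the record of the permuted keypoint index. */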
  4260. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
  4261. /***/ }),
  4262. /***/ 8096:
  4263. /***/ ((module) => {
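/*
The GLSL below appears to implement a 3-stage GPU sort of keypoints by score: STAGE 1 packs
(keypointIndex, score, valid) into one pixel per keypoint, STAGE 2 repeatedly merges pairs of
adjacent sorted blocks by computing, for each output slot k, the k-th best element of two sorted
runs (selectKth), and STAGE 3 gathers the keypoint records in sorted order, nulling everything past
maxKeypoints. For reference, a CPU sketch of the same "k-th element of two sorted arrays" idea,
simplified to ascending order and with illustrative names only:

  function kthOfTwoSorted(a, b, k) {                  // k is 0-based
    let lo = Math.max(0, k - b.length), hi = Math.min(k, a.length);
    while (lo < hi) {                                 // binary-search how many items come from a
      const i = (lo + hi) >> 1, j = k - i;
      if (j > 0 && b[j - 1] > a[i]) lo = i + 1; else hi = i;
    }
    const i = lo, j = k - i;
    return Math.min(i < a.length ? a[i] : Infinity, j < b.length ? b[j] : Infinity);
  }
*/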
  4264. module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
  4265. /***/ }),
  4266. /***/ 5795:
  4267. /***/ ((module) => {
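/*
The GLSL below appears to refine keypoint positions to subpixel precision: it reads a small pyramid
patch, computes Sobel derivatives, builds a response map from the smaller eigenvalue of the local
2x2 structure tensor, and refines the peak with either a 1D quadratic fit (METHOD 0), a 2D
quadratic/Newton step (METHOD 1), or iterative bilinear/bicubic upsampling (METHOD 2/3). The
Newton step of METHOD 1, offset = -H^-1 * gradient, as a CPU sketch with illustrative names only:

  function taylorOffset(dx, dy, dxx, dyy, dxy, eps = 1e-5) {
    const det = dxx * dyy - dxy * dxy;                // determinant of the Hessian H
    if (!(det > eps && dxx < 0)) return [0, 0];       // accept only a well-conditioned maximum
    return [-(dyy * dx - dxy * dy) / det,             // -H^{-1} * (dx, dy)
            -(dxx * dy - dxy * dx) / det];
  }
*/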
  4268. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
  4269. /***/ }),
  4270. /***/ 3169:
  4271. /***/ ((module) => {
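/* The GLSL below appears to apply optical-flow results to the keypoints: the flow vector decoded
   from encodedFlow is added to each keypoint's position, and keypoints whose flow was discarded
   are themselves written out as discarded. */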
  4272. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
  4273. /***/ }),
  4274. /***/ 1337:
  4275. /***/ ((module) => {
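/* The GLSL below appears to merge computed orientations back into the keypoint stream: for valid
   keypoints it overwrites the orientation value (green channel of the pixel at offset 1 of each
   keypoint record) with the value read from encodedOrientations. */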
  4276. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
  4277. /***/ }),
  4278. /***/ 6187:
  4279. /***/ ((module) => {
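/* The GLSL below appears to upload per-keypoint "extra" data: the cells that follow the keypoint
   header are filled from a tightly packed encodedData texture, skipping bad keypoints. */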
  4280. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
  4281. /***/ }),
  4282. /***/ 477:
  4283. /***/ ((module) => {
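/* The GLSL below appears to upload a batch of keypoints from a std140 uniform buffer (each vec4
   holding x, y, lod, score) into the encoded-keypoints texture for indices in
   [startIndex, endIndex); indices at or past endIndex are written as null keypoints. */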
  4284. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
  4285. /***/ }),
  4286. /***/ 4050:
  4287. /***/ ((module) => {
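/* The GLSL below seems to be a downsampling helper: the active branch simply resamples the input
   with texture() at the output's coordinates, while the disabled branch would decimate by taking
   every other pixel. */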
  4288. module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
  4289. /***/ }),
  4290. /***/ 5545:
  4291. /***/ ((module) => {
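/* The GLSL below appears to upsample an image 2x: positions where x+y is even copy pixel
   (x/2, y/2), the others get zero RGB while keeping the source alpha. */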
  4292. module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
  4293. /***/ }),
  4294. /***/ 7113:
  4295. /***/ ((module) => {
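/* The GLSL below appears to blend two images pixel-wise as
   clamp(alpha*image0 + beta*image1 + gamma, 0, 1), treating out-of-bounds pixels as black and
   forcing the output alpha to 1. */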
  4296. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
  4297. /***/ }),
  4298. /***/ 1202:
  4299. /***/ ((module) => {
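/* The GLSL below appears to resample an image using either nearest-neighbor
   (INTERPOLATION_METHOD 0) or bilinear (INTERPOLATION_METHOD 1) interpolation. */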
  4300. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
  4301. /***/ }),
  4302. /***/ 7971:
  4303. /***/ ((module) => {
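/* The GLSL below appears to warp an image by a homography: each output pixel is mapped through
   inverseHomography (q = H*p, then q.xy/q.z), sampled bilinearly when the mapped point falls
   inside the source, and painted opaque black otherwise. */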
  4304. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
  4305. /***/ }),
  4306. /***/ 6122:
  4307. /***/ ((module) => {
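/* The GLSL below appears to copy a single channel of src (selected by srcComponentId) into the
   channels of dest selected by the destComponents bitmask. */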
  4308. module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
  4309. /***/ }),
  4310. /***/ 371:
  4311. /***/ ((module) => {
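/* The GLSL below appears to reshape a texture to a different size in raster order, padding the
   tail with "null" sentinels: null keypoints for TYPE 1, null float16 pairs for TYPE 2. */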
  4312. module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
  4313. /***/ }),
  4314. /***/ 7307:
  4315. /***/ ((module) => {
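/* The GLSL below is an identity copy of the input texture. */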
  4316. module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
  4317. /***/ }),
  4318. /***/ 8614:
  4319. /***/ ((module) => {
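/* The GLSL below appears to overwrite the channels selected by the pixelComponents bitmask with a
   constant value, leaving the other channels untouched. */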
  4320. module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
  4321. /***/ }),
  4322. /***/ 6271:
  4323. /***/ ((module) => {
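/* The GLSL below fills the output with the constant vec4(value). */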
  4324. module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
  4325. /***/ }),
  4326. /***/ 3016:
  4327. /***/ ((module) => {
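/* The vertex shader below flips the Y axis of gl_Position. */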
  4328. module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
  4329. /***/ }),
  4330. /***/ 3630:
  4331. /***/ ((module) => {
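/* The GLSL below looks like one pass of a parallel min/max reduction: it tracks the maximum of
   the red channel and the minimum of the green channel over growing clusters and outputs
   (max, min, max - min, alpha). */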
  4332. module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
  4333. /***/ }),
  4334. /***/ 8508:
  4335. /***/ ((module) => {
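/* The GLSL below appears to compute image derivatives with a 3x3 Sobel filter at a given pyramid
   level, reading the 3x3 neighborhood either from precomputed varyings or from pixel offsets, and
   encodes (df/dx, df/dy) as a pair of float16 values. */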
  4336. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
  4337. /***/ }),
  4338. /***/ 8073:
  4339. /***/ ((module) => {
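/* The vertex shader below appears to be the companion of the Sobel shader above: it precomputes
   the nine texture coordinates of the 3x3 neighborhood, scaled by 2^lod, as varyings. */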
  4340. module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
  4341. /***/ }),
  4342. /***/ 3575:
  4343. /***/ ((module) => {
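/* The template literal below is a base64-encoded WebAssembly binary, not text. Decoding its export
   section suggests a float32 matrix routine set with names such as Mat32_create, Mat32_multiply,
   Mat32_qr_full, Mat32_homography_ndlt, Mat32_pransac_homography and Mat32_transform_perspective,
   importing env.memory, fatal, bytefill and copyWithin; the payload must stay byte-exact for the
   module to load. */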
  4344. module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
  4345. f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
  4346. f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
  4347. BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
  4348. AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
  4349. CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
  4350. Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
  4351. AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
  4352. dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
  4353. TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
  4354. X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
  4355. MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
  4356. ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
  4357. PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
  4358. CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
  4359. AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
  4360. gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
  4361. AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
  4362. QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
  4363. AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
  4364. gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
  4365. IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
  4366. gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
  4367. hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
  4368. nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
  4369. AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
  4370. EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
  4371. IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
  4372. AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
  4373. IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
  4374. gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
  4375. AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
  4376. IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
  4377. AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
  4378. AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
  4379. IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
  4380. AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
  4381. QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
  4382. gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
  4383. IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
  4384. OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
  4385. IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
  4386. AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
  4387. APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
  4388. IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
  4389. AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
  4390. IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
  4391. CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
  4392. hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
  4393. IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
  4394. AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
  4395. BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
  4396. QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
  4397. ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
  4398. AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
  4399. BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
  4400. iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
  4401. IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
  4402. AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
  4403. gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
  4404. QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
  4405. QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
  4406. ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
  4407. gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
  4408. NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
  4409. AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
  4410. gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
  4411. IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
  4412. a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
  4413. KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
  4414. QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
  4415. CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
  4416. ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
  4417. QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
  4418. CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
  4419. AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
  4420. QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
  4421. ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
  4422. BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
  4423. AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
  4424. KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
  4425. EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
  4426. DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
  4427. BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
  4428. Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
  4429. DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
  4430. AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
  4431. QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
  4432. IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
  4433. QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
  4434. QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
  4435. IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
  4436. CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
  4437. KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
  4438. IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
  4439. ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
  4440. KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
  4441. AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
  4442. DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
  4443. QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
  4444. QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
  4445. ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
  4446. EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
  4447. SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
  4448. KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
  4449. gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
  4450. ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
  4451. ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
  4452. IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
  4453. IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
  4454. IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
  4455. DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
  4456. A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
  4457. akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
  4458. DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
  4459. u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
  4460. AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
  4461. IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
  4462. IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
  4463. aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
  4464. QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
  4465. KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
  4466. bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
  4467. IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
  4468. IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
  4469. IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
  4470. An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
  4471. DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
  4472. KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
  4473. QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
  4474. BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
  4475. QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
  4476. KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
  4477. ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
  4478. GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
  4479. QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
  4480. ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
  4481. B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
  4482. DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
  4483. BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
  4484. bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
  4485. IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
  4486. DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
  4487. IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
  4488. QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
  4489. FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
  4490. DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
  4491. AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
  4492. AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
  4493. QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
  4494. AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
  4495. EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
  4496. SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
  4497. Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
  4498. ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
  4499. IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
  4500. fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
  4501. IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
  4502. gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
  4503. gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
  4504. CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
  4505. IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
  4506. ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
  4507. DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
  4508. KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
  4509. gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
  4510. ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
  4511. AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
  4512. gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
  4513. aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
  4514. SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
  4515. CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
  4516. CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
  4517. CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
  4518. CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
  4519. AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
  4520. ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
  4521. gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
  4522. QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
  4523. AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
  4524. lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
  4525. kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
  4526. 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
  4527. ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
  4528. NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
  4529. gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
  4530. BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
  4531. AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
  4532. QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
  4533. GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
  4534. C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
  4535. DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
  4536. QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
  4537. IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
  4538. DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
  4539. f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
  4540. gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
  4541. gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
  4542. IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
  4543. AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
  4544. aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
  4545. bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
  4546. IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
  4547. AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
  4548. ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
  4549. gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
  4550. AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
  4551. IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
  4552. BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
  4553. l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
  4554. gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
  4555. AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
  4556. AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
  4557. aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
  4558. ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
  4559. aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
  4560. AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
  4561. FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
  4562. IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
  4563. oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
  4564. AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
  4565. IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
  4566. IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
  4567. 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
  4568. oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
  4569. IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
  4570. PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
  4571. APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
  4572. oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
  4573. GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
  4574. IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
  4575. mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
  4576. BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
  4577. AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
  4578. oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
  4579. oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
  4580. FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
  4581. oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
  4582. IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
  4583. JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
  4584. ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
  4585. AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
  4586. DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
  4587. IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
  4588. ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
  4589. CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
  4590. QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
  4591. IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
  4592. IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
  4593. ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
  4594. AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
  4595. CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
  4596. dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
  4597. KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
  4598. AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
  4599. HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
  4600. BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
  4601. AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
  4602. gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
  4603. QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
  4604. AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
  4605. BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
  4606. IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
  4607. kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
  4608. BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
  4609. IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
  4610. AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
  4611. AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
  4612. QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
  4613. CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
  4614. lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
  4615. dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
  4616. IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
  4617. IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
  4618. C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
  4619. IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
  4620. AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
  4621. KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
  4622. Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4623. ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
  4624. IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
  4625. EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
  4626. IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
  4627. IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
  4628. QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
  4629. IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
  4630. AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
  4631. ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
  4632. CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
  4633. AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
  4634. QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
  4635. kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
  4636. GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
  4637. ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
  4638. OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
  4639. BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
  4640. AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
  4641. IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
  4642. A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
  4643. dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
  4644. KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
  4645. IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
  4646. FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
  4647. GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
  4648. PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
  4649. E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
  4650. IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
  4651. IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
  4652. KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
  4653. Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4654. ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
  4655. gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
  4656. BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
  4657. EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
  4658. gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
  4659. AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
  4660. IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
  4661. AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
  4662. lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
  4663. b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
  4664. ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
  4665. IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
  4666. gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
  4667. ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
  4668. KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
  4669. AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
  4670. dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
  4671. NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
  4672. ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
  4673. DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
  4674. IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
  4675. IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
  4676. GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
  4677. BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
  4678. ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
  4679. gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
  4680. IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
  4681. FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
  4682. ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
  4683. QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
  4684. DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
  4685. dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
  4686. IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
  4687. IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
  4688. IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
  4689. AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
  4690. D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
  4691. EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
  4692. QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
  4693. IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
  4694. IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
  4695. KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
  4696. QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
  4697. GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
  4698. AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
  4699. ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
  4700. DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
  4701. AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
  4702. IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
  4703. zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
  4704. ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
  4705. AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
  4706. IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
  4707. AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
  4708. QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
  4709. B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
  4710. DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
  4711. gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
  4712. KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
  4713. AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
  4714. KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
  4715. HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
  4716. IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
  4717. IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
  4718. HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
  4719. QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
  4720. gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
  4721. QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
  4722. QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
  4723. ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
  4724. ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
  4725. ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
  4726. EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
  4727. KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
  4728. X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
  4729. ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
  4730. QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
  4731. dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
  4732. AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
  4733. AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
  4734. AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
  4735. KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
  4736. koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
  4737. CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
  4738. CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
  4739. IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
  4740. aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
  4741. ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
  4742. IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
  4743. EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
  4744. A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
  4745. A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
  4746. KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
  4747. AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
  4748. lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
  4749. ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
  4750. IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
  4751. aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
  4752. iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
  4753. DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
  4754. AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
  4755. AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
  4756. IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
  4757. lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
  4758. QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
  4759. IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
  4760. Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
  4761. BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
  4762. A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
  4763. ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
  4764. IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
  4765. taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
  4766. NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
  4767. gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
  4768. Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
  4769. DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
  4770. aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
  4771. ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
  4772. C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
  4773. dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
  4774. IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
  4775. dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
  4776. NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
  4777. YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
  4778. IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
  4779. bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
  4780. AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
  4781. b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
  4782. QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
  4783. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
  4784. dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
  4785. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
  4786. dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
  4787. NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
  4788. ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
  4789. YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
  4790. aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
  4791. MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
  4792. IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
  4793. dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
  4794. ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
  4795. IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
  4796. cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
  4797. AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
  4798. AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
  4799. OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
  4800. MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
  4801. MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
  4802. IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
  4803. cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
  4804. aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
  4805. LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
  4806. ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
  4807. bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
  4808. dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
  4809. MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
  4810. YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
  4811. YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
  4812. cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
  4813. b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
  4814. AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
  4815. NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
  4816. bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
  4817. aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
  4818. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
  4819. `
  4820. /***/ })
  4821. /******/ });
  4822. /************************************************************************/
  4823. /******/ // The module cache
  4824. /******/ var __webpack_module_cache__ = {};
  4825. /******/
  4826. /******/ // The require function
  4827. /******/ function __nested_webpack_require_320900__(moduleId) {
  4828. /******/ // Check if module is in cache
  4829. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  4830. /******/ if (cachedModule !== undefined) {
  4831. /******/ return cachedModule.exports;
  4832. /******/ }
  4833. /******/ // Create a new module (and put it into the cache)
  4834. /******/ var module = __webpack_module_cache__[moduleId] = {
  4835. /******/ // no module.id needed
  4836. /******/ // no module.loaded needed
  4837. /******/ exports: {}
  4838. /******/ };
  4839. /******/
  4840. /******/ // Execute the module function
  4841. /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_320900__);
  4842. /******/
  4843. /******/ // Return the exports of the module
  4844. /******/ return module.exports;
  4845. /******/ }
  4846. /******/
  4847. /************************************************************************/
  4848. /******/ /* webpack/runtime/define property getters */
  4849. /******/ (() => {
  4850. /******/ // define getter functions for harmony exports
  4851. /******/ __nested_webpack_require_320900__.d = (exports, definition) => {
  4852. /******/ for(var key in definition) {
  4853. /******/ if(__nested_webpack_require_320900__.o(definition, key) && !__nested_webpack_require_320900__.o(exports, key)) {
  4854. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  4855. /******/ }
  4856. /******/ }
  4857. /******/ };
  4858. /******/ })();
  4859. /******/
  4860. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  4861. /******/ (() => {
  4862. /******/ __nested_webpack_require_320900__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  4863. /******/ })();
  4864. /******/
  4865. /******/ /* webpack/runtime/make namespace object */
  4866. /******/ (() => {
  4867. /******/ // define __esModule on exports
  4868. /******/ __nested_webpack_require_320900__.r = (exports) => {
  4869. /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
  4870. /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
  4871. /******/ }
  4872. /******/ Object.defineProperty(exports, '__esModule', { value: true });
  4873. /******/ };
  4874. /******/ })();
  4875. /******/
  4876. /************************************************************************/
  4877. var __nested_webpack_exports__ = {};
4878. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  4879. (() => {
  4880. "use strict";
  4881. // EXPORTS
  4882. __nested_webpack_require_320900__.d(__nested_webpack_exports__, {
  4883. "default": () => (/* binding */ Speedy)
  4884. });
  4885. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  4886. var speedy_gl = __nested_webpack_require_320900__(1567);
  4887. // EXTERNAL MODULE: ./src/utils/utils.js
  4888. var utils = __nested_webpack_require_320900__(2191);
  4889. // EXTERNAL MODULE: ./src/core/settings.js
  4890. var settings = __nested_webpack_require_320900__(5637);
  4891. // EXTERNAL MODULE: ./src/core/speedy-promise.js
  4892. var speedy_promise = __nested_webpack_require_320900__(8902);
  4893. ;// CONCATENATED MODULE: ./src/utils/asap.js
  4894. /*
  4895. * speedy-vision.js
  4896. * GPU-accelerated Computer Vision for JavaScript
  4897. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4898. *
  4899. * Licensed under the Apache License, Version 2.0 (the "License");
  4900. * you may not use this file except in compliance with the License.
  4901. * You may obtain a copy of the License at
  4902. *
  4903. * http://www.apache.org/licenses/LICENSE-2.0
  4904. *
  4905. * Unless required by applicable law or agreed to in writing, software
  4906. * distributed under the License is distributed on an "AS IS" BASIS,
  4907. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4908. * See the License for the specific language governing permissions and
  4909. * limitations under the License.
  4910. *
  4911. * asap.js
  4912. * Schedule a function to run "as soon as possible"
  4913. */
  4914. /** callbacks */
  4915. const callbacks = /** @type {Function[]} */ ( [] );
  4916. /** arguments to be passed to the callbacks */
  4917. const args = /** @type {any[][]} */ ( [] );
  4918. /** asap key */
  4919. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  4920. // Register an event listener
  4921. window.addEventListener('message', event => {
  4922. if(event.source !== window || event.data !== ASAP_KEY)
  4923. return;
  4924. event.stopPropagation();
  4925. if(callbacks.length == 0)
  4926. return;
  4927. const fn = callbacks.pop();
  4928. const argArray = args.pop();
  4929. fn.apply(undefined, argArray);
  4930. }, true);
  4931. /**
  4932. * Schedule a function to run "as soon as possible"
  4933. * @param {Function} fn callback
  4934. * @param {any[]} params optional parameters
  4935. */
  4936. function asap(fn, ...params)
  4937. {
  4938. callbacks.unshift(fn);
  4939. args.unshift(params);
  4940. window.postMessage(ASAP_KEY, '*');
  4941. }
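// Illustrative usage sketch of asap() (not part of the original module). The
// postMessage trick lets the callback run on the next macrotask, which can fire
// sooner than a clamped setTimeout(fn, 0):
/*
asap((a, b) => console.log(a + b), 2, 3); // logs 5 "as soon as possible"
*/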
  4942. // EXTERNAL MODULE: ./src/utils/errors.js
  4943. var utils_errors = __nested_webpack_require_320900__(5619);
  4944. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
  4945. /*
  4946. * speedy-vision.js
  4947. * GPU-accelerated Computer Vision for JavaScript
  4948. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4949. *
  4950. * Licensed under the Apache License, Version 2.0 (the "License");
  4951. * you may not use this file except in compliance with the License.
  4952. * You may obtain a copy of the License at
  4953. *
  4954. * http://www.apache.org/licenses/LICENSE-2.0
  4955. *
  4956. * Unless required by applicable law or agreed to in writing, software
  4957. * distributed under the License is distributed on an "AS IS" BASIS,
  4958. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4959. * See the License for the specific language governing permissions and
  4960. * limitations under the License.
  4961. *
  4962. * speedy-texture-reader.js
  4963. * Reads data from textures
  4964. */
  4965. /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
  4966. const DEFAULT_NUMBER_OF_BUFFERS = 2;
  4967. /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
  4968. const runOnNextFrame = navigator.userAgent.includes('Firefox') ?
  4969. ((fn, ...args) => setTimeout(fn, 10, ...args)) : // RAF produces a warning on Firefox
  4970. ((fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args))); // reduce battery usage
  4971. /**
  4972. * Reads data from textures
  4973. */
  4974. class SpeedyTextureReader
  4975. {
  4976. /**
  4977. * Constructor
  4978. * @param {number} [numberOfBuffers]
  4979. */
  4980. constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS)
  4981. {
  4982. utils/* Utils */.A.assert(numberOfBuffers > 0);
  4983. /** @type {boolean} is this object initialized? */
  4984. this._initialized = false;
  4985. /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
  4986. this._pixelBuffer = (new Array(numberOfBuffers)).fill(null).map(() => new Uint8Array(0));
  4987. /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
  4988. this._pbo = (new Array(numberOfBuffers)).fill(null);
  4989. /** @type {number} the index of the buffer that will be consumed in this frame */
  4990. this._consumerIndex = 0;
  4991. /** @type {number} the index of the buffer that will be produced next */
  4992. this._producerIndex = numberOfBuffers - 1;
  4993. /** @type {SpeedyPromise<void>[]} producer-consumer promises */
  4994. this._promise = Array.from({ length: numberOfBuffers }, () => speedy_promise/* SpeedyPromise */.i.resolve());
  4995. /** @type {boolean[]} are the contents of the ith buffer being produced? */
  4996. this._busy = (new Array(numberOfBuffers)).fill(false);
  4997. /** @type {boolean[]} can the ith buffer be consumed? */
  4998. this._ready = (new Array(numberOfBuffers)).fill(true);
  4999. }
  5000. /**
  5001. * Initialize this object
  5002. * @param {SpeedyGPU} gpu
  5003. */
  5004. init(gpu)
  5005. {
  5006. this._allocatePBOs(gpu);
  5007. gpu.subscribe(this._allocatePBOs, this, gpu);
  5008. this._initialized = true;
  5009. }
  5010. /**
  5011. * Release resources
  5012. * @param {SpeedyGPU} gpu
  5013. * @returns {null}
  5014. */
  5015. release(gpu)
  5016. {
  5017. gpu.unsubscribe(this._allocatePBOs, this);
  5018. this._deallocatePBOs(gpu);
  5019. this._initialized = false;
  5020. return null;
  5021. }
  5022. /**
  5023. * Read pixels from a texture, synchronously.
  5024. * You may optionally specify a (x,y,width,height) sub-rectangle.
  5025. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  5026. * @param {number} [x]
  5027. * @param {number} [y]
  5028. * @param {number} [width]
  5029. * @param {number} [height]
  5030. * @returns {Uint8Array} pixels in the RGBA format
  5031. */
  5032. readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height)
  5033. {
  5034. utils/* Utils */.A.assert(this._initialized);
  5035. const gl = texture.gl;
  5036. const fbo = texture.glFbo;
  5037. // clamp values
  5038. width = Math.max(0, Math.min(width, texture.width));
  5039. height = Math.max(0, Math.min(height, texture.height));
  5040. x = Math.max(0, Math.min(x, texture.width - width));
  5041. y = Math.max(0, Math.min(y, texture.height - height));
  5042. // buffer allocation
  5043. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  5044. this._reallocate(sizeofBuffer);
  5045. // lost context?
  5046. if(gl.isContextLost())
  5047. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  5048. // read pixels
  5049. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5050. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
  5051. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5052. // done!
  5053. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  5054. }
  5055. /**
  5056. * Read pixels from a texture, asynchronously, with PBOs.
  5057. * You may optionally specify a (x,y,width,height) sub-rectangle.
  5058. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  5059. * @param {number} [x]
  5060. * @param {number} [y]
  5061. * @param {number} [width]
  5062. * @param {number} [height]
  5063. * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
  5064. * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
  5065. */
  5066. readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false)
  5067. {
  5068. utils/* Utils */.A.assert(this._initialized);
  5069. const gl = texture.gl;
  5070. const fbo = texture.glFbo;
  5071. // clamp values
  5072. width = Math.max(0, Math.min(width, texture.width));
  5073. height = Math.max(0, Math.min(height, texture.height));
  5074. x = Math.max(0, Math.min(x, texture.width - width));
  5075. y = Math.max(0, Math.min(y, texture.height - height));
  5076. // buffer allocation
  5077. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  5078. this._reallocate(sizeofBuffer);
  5079. // lost context?
  5080. if(gl.isContextLost())
  5081. return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
  5082. // do not optimize?
  5083. if(!useBufferedDownloads) {
  5084. const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
  5085. return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() =>
  5086. pixelBuffer
  5087. );
  5088. }
  5089. // Hide latency with a Producer-Consumer mechanism
  5090. const numberOfBuffers = this._pixelBuffer.length;
  5091. // GPU needs to produce data
  5092. const producerIndex = this._producerIndex;
  5093. if(!this._busy[producerIndex]) {
  5094. const pbo = this._pbo[producerIndex];
  5095. const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
  5096. this._producerIndex = (producerIndex + 1) % numberOfBuffers;
  5097. this._ready[producerIndex] = false;
  5098. this._busy[producerIndex] = true;
  5099. //console.time("produce "+producerIndex);
  5100. this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
  5101. //console.timeEnd("produce "+producerIndex);
  5102. this._busy[producerIndex] = false;
  5103. this._ready[producerIndex] = true;
  5104. });
  5105. }
  5106. //else console.log("skip",producerIndex);
  5107. else /* skip frame */ ;
  5108. // CPU needs to consume data
  5109. const consumerIndex = this._consumerIndex;
  5110. this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
  5111. if(!this._ready[consumerIndex]) {
  5112. //console.time("consume "+consumerIndex);
  5113. return this._promise[consumerIndex].then(() => {
  5114. //console.timeEnd("consume "+consumerIndex);
  5115. this._ready[consumerIndex] = false;
  5116. return this._pixelBuffer[consumerIndex];
  5117. });
  5118. }
  5119. //console.log("NO WAIT "+consumerIndex);
  5120. this._ready[consumerIndex] = false;
  5121. return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
  5122. }
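// Sketch of the producer-consumer behavior above (illustrative only; `reader` and
// `texture` are hypothetical). With useBufferedDownloads = true, each call starts a
// GPU-to-PBO transfer for the current frame while resolving with pixels produced by
// an earlier call, trading one frame of latency for fewer pipeline stalls:
/*
reader.readPixelsAsync(texture, 0, 0, texture.width, texture.height, true)
    .then(pixels => {
        // when streaming, these pixels typically belong to a previous frame
    });
*/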
  5123. /**
  5124. * Reallocate the pixel buffers, so that they can hold the required number of bytes
  5125. * If the pixel buffers already have the required capacity, then nothing is done
  5126. * @param {number} size in bytes
  5127. */
  5128. _reallocate(size)
  5129. {
  5130. // no need to reallocate
  5131. if(size <= this._pixelBuffer[0].byteLength)
  5132. return;
  5133. // reallocate
  5134. for(let i = 0; i < this._pixelBuffer.length; i++) {
  5135. const newBuffer = new Uint8Array(size);
  5136. //newBuffer.set(this._pixelBuffer[i]); // make this optional?
  5137. this._pixelBuffer[i] = newBuffer;
  5138. }
  5139. }
  5140. /**
  5141. * Allocate PBOs
  5142. * @param {SpeedyGPU} gpu
  5143. */
  5144. _allocatePBOs(gpu)
  5145. {
  5146. const gl = gpu.gl;
  5147. for(let i = 0; i < this._pbo.length; i++)
  5148. this._pbo[i] = gl.createBuffer();
  5149. }
  5150. /**
  5151. * Deallocate PBOs
  5152. * @param {SpeedyGPU} gpu
  5153. */
  5154. _deallocatePBOs(gpu)
  5155. {
  5156. const gl = gpu.gl;
  5157. for(let i = this._pbo.length - 1; i >= 0; i--) {
  5158. gl.deleteBuffer(this._pbo[i]);
  5159. this._pbo[i] = null;
  5160. }
  5161. }
  5162. /**
  5163. * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
  5164. * It's assumed that the target texture is in the RGBA8 format
  5165. * @param {WebGL2RenderingContext} gl
  5166. * @param {WebGLBuffer} pbo
  5167. * @param {Uint8Array} outputBuffer with size >= width * height * 4
  5168. * @param {WebGLFramebuffer} fbo
  5169. * @param {GLint} x
  5170. * @param {GLint} y
  5171. * @param {GLsizei} width
  5172. * @param {GLsizei} height
  5173. * @returns {SpeedyPromise<void>}
  5174. */
  5175. static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height)
  5176. {
  5177. /*
  5178. When testing Speedy on Chrome (mobile) using about:tracing with the
  5179. --enable-gpu-service-tracing flag, I found that A LOT of time is spent
  5180. in TraceGLAPI::glMapBufferRange, which takes place just after
  5181. GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
  5182. Using multiple PBOs doesn't seem to impact Chrome too much. Performance
  5183. is much better on Firefox. This suggests there is room for improvement.
5184. I do not yet clearly understand the cause of this lag on Chrome. It
  5185. may be a CPU-GPU synchronization issue.
  5186. EDIT: I have found that using gl.flush() aggressively greatly improves
  5187. things. WebGL commands will be pushed frequently!
  5188. See also:
  5189. https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
  5190. https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
  5191. */
  5192. const size = width * height * 4;
  5193. // validate outputBuffer
  5194. utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
  5195. // read pixels into the PBO
  5196. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  5197. gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
  5198. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5199. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  5200. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5201. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  5202. // create a fence
  5203. const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
  5204. gl.flush(); // make sure the sync command is read
  5205. // wait for the commands to be processed by the GPU
  5206. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  5207. // according to the WebGL2 spec sec 3.7.14 Sync objects,
  5208. // "sync objects may only transition to the signaled state
  5209. // when the user agent's event loop is not executing a task"
  5210. // in other words, it won't be signaled in the same frame
  5211. if(settings/* Settings */.w.gpuPollingMode != 'asap')
  5212. runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  5213. else
  5214. asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  5215. }).then(() => {
  5216. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  5217. gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
  5218. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  5219. }).catch(err => {
  5220. throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
  5221. }).finally(() => {
  5222. gl.deleteSync(sync);
  5223. });
  5224. }
  5225. /**
  5226. * Waits for a sync object to become signaled
  5227. * @param {WebGL2RenderingContext} gl
  5228. * @param {WebGLSync} sync
  5229. * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
  5230. * @param {Function} resolve
  5231. * @param {Function} reject
  5232. * @param {number} [pollInterval] in milliseconds
  5233. * @param {number} [remainingAttempts] for timeout
  5234. */
  5235. static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000)
  5236. {
  5237. (function poll() {
  5238. const status = gl.clientWaitSync(sync, flags, 0);
  5239. if(remainingAttempts-- <= 0) {
  5240. reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
  5241. }
  5242. else if(status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
  5243. resolve();
  5244. }
  5245. else {
  5246. //setTimeout(poll, pollInterval);
  5247. if(settings/* Settings */.w.gpuPollingMode != 'asap')
  5248. requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
  5249. else
  5250. asap(poll);
  5251. }
  5252. })();
  5253. }
  5254. }
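// Minimal usage sketch of SpeedyTextureReader (illustrative only; `gpu` and
// `texture` are assumed to be a valid SpeedyGPU and SpeedyDrawableTexture):
/*
const reader = new SpeedyTextureReader();
reader.init(gpu);
const rgba = reader.readPixelsSync(texture); // Uint8Array, 4 bytes per pixel
reader.readPixelsAsync(texture).then(pixels => {
    // asynchronous path backed by PBOs + fence sync
    reader.release(gpu);
});
*/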
  5255. // EXTERNAL MODULE: ./src/utils/globals.js
  5256. var globals = __nested_webpack_require_320900__(1814);
  5257. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
  5258. /*
  5259. * speedy-vision.js
  5260. * GPU-accelerated Computer Vision for JavaScript
  5261. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  5262. *
  5263. * Licensed under the Apache License, Version 2.0 (the "License");
  5264. * you may not use this file except in compliance with the License.
  5265. * You may obtain a copy of the License at
  5266. *
  5267. * http://www.apache.org/licenses/LICENSE-2.0
  5268. *
  5269. * Unless required by applicable law or agreed to in writing, software
  5270. * distributed under the License is distributed on an "AS IS" BASIS,
  5271. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5272. * See the License for the specific language governing permissions and
  5273. * limitations under the License.
  5274. *
  5275. * speedy-texture.js
  5276. * A wrapper around WebGLTexture
  5277. */
  5278. /**
  5279. * Get a buffer filled with zeros
  5280. * @param {number} size number of bytes
  5281. * @returns {Uint8Array}
  5282. */
  5283. /*
  5284. const zeros = (function() {
  5285. let buffer = new Uint8Array(4);
  5286. return function(size) {
  5287. if(size > buffer.length)
  5288. buffer = new Uint8Array(size);
  5289. return buffer.subarray(0, size);
  5290. }
  5291. })();
  5292. */
  5293. /**
  5294. * A wrapper around WebGLTexture
  5295. */
  5296. class SpeedyTexture
  5297. {
  5298. /**
  5299. * Constructor
  5300. * @param {WebGL2RenderingContext} gl
  5301. * @param {number} width texture width in pixels
  5302. * @param {number} height texture height in pixels
  5303. * @param {number} [format]
  5304. * @param {number} [internalFormat]
  5305. * @param {number} [dataType]
  5306. * @param {number} [filter]
  5307. * @param {number} [wrap]
  5308. */
  5309. constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT)
  5310. {
  5311. /** @type {WebGL2RenderingContext} rendering context */
  5312. this._gl = gl;
  5313. /** @type {number} width of the texture */
  5314. this._width = Math.max(1, width | 0);
  5315. /** @type {number} height of the texture */
  5316. this._height = Math.max(1, height | 0);
  5317. /** @type {boolean} have we generated mipmaps for this texture? */
  5318. this._hasMipmaps = false;
  5319. /** @type {number} texture format */
  5320. this._format = format;
  5321. /** @type {number} internal format (usually a sized format) */
  5322. this._internalFormat = internalFormat;
  5323. /** @type {number} data type */
  5324. this._dataType = dataType;
  5325. /** @type {number} texture filtering (min & mag) */
  5326. this._filter = filter;
  5327. /** @type {number} texture wrapping */
  5328. this._wrap = wrap;
  5329. /** @type {WebGLTexture} internal texture object */
  5330. this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
  5331. }
  5332. /**
  5333. * Releases the texture
  5334. * @returns {null}
  5335. */
  5336. release()
  5337. {
  5338. const gl = this._gl;
  5339. // already released?
  5340. if(this._glTexture == null)
  5341. throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
  5342. // release resources
  5343. this.discardMipmaps();
  5344. gl.deleteTexture(this._glTexture);
  5345. this._glTexture = null;
  5346. this._width = this._height = 0;
  5347. // done!
  5348. return null;
  5349. }
  5350. /**
  5351. * Upload pixel data to the texture. The texture will be resized if needed.
  5352. * @param {TexImageSource} data
  5353. * @param {number} [width] in pixels
  5354. * @param {number} [height] in pixels
  5355. * @return {SpeedyTexture} this
  5356. */
  5357. upload(data, width = this._width, height = this._height)
  5358. {
  5359. const gl = this._gl;
  5360. // bugfix: if the media is a video, we can't really
  5361. // upload it to the GPU unless it's ready
  5362. if(data instanceof HTMLVideoElement) {
  5363. if(data.readyState < 2) {
  5364. // this may happen when the video loops (Firefox)
  5365. // keep the previously uploaded texture
  5366. //Utils.warning(`Trying to process a video that isn't ready yet`);
  5367. return this;
  5368. }
  5369. }
  5370. utils/* Utils */.A.assert(width > 0 && height > 0);
  5371. this.discardMipmaps();
  5372. this._width = width;
  5373. this._height = height;
  5374. this._internalFormat = gl.RGBA8;
  5375. this._format = gl.RGBA;
  5376. this._dataType = gl.UNSIGNED_BYTE;
  5377. SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
  5378. return this;
  5379. }
  5380. /**
  5381. * Clear the texture
  5382. * @returns {this}
  5383. */
  5384. clear()
  5385. {
  5386. const gl = this._gl;
  5387. // context loss?
  5388. if(gl.isContextLost())
  5389. return this;
  5390. // clear texture data
  5391. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5392. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5393. gl.bindTexture(gl.TEXTURE_2D, null);
  5394. // no mipmaps
  5395. this.discardMipmaps();
  5396. // done!
  5397. return this;
  5398. }
  5399. /**
  5400. * Resize this texture. Its content will be lost!
  5401. * @param {number} width new width, in pixels
  5402. * @param {number} height new height, in pixels
  5403. * @returns {this}
  5404. */
  5405. resize(width, height)
  5406. {
  5407. const gl = this._gl;
  5408. // no need to resize?
  5409. if(this._width === width && this._height === height)
  5410. return this;
  5411. // validate size
  5412. width |= 0; height |= 0;
  5413. if(width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH)
  5414. throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);
  5415. else if(width < 1 || height < 1)
  5416. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
  5417. // context loss?
  5418. if(gl.isContextLost())
  5419. return this;
  5420. // update dimensions
  5421. this._width = width;
  5422. this._height = height;
  5423. // resize
  5424. // Note: this is fast on Chrome, but seems slow on Firefox
  5425. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5426. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5427. gl.bindTexture(gl.TEXTURE_2D, null);
  5428. // no mipmaps
  5429. this.discardMipmaps();
  5430. // done!
  5431. return this;
  5432. }
  5433. /**
  5434. * Generate mipmap
  5435. * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
  5436. * @returns {SpeedyTexture} this
  5437. */
  5438. generateMipmaps(mipmap = [])
  5439. {
  5440. const gl = this._gl;
  5441. // nothing to do
  5442. if(this._hasMipmaps)
  5443. return this;
5444. // let the hardware compute all the levels of the pyramid, up to 1x1
  5445. // we also specify the TEXTURE_MIN_FILTER to be used from now on
  5446. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5447. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
  5448. gl.generateMipmap(gl.TEXTURE_2D);
  5449. gl.bindTexture(gl.TEXTURE_2D, null);
  5450. // accept custom textures
  5451. if(mipmap.length > 0) {
  5452. // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  5453. const width = this.width, height = this.height;
  5454. const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
  5455. utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
  5456. // verify the dimensions of each level
  5457. for(let level = 1; level < mipmap.length; level++) {
5458. // use max(1, floor(size / 2^lod)), in accordance with
  5459. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  5460. const w = Math.max(1, width >>> level);
  5461. const h = Math.max(1, height >>> level);
  5462. // verify the dimensions of this level
  5463. utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
  5464. // copy to mipmap
  5465. mipmap[level].copyTo(this, level);
  5466. }
  5467. }
  5468. // done!
  5469. this._hasMipmaps = true;
  5470. return this;
  5471. }
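// Worked example of the level-count formula above (illustrative): for a 640x480
// texture, numMipmaps = 1 + floor(log2(max(640, 480))) = 1 + 9 = 10, and level k
// measures max(1, 640 >>> k) x max(1, 480 >>> k):
// 640x480, 320x240, 160x120, 80x60, 40x30, 20x15, 10x7, 5x3, 2x1, 1x1.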
  5472. /**
  5473. * Invalidates previously generated mipmap, if any
  5474. */
  5475. discardMipmaps()
  5476. {
  5477. const gl = this._gl;
  5478. // nothing to do
  5479. if(!this._hasMipmaps)
  5480. return;
  5481. // reset the min filter
  5482. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5483. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
  5484. gl.bindTexture(gl.TEXTURE_2D, null);
  5485. // done!
  5486. this._hasMipmaps = false;
  5487. }
  5488. /**
  5489. * Does this texture have a mipmap?
  5490. * @returns {boolean}
  5491. */
  5492. hasMipmaps()
  5493. {
  5494. return this._hasMipmaps;
  5495. }
  5496. /**
  5497. * Has this texture been released?
  5498. * @returns {boolean}
  5499. */
  5500. isReleased()
  5501. {
  5502. return this._glTexture == null;
  5503. }
  5504. /**
  5505. * The internal WebGLTexture
  5506. * @returns {WebGLTexture}
  5507. */
  5508. get glTexture()
  5509. {
  5510. return this._glTexture;
  5511. }
  5512. /**
  5513. * The width of the texture, in pixels
  5514. * @returns {number}
  5515. */
  5516. get width()
  5517. {
  5518. return this._width;
  5519. }
  5520. /**
  5521. * The height of the texture, in pixels
  5522. * @returns {number}
  5523. */
  5524. get height()
  5525. {
  5526. return this._height;
  5527. }
  5528. /**
  5529. * The WebGL Context
  5530. * @returns {WebGL2RenderingContext}
  5531. */
  5532. get gl()
  5533. {
  5534. return this._gl;
  5535. }
  5536. /**
  5537. * Create a WebGL texture
  5538. * @param {WebGL2RenderingContext} gl
  5539. * @param {number} width in pixels
  5540. * @param {number} height in pixels
  5541. * @param {number} format usually gl.RGBA
  5542. * @param {number} internalFormat usually gl.RGBA8
  5543. * @param {number} dataType usually gl.UNSIGNED_BYTE
  5544. * @param {number} filter usually gl.NEAREST or gl.LINEAR
  5545. * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
  5546. * @returns {WebGLTexture}
  5547. */
  5548. static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap)
  5549. {
  5550. utils/* Utils */.A.assert(width > 0 && height > 0);
  5551. // create & bind texture
  5552. const texture = gl.createTexture();
  5553. gl.bindTexture(gl.TEXTURE_2D, texture);
  5554. // setup
  5555. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  5556. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  5557. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
  5558. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
  5559. //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
  5560. gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
  5561. // unbind & return
  5562. gl.bindTexture(gl.TEXTURE_2D, null);
  5563. return texture;
  5564. }
  5565. /**
  5566. * Upload pixel data to a WebGL texture
  5567. * @param {WebGL2RenderingContext} gl
  5568. * @param {WebGLTexture} texture
  5569. * @param {GLsizei} width texture width
  5570. * @param {GLsizei} height texture height
  5571. * @param {TexImageSource} pixels
  5572. * @param {GLint} lod mipmap level-of-detail
  5573. * @param {number} format
  5574. * @param {number} internalFormat
  5575. * @param {number} dataType
  5576. * @returns {WebGLTexture} texture
  5577. */
  5578. static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType)
  5579. {
  5580. // Prefer calling _upload() before gl.useProgram() to avoid the
  5581. // needless switching of GL programs internally. See also:
  5582. // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  5583. gl.bindTexture(gl.TEXTURE_2D, texture);
  5584. /*
5585. // seems slower than texImage2D, contrary to what the spec suggests?
  5586. gl.texSubImage2D(gl.TEXTURE_2D, // target
  5587. lod, // mip level
  5588. 0, // x-offset
  5589. 0, // y-offset
  5590. width, // texture width
  5591. height, // texture height
  5592. gl.RGBA, // source format
  5593. gl.UNSIGNED_BYTE, // source type
  5594. pixels); // source data
  5595. */
  5596. gl.texImage2D(gl.TEXTURE_2D, // target
  5597. lod, // mip level
  5598. internalFormat, // internal format
  5599. width, // texture width
  5600. height, // texture height
  5601. 0, // border
  5602. format, // source format
  5603. dataType, // source type
  5604. pixels); // source data
  5605. gl.bindTexture(gl.TEXTURE_2D, null);
  5606. return texture;
  5607. }
  5608. }
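// Minimal usage sketch of SpeedyTexture (illustrative only; `gl` is assumed to be a
// valid WebGL2RenderingContext and `video` a ready HTMLVideoElement):
/*
const texture = new SpeedyTexture(gl, 640, 480);
texture.upload(video);      // upload RGBA8 pixel data, resizing if needed
texture.generateMipmaps();  // optional: let the hardware build the pyramid
texture.release();          // deletes the underlying WebGLTexture
*/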
  5609. /**
  5610. * A SpeedyTexture with a framebuffer
  5611. */
  5612. class SpeedyDrawableTexture extends SpeedyTexture
  5613. {
  5614. /**
  5615. * Constructor
  5616. * @param {WebGL2RenderingContext} gl
  5617. * @param {number} width texture width in pixels
  5618. * @param {number} height texture height in pixels
  5619. * @param {number} [format]
  5620. * @param {number} [internalFormat]
  5621. * @param {number} [dataType]
  5622. * @param {number} [filter]
  5623. * @param {number} [wrap]
  5624. */
  5625. constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined)
  5626. {
  5627. super(gl, width, height, format, internalFormat, dataType, filter, wrap);
  5628. /** @type {WebGLFramebuffer} framebuffer */
  5629. this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
  5630. }
  5631. /**
  5632. * Releases the texture
  5633. * @returns {null}
  5634. */
  5635. release()
  5636. {
  5637. const gl = this._gl;
  5638. // already released?
  5639. if(this._glFbo == null)
  5640. throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
  5641. // release the framebuffer
  5642. gl.deleteFramebuffer(this._glFbo);
  5643. this._glFbo = null;
  5644. // release the SpeedyTexture
  5645. return super.release();
  5646. }
  5647. /**
  5648. * The internal WebGLFramebuffer
  5649. * @returns {WebGLFramebuffer}
  5650. */
  5651. get glFbo()
  5652. {
  5653. return this._glFbo;
  5654. }
  5655. /**
  5656. * Copy this texture into another
  5657. * (you may have to discard the mipmaps after calling this function)
  5658. * @param {SpeedyTexture} texture target texture
  5659. * @param {number} [lod] level-of-detail of the target texture
  5660. */
  5661. copyTo(texture, lod = 0)
  5662. {
  5663. const gl = this._gl;
  5664. // context loss?
  5665. if(gl.isContextLost())
  5666. return;
  5667. // compute texture size as max(1, floor(size / 2^lod)),
5668. // in accordance with the OpenGL ES 3.0 spec sec 3.8.10.4
  5669. // (Mipmapping)
  5670. const pot = 1 << (lod |= 0);
  5671. const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
  5672. const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
  5673. // validate
  5674. utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
  5675. // copy to texture
  5676. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
  5677. }
  5678. /*
  5679. * Resize this texture
  5680. * @param {number} width new width, in pixels
  5681. * @param {number} height new height, in pixels
  5682. * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
  5683. * @returns {this}
  5684. */
  5685. /*resize(width, height, preserveContent = false)
  5686. {
  5687. const gl = this._gl;
  5688. // no need to preserve the content?
  5689. if(!preserveContent)
  5690. return super.resize(width, height);
  5691. // no need to resize?
  5692. if(this._width === width && this._height === height)
  5693. return this;
  5694. // validate size
  5695. width |= 0; height |= 0;
  5696. Utils.assert(width > 0 && height > 0);
  5697. // context loss?
  5698. if(gl.isContextLost())
  5699. return this;
  5700. // allocate new texture
  5701. const newTexture = SpeedyTexture._createTexture(gl, width, height);
  5702. // initialize the new texture with zeros to avoid a
  5703. // warning when calling copyTexSubImage2D() on Firefox
  5704. // this may not be very efficient?
  5705. SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
  5706. // copy the old texture to the new one
  5707. const oldWidth = this._width, oldHeight = this._height;
  5708. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
  5709. // bind FBO
  5710. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5711. // invalidate old data (is this needed?)
  5712. gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
  5713. // attach the new texture to the existing framebuffer
  5714. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5715. gl.COLOR_ATTACHMENT0, // color buffer
  5716. gl.TEXTURE_2D, // tex target
  5717. newTexture, // texture
  5718. 0); // mipmap level
  5719. // unbind FBO
  5720. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5721. // release the old texture and replace it
  5722. gl.deleteTexture(this._glTexture);
  5723. this._glTexture = newTexture;
  5724. // update dimensions & discard mipmaps
  5725. this.discardMipmaps();
  5726. this._width = width;
  5727. this._height = height;
  5728. // done!
  5729. return this;
  5730. }
  5731. */
  5732. /**
  5733. * Clear the texture
  5734. * @returns {this}
  5735. */
  5736. clear()
  5737. {
  5738. //
  5739. // When we pass null to texImage2D(), it seems that Firefox
  5740. // doesn't clear the texture. Instead, it displays this warning:
  5741. //
  5742. // "WebGL warning: drawArraysInstanced:
  5743. // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
  5744. //
  5745. // Here is a workaround:
  5746. //
  5747. return this.clearToColor(0, 0, 0, 0);
  5748. }
  5749. /**
  5750. * Clear the texture to a color
  5751. * @param {number} r red component, a value in [0,1]
  5752. * @param {number} g green component, a value in [0,1]
  5753. * @param {number} b blue component, a value in [0,1]
  5754. * @param {number} a alpha component, a value in [0,1]
  5755. * @returns {this}
  5756. */
  5757. clearToColor(r, g, b, a)
  5758. {
  5759. const gl = this._gl;
  5760. // context loss?
  5761. if(gl.isContextLost())
  5762. return this;
  5763. // clamp parameters
  5764. r = Math.max(0.0, Math.min(+r, 1.0));
  5765. g = Math.max(0.0, Math.min(+g, 1.0));
  5766. b = Math.max(0.0, Math.min(+b, 1.0));
  5767. a = Math.max(0.0, Math.min(+a, 1.0));
  5768. // discard mipmaps, if any
  5769. this.discardMipmaps();
  5770. // clear the texture
  5771. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5772. gl.viewport(0, 0, this._width, this._height);
  5773. gl.clearColor(r, g, b, a);
  5774. gl.clear(gl.COLOR_BUFFER_BIT);
  5775. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5776. // done!
  5777. return this;
  5778. }
  5779. /**
  5780. * Inspect the pixels of the texture for debugging purposes
  5781. * @param {SpeedyGPU} gpu
  5782. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5783. * @returns {Uint8Array}
  5784. */
  5785. inspect(gpu, textureReader)
  5786. {
  5787. if(textureReader === undefined) {
  5788. textureReader = new SpeedyTextureReader();
  5789. textureReader.init(gpu);
  5790. const pixels = textureReader.readPixelsSync(this);
  5791. textureReader.release(gpu);
  5792. return new Uint8Array(pixels); // copy the array
  5793. }
  5794. else {
  5795. const pixels = textureReader.readPixelsSync(this);
  5796. return new Uint8Array(pixels);
  5797. }
  5798. }
  5799. /**
  5800. * Inspect the pixels of the texture as unsigned 32-bit integers
  5801. * @param {SpeedyGPU} gpu
  5802. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5803. * @returns {Uint32Array}
  5804. */
  5805. inspect32(gpu, textureReader)
  5806. {
  5807. utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
  5808. return new Uint32Array(this.inspect(gpu, textureReader).buffer);
  5809. }
  5810. /**
  5811. * Create a FBO associated with an existing texture
  5812. * @param {WebGL2RenderingContext} gl
  5813. * @param {WebGLTexture} texture
  5814. * @returns {WebGLFramebuffer}
  5815. */
  5816. static _createFramebuffer(gl, texture)
  5817. {
  5818. const fbo = gl.createFramebuffer();
  5819. // setup framebuffer
  5820. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5821. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5822. gl.COLOR_ATTACHMENT0, // color buffer
  5823. gl.TEXTURE_2D, // tex target
  5824. texture, // texture
  5825. 0); // mipmap level
  5826. // check for errors
  5827. const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  5828. if(status != gl.FRAMEBUFFER_COMPLETE) {
  5829. const error = (() => (([
  5830. 'FRAMEBUFFER_UNSUPPORTED',
  5831. 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT',
  5832. 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS',
  5833. 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT',
  5834. 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'
  5835. ].filter(err => gl[err] === status))[0] || 'unknown error'))();
  5836. throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
  5837. }
  5838. // unbind & return
  5839. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5840. return fbo;
  5841. }
  5842. /**
  5843. * Copy data from a framebuffer to a texture
  5844. * @param {WebGL2RenderingContext} gl
  5845. * @param {WebGLFramebuffer} fbo we'll read the data from this
  5846. * @param {WebGLTexture} texture destination texture
  5847. * @param {GLint} x xpos (where to start copying)
  5848. * @param {GLint} y ypos (where to start copying)
  5849. * @param {GLsizei} width width of the texture
  5850. * @param {GLsizei} height height of the texture
  5851. * @param {GLint} [lod] mipmap level-of-detail
  5852. * @returns {WebGLTexture} texture
  5853. */
  5854. static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0)
  5855. {
  5856. //gl.activeTexture(gl.TEXTURE0);
  5857. gl.bindTexture(gl.TEXTURE_2D, texture);
  5858. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5859. gl.copyTexSubImage2D(
  5860. gl.TEXTURE_2D, // target
  5861. lod, // mipmap level
  5862. 0, // xoffset
  5863. 0, // yoffset
  5864. x, // xpos (where to start copying)
  5865. y, // ypos (where to start copying)
  5866. width, // width of the texture
  5867. height // height of the texture
  5868. );
  5869. /*
  5870. gl.copyTexImage2D(
  5871. gl.TEXTURE_2D, // target
  5872. lod, // mipmap level
  5873. gl.RGBA, // internal format
  5874. x, // xpos (where to start copying)
  5875. y, // ypos (where to start copying)
  5876. width, // width of the texture
  5877. height, // height of the texture
  5878. 0 // border
  5879. );
  5880. */
  5881. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5882. gl.bindTexture(gl.TEXTURE_2D, null);
  5883. return texture;
  5884. }
  5885. }
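// Minimal usage sketch of SpeedyDrawableTexture (illustrative only; `gl` and `gpu`
// are assumed to be a valid WebGL2RenderingContext and SpeedyGPU):
/*
const drawable = new SpeedyDrawableTexture(gl, 320, 240);
drawable.clearToColor(1, 0, 0, 1);    // fill with opaque red through the FBO
const pixels = drawable.inspect(gpu); // Uint8Array of length 320 * 240 * 4
drawable.release();                   // deletes the FBO, then the texture
*/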
  5886. // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
  5887. var shader_declaration = __nested_webpack_require_320900__(3112);
  5888. ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
  5889. /*
  5890. * speedy-vision.js
  5891. * GPU-accelerated Computer Vision for JavaScript
  5892. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  5893. *
  5894. * Licensed under the Apache License, Version 2.0 (the "License");
  5895. * you may not use this file except in compliance with the License.
  5896. * You may obtain a copy of the License at
  5897. *
  5898. * http://www.apache.org/licenses/LICENSE-2.0
  5899. *
  5900. * Unless required by applicable law or agreed to in writing, software
  5901. * distributed under the License is distributed on an "AS IS" BASIS,
  5902. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5903. * See the License for the specific language governing permissions and
  5904. * limitations under the License.
  5905. *
  5906. * speedy-program.js
  5907. * SpeedyProgram class
  5908. */
  5909. /** @const {Object<string,string>} Map uniform type to a gl function */
  5910. const UNIFORM_SETTERS = Object.freeze({
  5911. 'sampler2D': 'uniform1i',
  5912. 'isampler2D':'uniform1i',
  5913. 'usampler2D':'uniform1i',
  5914. 'float': 'uniform1f',
  5915. 'int': 'uniform1i',
  5916. 'uint': 'uniform1ui',
  5917. 'bool': 'uniform1i',
  5918. 'vec2': 'uniform2f',
  5919. 'vec3': 'uniform3f',
  5920. 'vec4': 'uniform4f',
  5921. 'ivec2': 'uniform2i',
  5922. 'ivec3': 'uniform3i',
  5923. 'ivec4': 'uniform4i',
  5924. 'uvec2': 'uniform2ui',
  5925. 'uvec3': 'uniform3ui',
  5926. 'uvec4': 'uniform4ui',
  5927. 'bvec2': 'uniform2i',
  5928. 'bvec3': 'uniform3i',
  5929. 'bvec4': 'uniform4i',
  5930. 'mat2': 'uniformMatrix2fv',
  5931. 'mat3': 'uniformMatrix3fv',
  5932. 'mat4': 'uniformMatrix4fv',
  5933. });
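// Illustrative sketch of how this table is meant to be used (hypothetical `gl`,
// `location` and values; the actual dispatch happens below in SpeedyProgram):
/*
const setter = UNIFORM_SETTERS['vec2'];  // 'uniform2f'
gl[setter](location, 0.5, 0.25);         // i.e. gl.uniform2f(location, 0.5, 0.25)
*/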
  5934. /**
  5935. * @typedef {object} SpeedyProgramOptions
  5936. * @property {boolean} [renderToTexture] render results to a texture?
  5937. * @property {boolean} [pingpong] alternate output texture between calls
  5938. */
  5939. /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
  5940. /**
  5941. * A SpeedyProgram is a Function that runs GLSL code
  5942. */
  5943. class SpeedyProgram extends Function
  5944. {
  5945. /**
  5946. * Creates a new SpeedyProgram
  5947. * @param {WebGL2RenderingContext} gl WebGL context
  5948. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5949. * @param {SpeedyProgramOptions} [options] user options
  5950. */
  5951. constructor(gl, shaderdecl, options = { })
  5952. {
  5953. super('...args', 'return this._self._call(...args)');
  5954. /** @type {SpeedyProgram} this function bound to this function! */
  5955. this._self = this.bind(this);
  5956. this._self._init(gl, shaderdecl, options);
  5957. return this._self;
  5958. }
  5959. /**
  5960. * Initialize the SpeedyProgram
  5961. * @param {WebGL2RenderingContext} gl WebGL context
  5962. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5963. * @param {SpeedyProgramOptions} options user options
  5964. */
  5965. _init(gl, shaderdecl, options)
  5966. {
  5967. // not a valid context?
  5968. if(gl.isContextLost())
  5969. throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
  5970. // options object
  5971. options = Object.assign({
  5972. // default options
  5973. renderToTexture: true,
  5974. pingpong: false,
  5975. }, options);
  5976. /** @type {WebGL2RenderingContext} */
  5977. this._gl = gl;
  5978. /** @type {WebGLProgram} vertex shader + fragment shader */
  5979. this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
  5980. /** @type {ProgramGeometry} this is a quad */
  5981. this._geometry = new ProgramGeometry(gl, {
  5982. position: shaderdecl.locationOfAttributes.position,
  5983. texCoord: shaderdecl.locationOfAttributes.texCoord
  5984. });
  5985. /** @type {string[]} names of the arguments of the SpeedyProgram */
  5986. this._argnames = shaderdecl.arguments;
  5987. /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
  5988. this._argIsArray = (new Array(this._argnames.length)).fill(false);
  5989. /** @type {UBOHelper} UBO helper (lazy instantiation) */
  5990. this._ubo = null;
  5991. /** @type {boolean} should we render to a texture? If false, we render to the canvas */
  5992. this._renderToTexture = Boolean(options.renderToTexture);
  5993. /** @type {number} width of the output */
  5994. this._width = 1;
  5995. /** @type {number} height of the output */
  5996. this._height = 1;
  5997. /** @type {[number,number]} cached object that stores the size of the output */
  5998. this._size = [ 1, 1 ];
  5999. /** @type {SpeedyDrawableTexture[]} output texture(s) */
  6000. this._texture = (new Array(options.pingpong ? 2 : 1)).fill(null);
  6001. /** @type {number} used for pingpong rendering */
  6002. this._textureIndex = 0;
  6003. /** @type {Map<string,UniformVariable>} uniform variables */
  6004. this._uniform = new Map();
  6005. /** @type {ShaderDeclaration} shader declaration */
  6006. this._shaderdecl = shaderdecl;
  6007. // autodetect uniforms
  6008. gl.useProgram(this._program);
  6009. for(const name of shaderdecl.uniforms) {
  6010. const type = shaderdecl.uniformType(name);
  6011. const location = gl.getUniformLocation(this._program, name);
  6012. this._uniform.set(name, new UniformVariable(type, location));
  6013. }
  6014. // match arguments & uniforms
  6015. for(let j = 0; j < this._argnames.length; j++) {
  6016. const argname = this._argnames[j];
  6017. if(!this._uniform.has(argname)) {
  6018. this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
  6019. if(!this._argIsArray[j])
  6020. throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
  6021. }
  6022. }
  6023. }
  6024. /**
  6025. * Run the SpeedyProgram
  6026. * @param {...SpeedyProgramUniformValue} args
  6027. * @returns {SpeedyDrawableTexture}
  6028. */
  6029. _call(...args)
  6030. {
  6031. const gl = this._gl;
  6032. const argnames = this._argnames;
  6033. const texture = this._texture[this._textureIndex];
  6034. // matching arguments?
  6035. if(args.length != argnames.length)
  6036. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
  6037. // can't use the output texture as an input
  6038. /*
  6039. // slower method
  6040. const flatArgs = Utils.flatten(args);
  6041. for(let j = flatArgs.length - 1; j >= 0; j--) {
  6042. if(flatArgs[j] === this._texture[this._textureIndex])
  6043. throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  6044. }
  6045. */
  6046. for(let j = args.length - 1; j >= 0; j--) {
  6047. if(args[j] === texture)
  6048. throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  6049. // else if(Array.isArray(args[j])) ...
  6050. // we don't support passing arrays of textures at the time of this writing
  6051. }
  6052. // context loss?
  6053. if(gl.isContextLost())
  6054. return texture;
  6055. // use program
  6056. gl.useProgram(this._program);
  6057. // bind the VAO
  6058. gl.bindVertexArray(this._geometry.vao);
  6059. // select the render target
  6060. const fbo = this._renderToTexture ? texture.glFbo : null;
  6061. // update texSize uniform (available in all fragment shaders)
  6062. const texSize = this._uniform.get('texSize');
  6063. this._size[0] = this._width;
  6064. this._size[1] = this._height;
  6065. texSize.setValue(gl, this._size);
  6066. // set uniforms[i] to args[i]
  6067. for(let i = 0, texNo = 0; i < args.length; i++) {
  6068. const argname = argnames[i];
  6069. if(!this._argIsArray[i]) {
  6070. // uniform variable matches argument name
  6071. const uniform = this._uniform.get(argname);
  6072. texNo = uniform.setValue(gl, args[i], texNo);
  6073. }
  6074. else {
  6075. // uniform array matches argument name
  6076. const array = args[i];
  6077. if(Array.isArray(array)) {
  6078. if(this._uniform.has(indexedVariable(argname, array.length)))
  6079. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
  6080. for(let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++)
  6081. texNo = uniform.setValue(gl, array[j], texNo);
  6082. }
  6083. else
  6084. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
  6085. }
  6086. }
  6087. // set Uniform Buffer Objects (if any)
  6088. if(this._ubo !== null)
  6089. this._ubo.update();
  6090. // bind the FBO
  6091. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  6092. // draw call
  6093. gl.viewport(0, 0, this._width, this._height);
  6094. gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
  6095. // unbind the FBO
  6096. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  6097. // unbind the VAO
  6098. gl.bindVertexArray(null);
  6099. // we've just changed the texture! discard the pyramid, if any
  6100. if(texture != null)
  6101. texture.discardMipmaps();
  6102. // ping-pong rendering
  6103. this._pingpong();
  6104. // done!
  6105. return texture;
  6106. }
  6107. /**
  6108. * Set the output texture(s) and its (their) shape(s)
  6109. * @param {number} width new width, in pixels
  6110. * @param {number} height new height, in pixels
  6111. * @param {...SpeedyDrawableTexture|null} texture output texture(s)
  6112. * @returns {SpeedyProgram} this
  6113. */
  6114. outputs(width, height, ...texture)
  6115. {
  6116. this._setOutputTexture(...texture);
  6117. this._setOutputSize(width, height);
  6118. return this;
  6119. }
  6120. /**
  6121. * Set the size of the output
  6122. * @param {number} width new width, in pixels
  6123. * @param {number} height new height, in pixels
  6124. * @returns {SpeedyProgram} this
  6125. */
  6126. _setOutputSize(width, height)
  6127. {
  6128. utils/* Utils */.A.assert(width > 0 && height > 0);
  6129. // update output size
  6130. this._width = width | 0;
  6131. this._height = height | 0;
  6132. // resize the output texture(s)
  6133. for(let i = 0; i < this._texture.length; i++) {
  6134. if(this._texture[i] != null)
  6135. this._texture[i].resize(this._width, this._height);
  6136. }
  6137. // done!
  6138. return this;
  6139. }
  6140. /**
  6141. * Use the provided texture(s) as output
6142. * @param {...SpeedyDrawableTexture|null} texture set to null to use the internal texture(s)
  6143. * @returns {SpeedyProgram} this
  6144. */
  6145. _setOutputTexture(...texture)
  6146. {
  6147. utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
  6148. // update output texture(s)
  6149. for(let i = 0; i < this._texture.length; i++)
  6150. this._texture[i] = texture[i];
  6151. this._textureIndex = 0;
  6152. // done!
  6153. return this;
  6154. }
  6155. /**
  6156. * Clear the internal textures
  6157. * @returns {SpeedyDrawableTexture}
  6158. */
  6159. clear()
  6160. {
  6161. const texture = this._texture[this._textureIndex];
  6162. // clear internal textures
  6163. for(let i = 0; i < this._texture.length; i++)
  6164. this._texture[i].clear();
  6165. // ping-pong rendering
  6166. this._pingpong();
  6167. // done!
  6168. return texture;
  6169. }
  6170. /**
  6171. * Set data using a Uniform Buffer Object
  6172. * @param {string} blockName uniform block name
  6173. * @param {ArrayBufferView} data
  6174. * @returns {SpeedyProgram} this
  6175. */
  6176. setUBO(blockName, data)
  6177. {
  6178. if(this._ubo === null)
  6179. this._ubo = new UBOHelper(this._gl, this._program);
  6180. this._ubo.set(blockName, data);
  6181. return this;
  6182. }
  6183. /**
  6184. * Release the resources associated with this SpeedyProgram
  6185. * @returns {null}
  6186. */
  6187. release()
  6188. {
  6189. const gl = this._gl;
  6190. // Release UBOs (if any)
  6191. if(this._ubo != null)
  6192. this._ubo = this._ubo.release();
  6193. // Unlink textures
  6194. this._texture.fill(null);
  6195. // Release geometry
  6196. this._geometry = this._geometry.release();
  6197. // Release program
  6198. gl.deleteProgram(this._program);
  6199. this._program = null;
  6200. // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
  6201. // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
  6202. // will automatically be marked for deletion when the JS object is
  6203. // destroyed (i.e., garbage collected)
  6204. // done!
  6205. return null;
  6206. }
  6207. /**
  6208. * A constant #defined in the shader declaration
  6209. * @param {string} name
  6210. * @returns {number}
  6211. */
  6212. definedConstant(name)
  6213. {
  6214. return this._shaderdecl.definedConstant(name);
  6215. }
  6216. /**
  6217. * Helper method for pingpong rendering: alternates
  6218. * the texture index from 0 to 1 and vice-versa
  6219. */
  6220. _pingpong()
  6221. {
  6222. if(this._texture.length > 1)
  6223. this._textureIndex = 1 - this._textureIndex;
  6224. }
  6225. /**
  6226. * Compile and link GLSL shaders
  6227. * @param {WebGL2RenderingContext} gl
  6228. * @param {string} vertexShaderSource GLSL code of the vertex shader
  6229. * @param {string} fragmentShaderSource GLSL code of the fragment shader
  6230. * @returns {WebGLProgram}
  6231. */
  6232. static _compile(gl, vertexShaderSource, fragmentShaderSource)
  6233. {
  6234. const program = gl.createProgram();
  6235. const vertexShader = gl.createShader(gl.VERTEX_SHADER);
  6236. const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  6237. // compile vertex shader
  6238. gl.shaderSource(vertexShader, vertexShaderSource);
  6239. gl.compileShader(vertexShader);
  6240. gl.attachShader(program, vertexShader);
  6241. // compile fragment shader
  6242. gl.shaderSource(fragmentShader, fragmentShaderSource);
  6243. gl.compileShader(fragmentShader);
  6244. gl.attachShader(program, fragmentShader);
  6245. // link program
  6246. gl.linkProgram(program);
  6247. gl.validateProgram(program);
  6248. // return on success
  6249. if(gl.getProgramParameter(program, gl.LINK_STATUS))
  6250. return program;
  6251. // display an error
  6252. const errors = [
  6253. gl.getShaderInfoLog(fragmentShader),
  6254. gl.getShaderInfoLog(vertexShader),
  6255. gl.getProgramInfoLog(program),
  6256. ];
  6257. gl.deleteProgram(program);
  6258. gl.deleteShader(fragmentShader);
  6259. gl.deleteShader(vertexShader);
  6260. // display error
  6261. const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
  6262. const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
  6263. const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
  6264. const formattedSource = source.split('\n')
  6265. .map((line, no) => col(1+no) + line)
  6266. .join('\n');
  6267. throw new utils_errors/* GLError */.wB(
  6268. `\n\n---------- ERROR ----------\n\n` +
  6269. errors.filter(err => err).join('\n') +
  6270. `\n\n---------- SOURCE CODE ----------\n\n` +
  6271. formattedSource + '\n'
  6272. );
  6273. }
  6274. }
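/*
 * A minimal sketch of the SpeedyProgram lifecycle, for illustration only
 * (unbundled identifiers; `gl`, `width`, `height`, `inputTexture` and
 * `outputTexture` are assumed to exist):
 *
 *   const decl = importShader('utils/copy.glsl').withArguments('image').build();
 *   const program = new SpeedyProgram(gl, decl, { renderToTexture: true });
 *   program.outputs(width, height, outputTexture); // set the output shape & render target
 *   const result = program(inputTexture);          // run the shader; result === outputTexture
 *   program.release();                             // free GPU resources when done
 *
 * With { pingpong: true }, outputs() expects two textures and successive calls
 * alternate between them, so that a pass may read the result of the previous
 * pass without using its own output texture as an input.
 */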
  6275. // ============================================================================
  6276. // HELPERS
  6277. // ============================================================================
  6278. /**
  6279. * Configure and store the VAO and the VBOs
  6280. * @param {WebGL2RenderingContext} gl
  6281. * @param {LocationOfAttributes} location
  6282. * @returns {ProgramGeometry}
  6283. *
  6284. * @typedef {Object} LocationOfAttributes
  6285. * @property {number} position
  6286. * @property {number} texCoord
  6287. *
  6288. * @typedef {Object} BufferOfAttributes
  6289. * @property {WebGLBuffer} position
  6290. * @property {WebGLBuffer} texCoord
  6291. */
  6292. function ProgramGeometry(gl, location)
  6293. {
  6294. /** @type {WebGLVertexArrayObject} Vertex Array Object */
  6295. this.vao = gl.createVertexArray();
  6296. /** @type {BufferOfAttributes} Vertex Buffer Objects */
  6297. this.vbo = Object.freeze({
  6298. position: gl.createBuffer(),
  6299. texCoord: gl.createBuffer()
  6300. });
  6301. /** @type {WebGL2RenderingContext} */
  6302. this._gl = gl;
  6303. // bind the VAO
  6304. gl.bindVertexArray(this.vao);
  6305. // set the position attribute
  6306. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
  6307. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  6308. // clip coordinates (CCW)
  6309. -1, -1,
  6310. 1, -1,
  6311. -1, 1,
  6312. -1, 1,
  6313. 1, -1,
  6314. 1, 1,
  6315. ]), gl.STATIC_DRAW);
  6316. gl.enableVertexAttribArray(location.position);
  6317. gl.vertexAttribPointer(location.position, // attribute location
  6318. 2, // 2 components per vertex (x,y)
  6319. gl.FLOAT, // type
  6320. false, // don't normalize
  6321. 0, // default stride (tightly packed)
  6322. 0); // offset
  6323. // set the texCoord attribute
  6324. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
  6325. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  6326. // texture coordinates (CCW)
  6327. 0, 0,
  6328. 1, 0,
  6329. 0, 1,
  6330. 0, 1,
  6331. 1, 0,
  6332. 1, 1,
  6333. ]), gl.STATIC_DRAW);
  6334. gl.enableVertexAttribArray(location.texCoord);
  6335. gl.vertexAttribPointer(location.texCoord, // attribute location
  6336. 2, // 2 components per vertex (x,y)
  6337. gl.FLOAT, // type
  6338. false, // don't normalize
  6339. 0, // default stride (tightly packed)
  6340. 0); // offset
  6341. // unbind
  6342. gl.bindBuffer(gl.ARRAY_BUFFER, null);
  6343. gl.bindVertexArray(null);
  6344. // done!
  6345. return Object.freeze(this);
  6346. }
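/*
 * Note: the 6 vertices above define two counter-clockwise triangles that cover
 * the whole clip space [-1,1] x [-1,1], with texture coordinates spanning
 * [0,1] x [0,1]. This is why SpeedyProgram._call() can simply issue
 * gl.drawArrays(gl.TRIANGLES, 0, 6): the fragment shader runs once for every
 * pixel of the output viewport.
 */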
  6347. /**
  6348. * Releases the internal resources
  6349. * @returns {null}
  6350. */
  6351. ProgramGeometry.prototype.release = function()
  6352. {
  6353. const gl = this._gl;
  6354. gl.deleteVertexArray(this.vao);
  6355. gl.deleteBuffer(this.vbo.position);
  6356. gl.deleteBuffer(this.vbo.texCoord);
  6357. return null;
  6358. }
  6359. /**
  6360. * Helper class for storing data in GLSL uniform variables
  6361. * @param {string} type
  6362. * @param {WebGLUniformLocation} location
  6363. */
  6364. function UniformVariable(type, location)
  6365. {
  6366. /** @type {string} GLSL data type */
  6367. this.type = String(type);
  6368. if(!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type))
  6369. throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
  6370. /** @type {WebGLUniformLocation} uniform location in a WebGL program */
  6371. this.location = location;
  6372. /** @type {string} setter function */
  6373. this.setter = UNIFORM_SETTERS[this.type];
  6374. const n = Number((this.setter.match(/^uniform(Matrix)?(\d)/))[2]) | 0;
  6375. /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
  6376. this.dim = this.type.startsWith('mat') ? 2 : ((this.type.indexOf('vec') >= 0) ? 1 : 0);
  6377. /** @type {number} required number of scalars */
  6378. this.length = (this.dim == 2) ? n * n : n;
  6379. /** @type {SpeedyProgramUniformValue|null} cached value */
  6380. this._value = null;
  6381. }
  6382. /**
  6383. * Set the value of a uniform variable
  6384. * @param {WebGL2RenderingContext} gl
  6385. * @param {SpeedyProgramUniformValue} value use column-major format for matrices
  6386. * @param {number} [texNo] current texture index
  6387. * @returns {number} new texture index
  6388. */
  6389. UniformVariable.prototype.setValue = function(gl, value, texNo = -1)
  6390. {
  6391. const setValue = /** @type {Function} */ ( gl[this.setter] );
  6392. // check uniform type
  6393. if(typeof value === 'object' && this.type.endsWith('sampler2D')) {
  6394. // set texture
6395. if(texNo >= gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)) // query the actual limit (the bare enum gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS is just a constant, not the limit)
6396. throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)}`);
  6397. else if(Array.isArray(value))
  6398. throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);
  6399. else if(value == null)
  6400. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);
  6401. else if(texNo < 0)
  6402. throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`);
  6403. const tex = value;
  6404. gl.activeTexture(gl.TEXTURE0 + texNo);
  6405. gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
  6406. gl.uniform1i(this.location, texNo);
  6407. texNo++;
  6408. }
  6409. else if(value === this._value && typeof value !== 'object') {
  6410. // do not update the uniform if it hasn't changed
  6411. // note that value may be an array whose entries may have been updated
  6412. void(0);
  6413. }
  6414. else if(typeof value === 'number' || typeof value === 'boolean') {
  6415. // set scalar value
  6416. setValue.call(gl, this.location, value);
  6417. }
  6418. else if(Array.isArray(value)) {
  6419. // set vector or matrix
  6420. if(value.length === this.length) {
  6421. if(this.dim == 2)
  6422. setValue.call(gl, this.location, false, value); // matrix
  6423. else
  6424. setValue.call(gl, this.location, ...value); // vector
  6425. }
  6426. else
  6427. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
  6428. }
  6429. else
  6430. throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
  6431. // cache the value
  6432. this._value = value;
  6433. // done
  6434. return texNo;
  6435. }
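/*
 * An illustrative trace of the dispatch above, assuming a shader that declares
 * `uniform vec2 offset;` (the variable name is hypothetical):
 *
 *   const u = new UniformVariable('vec2', gl.getUniformLocation(program, 'offset'));
 *   // u.setter == 'uniform2f', u.dim == 1 (vector), u.length == 2
 *   u.setValue(gl, [ 4, 2 ]); // calls gl.uniform2f(u.location, 4, 2)
 *
 * Matrices ('mat2', 'mat3', 'mat4') take a flat column-major array and go
 * through gl.uniformMatrix*fv(location, false, value); sampler2D uniforms take
 * a texture object, bind it to texture unit texNo and return texNo + 1.
 */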
  6436. /**
  6437. * @typedef {object} UBOStuff
  6438. * @property {WebGLBuffer} buffer
  6439. * @property {number} blockBindingIndex "global" binding index
  6440. * @property {number} blockIndex UBO "location" in the program
  6441. * @property {ArrayBufferView|null} data user-data
  6442. */
  6443. /**
  6444. * A helper class for handling Uniform Buffer Objects (UBOs)
  6445. * @param {WebGL2RenderingContext} gl
  6446. * @param {WebGLProgram} program
  6447. */
  6448. function UBOHelper(gl, program)
  6449. {
  6450. /** @type {WebGL2RenderingContext} */
  6451. this._gl = gl;
  6452. /** @type {WebGLProgram} */
  6453. this._program = program;
  6454. /** @type {number} auto-increment counter */
  6455. this._nextIndex = 0;
  6456. /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
  6457. this._ubo = Object.create(null);
  6458. }
  6459. /**
  6460. * Set Uniform Buffer Object data
  6461. * (the buffer will be uploaded when the program is executed)
  6462. * @param {string} name uniform block name
  6463. * @param {ArrayBufferView} data
  6464. */
  6465. UBOHelper.prototype.set = function(name, data)
  6466. {
  6467. const gl = this._gl;
  6468. // create UBO entry
  6469. if(this._ubo[name] === undefined) {
  6470. this._ubo[name] = {
  6471. buffer: gl.createBuffer(),
  6472. blockBindingIndex: this._nextIndex++,
  6473. blockIndex: -1,
  6474. data: null
  6475. };
  6476. }
  6477. // get UBO entry for the given block name
  6478. const ubo = this._ubo[name];
  6479. // read block index & assign binding point
  6480. if(ubo.blockIndex < 0) {
  6481. const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
  6482. gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
  6483. ubo.blockIndex = blockIndex;
  6484. }
  6485. // store the data - we'll upload it later
  6486. ubo.data = data;
  6487. }
  6488. /**
  6489. * Update UBO data
  6490. * Called when we're using the appropriate WebGLProgram
  6491. */
  6492. UBOHelper.prototype.update = function()
  6493. {
  6494. const gl = this._gl;
  6495. for(const name in this._ubo) {
  6496. const ubo = this._ubo[name];
  6497. gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
  6498. gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
  6499. gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
  6500. gl.bindBuffer(gl.UNIFORM_BUFFER, null);
  6501. }
  6502. }
  6503. /**
  6504. * Release allocated buffers
  6505. * @returns {null}
  6506. */
  6507. UBOHelper.prototype.release = function()
  6508. {
  6509. const gl = this._gl;
  6510. for(const name in this._ubo) {
  6511. const ubo = this._ubo[name];
  6512. gl.deleteBuffer(ubo.buffer);
  6513. ubo.data = null;
  6514. }
  6515. return null;
  6516. }
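/*
 * A minimal sketch of feeding a uniform block through the UBO helper, for
 * illustration only. The block name, its GLSL layout and the `program` and
 * `inputTexture` objects are assumptions and must match what the shader
 * actually declares:
 *
 *   // GLSL (hypothetical): layout(std140) uniform Settings { vec4 params; };
 *   const data = new Float32Array([ 1, 2, 3, 4 ]);
 *   program.setUBO('Settings', data); // stored now...
 *   program(inputTexture);            // ...uploaded in _call() via UBOHelper.update()
 *
 * The data is re-uploaded with gl.DYNAMIC_DRAW on every run of the program, so
 * mutating the typed array between calls is enough to change the block's contents.
 */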
  6517. /**
  6518. * Generates an indexed variable name, as in variable[index]
  6519. * @param {string} variable
  6520. * @param {number} index
  6521. * @returns {string} variable[index]
  6522. */
  6523. function indexedVariable(variable, index)
  6524. {
  6525. //return `${variable}[${index}]`; // no caching
  6526. // is this cache lookup really faster than string concatenation?
  6527. // what about memory consumption?
  6528. const cache = indexedVariable.cache;
  6529. let nameList = cache.get(variable);
  6530. if(nameList === undefined)
  6531. cache.set(variable, nameList = []);
  6532. if(nameList[index] === undefined)
  6533. nameList[index] = `${variable}[${index}]`;
  6534. return nameList[index];
  6535. }
  6536. /** @type {Map<string,string[]>} cached argument names */
  6537. indexedVariable.cache = new Map(); // Object.create(null)
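/*
 * Example: indexedVariable('kernel', 2) returns the string 'kernel[2]', which
 * is how WebGL exposes individual elements of a uniform array through
 * getUniformLocation(). SpeedyProgram uses it both to detect array arguments
 * in _init() (is there a uniform named arg[0]?) and to set each element of the
 * array in _call().
 */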
  6538. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
  6539. /*
  6540. * speedy-vision.js
  6541. * GPU-accelerated Computer Vision for JavaScript
  6542. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6543. *
  6544. * Licensed under the Apache License, Version 2.0 (the "License");
  6545. * you may not use this file except in compliance with the License.
  6546. * You may obtain a copy of the License at
  6547. *
  6548. * http://www.apache.org/licenses/LICENSE-2.0
  6549. *
  6550. * Unless required by applicable law or agreed to in writing, software
  6551. * distributed under the License is distributed on an "AS IS" BASIS,
  6552. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6553. * See the License for the specific language governing permissions and
  6554. * limitations under the License.
  6555. *
  6556. * speedy-program-group.js
  6557. * An abstract group of programs that run on the GPU
  6558. */
  6559. /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
  6560. /**
  6561. * @typedef {object} SpeedyProgramHelpers
  6562. * @property {function(): SpeedyProgramOptions} usesPingpongRendering
  6563. * @property {function(): SpeedyProgramOptions} rendersToCanvas
  6564. */
  6565. /** @const {SpeedyProgramHelpers} Program settings generator */
  6566. const PROGRAM_HELPERS = Object.freeze({
  6567. /**
  6568. * Pingpong Rendering: the output texture of a
  6569. * program cannot be used as an input to itself.
  6570. * This is a convenient helper in these situations
  6571. * @returns {SpeedyProgramOptions}
  6572. */
  6573. usesPingpongRendering() {
  6574. return {
  6575. pingpong: true
  6576. };
  6577. },
  6578. /**
  6579. * Render to canvas
  6580. * Use it when we're supposed to see the texture
  6581. * @returns {SpeedyProgramOptions}
  6582. */
  6583. rendersToCanvas() {
  6584. return {
  6585. renderToTexture: false
  6586. };
  6587. },
  6588. });
  6589. /**
  6590. * SpeedyProgramGroup
  6591. * A semantically correlated group
  6592. * of programs that run on the GPU
  6593. * @abstract
  6594. */
  6595. class SpeedyProgramGroup
  6596. {
  6597. /**
  6598. * Class constructor
  6599. * @protected
  6600. * @param {SpeedyGPU} gpu
  6601. */
  6602. constructor(gpu)
  6603. {
  6604. /** @type {SpeedyGPU} GPU-accelerated routines */
  6605. this._gpu = gpu;
  6606. /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
  6607. this._programs = [];
  6608. }
  6609. /**
  6610. * Declare a program
  6611. * @protected
  6612. * @param {string} name Program name
  6613. * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
  6614. * @param {SpeedyProgramOptions} [options] Program settings
  6615. * @returns {this}
  6616. */
  6617. declare(name, builder, options = {})
  6618. {
  6619. // lazy instantiation of kernels
  6620. Object.defineProperty(this, name, {
  6621. get: (() => {
  6622. // Why cast a symbol to symbol?
  6623. // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
  6624. const key = /** @type {symbol} */ ( Symbol(name) );
  6625. return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
  6626. })()
  6627. });
  6628. return this;
  6629. }
  6630. /**
  6631. * Neat helpers to be used when declaring programs
  6632. * @returns {SpeedyProgramHelpers}
  6633. */
  6634. get program()
  6635. {
  6636. return PROGRAM_HELPERS;
  6637. }
  6638. /**
  6639. * Releases all programs from this group
  6640. * @returns {null}
  6641. */
  6642. release()
  6643. {
  6644. for(let i = 0; i < this._programs.length; i++)
  6645. this._programs[i].release();
  6646. return null;
  6647. }
  6648. /**
  6649. * Spawn a SpeedyProgram
  6650. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6651. * @param {SpeedyProgramOptions} [options] Program settings
  6652. * @returns {SpeedyProgram}
  6653. */
  6654. _createProgram(shaderdecl, options = {})
  6655. {
  6656. const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
  6657. this._programs.push(program);
  6658. return program;
  6659. }
  6660. }
  6661. ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
  6662. /*
  6663. * speedy-vision.js
  6664. * GPU-accelerated Computer Vision for JavaScript
  6665. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6666. *
  6667. * Licensed under the Apache License, Version 2.0 (the "License");
  6668. * you may not use this file except in compliance with the License.
  6669. * You may obtain a copy of the License at
  6670. *
  6671. * http://www.apache.org/licenses/LICENSE-2.0
  6672. *
  6673. * Unless required by applicable law or agreed to in writing, software
  6674. * distributed under the License is distributed on an "AS IS" BASIS,
  6675. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6676. * See the License for the specific language governing permissions and
  6677. * limitations under the License.
  6678. *
  6679. * utils.js
  6680. * GPU utilities
  6681. */
  6682. //
  6683. // Shaders
  6684. //
  6685. // Copy image
  6686. const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
  6687. // Copy keypoints
  6688. const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({ 'TYPE': 1 }).withArguments('image');
  6689. // Copy 2D vectors
  6690. const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({ 'TYPE': 2 }).withArguments('image');
  6691. // Flip y-axis for output
  6692. const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
  6693. // Fill image with a constant
  6694. const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
  6695. // Fill zero or more color components of the input image with a constant value
  6696. const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
  6697. // Copy the src component of src to zero or more color components of a copy of dest
  6698. const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
  6699. // Scan the entire image and find the minimum & maximum pixel intensity
  6700. const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
  6701. // Compute the partial derivatives of an image
  6702. const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
  6703. /**
  6704. * SpeedyProgramGroupUtils
  6705. * Utility operations
  6706. */
  6707. class SpeedyProgramGroupUtils extends SpeedyProgramGroup
  6708. {
  6709. /**
  6710. * Class constructor
  6711. * @param {SpeedyGPU} gpu
  6712. */
  6713. constructor(gpu)
  6714. {
  6715. super(gpu);
  6716. this
  6717. // render to the canvas
  6718. .declare('renderToCanvas', flipY, {
  6719. ...this.program.rendersToCanvas()
  6720. })
  6721. // copy image
  6722. .declare('copy', copy)
  6723. // copy keypoints
  6724. .declare('copyKeypoints', copyKeypoints)
  6725. // copy 2D vectors
  6726. .declare('copy2DVectors', copy2DVectors)
  6727. // Fill image with a constant
  6728. .declare('fill', fill)
  6729. // Fill zero or more color components of the input image with a constant value
  6730. .declare('fillComponents', fillComponents)
  6731. // Copy the src component of src to zero or more color components of a copy of dest
  6732. .declare('copyComponents', copyComponents)
  6733. // find minimum & maximum pixel intensity
  6734. .declare('scanMinMax2D', scanMinMax2D, {
  6735. ...this.program.usesPingpongRendering()
  6736. })
  6737. // Compute the partial derivatives of an image
  6738. .declare('sobelDerivatives', sobelDerivatives)
  6739. ;
  6740. }
  6741. }
  6742. // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
  6743. var convolution = __nested_webpack_require_320900__(5282);
  6744. ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
  6745. /*
  6746. * speedy-vision.js
  6747. * GPU-accelerated Computer Vision for JavaScript
  6748. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6749. *
  6750. * Licensed under the Apache License, Version 2.0 (the "License");
  6751. * you may not use this file except in compliance with the License.
  6752. * You may obtain a copy of the License at
  6753. *
  6754. * http://www.apache.org/licenses/LICENSE-2.0
  6755. *
  6756. * Unless required by applicable law or agreed to in writing, software
  6757. * distributed under the License is distributed on an "AS IS" BASIS,
  6758. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6759. * See the License for the specific language governing permissions and
  6760. * limitations under the License.
  6761. *
  6762. * filters.js
  6763. * Image filtering on the GPU
  6764. */
  6765. //
  6766. // Shaders
  6767. //
  6768. // Convert to greyscale
  6769. const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl')
  6770. .withArguments('image');
  6771. // Convolution
  6772. const filters_convolution = [3, 5, 7].reduce((obj, ksize) => ((obj[ksize] =
  6773. (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl')
  6774. .withDefines({ 'KERNEL_SIZE_SQUARED': ksize * ksize })
  6775. .withArguments('image', 'kernel')
  6776. ), obj), {});
  6777. // Separable convolution
  6778. const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => ((obj[ksize] =
  6779. (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl')
  6780. .withDefines({ 'KERNEL_SIZE': ksize, 'AXIS': 0 })
  6781. .withArguments('image', 'kernel')
  6782. ), obj), {});
  6783. const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => ((obj[ksize] =
  6784. (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl')
  6785. .withDefines({ 'KERNEL_SIZE': ksize, 'AXIS': 1 })
  6786. .withArguments('image', 'kernel')
  6787. ), obj), {});
  6788. // Median filter
  6789. const median = [3, 5, 7].reduce((obj, ksize) => ((obj[ksize] =
  6790. (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl')
  6791. .withDefines({ 'KERNEL_SIZE': ksize })
  6792. .withArguments('image')
  6793. ), obj), {});
  6794. // Normalize image
  6795. const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl')
  6796. .withDefines({ 'GREYSCALE': 1 })
  6797. .withArguments('minmax2d', 'minValue', 'maxValue');
  6798. const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl')
  6799. .withDefines({ 'GREYSCALE': 0 })
  6800. .withArguments('minmax2dRGB', 'minValue', 'maxValue');
  6801. // Nightvision
  6802. const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl')
  6803. .withDefines({ 'GREYSCALE': 0 })
  6804. .withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6805. const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl')
  6806. .withDefines({ 'GREYSCALE': 1 })
  6807. .withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6808. //
  6809. // Utilities
  6810. //
  6811. // Handy conversion for Gaussian filters
  6812. // (symmetric kernel, approx. zero after 3*sigma)
  6813. const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
  6814. // Generate a 1D Gaussian kernel
  6815. const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
  6816. // Generate a 1D Box filter
  6817. const box = ksize => (new Array(ksize)).fill(1.0 / ksize);
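/*
 * A small worked example of the kernel helpers above, for illustration:
 *
 *   ksize2sigma(9); // == Math.max(1.0, 9/6) == 1.5
 *   gaussian(9);    // == Utils.gaussianKernel(1.5, 9), a 9-tap Gaussian kernel
 *   box(3);         // == [ 1/3, 1/3, 1/3 ]
 *
 * Gaussian and box kernels are separable: convolving with the 1D kernel along
 * the x-axis and then along the y-axis is equivalent to a full 2D convolution
 * with the corresponding 2D kernel, at a lower cost. That's what the
 * convolution*x and convolution*y programs declared below exploit.
 */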
  6818. /**
  6819. * SpeedyProgramGroupFilters
  6820. * Image filtering
  6821. */
  6822. class SpeedyProgramGroupFilters extends SpeedyProgramGroup
  6823. {
  6824. /**
  6825. * Class constructor
  6826. * @param {SpeedyGPU} gpu
  6827. */
  6828. constructor(gpu)
  6829. {
  6830. super(gpu);
  6831. this
  6832. // convert to greyscale
  6833. .declare('rgb2grey', rgb2grey)
  6834. // median filters
  6835. .declare('median3', median[3]) // 3x3 window
  6836. .declare('median5', median[5]) // 5x5 window
  6837. .declare('median7', median[7]) // 7x7 window
  6838. // 2D convolution
  6839. .declare('convolution3', filters_convolution[3]) // 3x3 kernel
  6840. .declare('convolution5', filters_convolution[5]) // 5x5 kernel
  6841. .declare('convolution7', filters_convolution[7]) // 7x7 kernel
  6842. // 1D separable convolution
  6843. .declare('convolution3x', convolutionX[3]) // 1x3 kernel
  6844. .declare('convolution3y', convolutionY[3]) // 3x1 kernel
  6845. .declare('convolution5x', convolutionX[5]) // 1x5 kernel
  6846. .declare('convolution5y', convolutionY[5]) // 5x1 kernel
  6847. .declare('convolution7x', convolutionX[7])
  6848. .declare('convolution7y', convolutionY[7])
  6849. .declare('convolution9x', convolutionX[9])
  6850. .declare('convolution9y', convolutionY[9])
  6851. .declare('convolution11x', convolutionX[11])
  6852. .declare('convolution11y', convolutionY[11])
  6853. .declare('convolution13x', convolutionX[13])
  6854. .declare('convolution13y', convolutionY[13])
  6855. .declare('convolution15x', convolutionX[15])
  6856. .declare('convolution15y', convolutionY[15])
  6857. // normalize image
  6858. .declare('normalizeGreyscale', normalizeGreyscale)
  6859. .declare('normalizeColored', normalizeColored)
  6860. // nightvision
  6861. .declare('nightvision', nightvision)
  6862. .declare('nightvisionGreyscale', nightvisionGreyscale)
  6863. .declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31)))
  6864. .declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31)))
  6865. .declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63)))
  6866. .declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63)))
  6867. .declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255)))
  6868. .declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
  6869. // gaussian: separable kernels
  6870. // see also: http://dev.theomader.com/gaussian-kernel-calculator/
  6871. .declare('gaussian3x', (0,convolution.convX)([ 0.25, 0.5, 0.25 ])) // sigma ~ 1.0
  6872. .declare('gaussian3y', (0,convolution.convY)([ 0.25, 0.5, 0.25 ]))
  6873. .declare('gaussian5x', (0,convolution.convX)([ 0.05, 0.25, 0.4, 0.25, 0.05 ])) // sigma ~ 1.0
  6874. .declare('gaussian5y', (0,convolution.convY)([ 0.05, 0.25, 0.4, 0.25, 0.05 ]))
  6875. .declare('gaussian7x', (0,convolution.convX)(gaussian(7)))
  6876. .declare('gaussian7y', (0,convolution.convY)(gaussian(7)))
  6877. .declare('gaussian9x', (0,convolution.convX)(gaussian(9)))
  6878. .declare('gaussian9y', (0,convolution.convY)(gaussian(9)))
  6879. .declare('gaussian11x', (0,convolution.convX)(gaussian(11)))
  6880. .declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
  6881. // box filter: separable kernels
  6882. .declare('box3x', (0,convolution.convX)(box(3)))
  6883. .declare('box3y', (0,convolution.convY)(box(3)))
  6884. .declare('box5x', (0,convolution.convX)(box(5)))
  6885. .declare('box5y', (0,convolution.convY)(box(5)))
  6886. .declare('box7x', (0,convolution.convX)(box(7)))
  6887. .declare('box7y', (0,convolution.convY)(box(7)))
  6888. .declare('box9x', (0,convolution.convX)(box(9)))
  6889. .declare('box9y', (0,convolution.convY)(box(9)))
  6890. .declare('box11x', (0,convolution.convX)(box(11)))
  6891. .declare('box11y', (0,convolution.convY)(box(11)))
  6892. ;
  6893. }
  6894. }
  6895. // EXTERNAL MODULE: ./src/core/speedy-namespace.js
  6896. var speedy_namespace = __nested_webpack_require_320900__(416);
  6897. ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
  6898. /*
  6899. * speedy-vision.js
  6900. * GPU-accelerated Computer Vision for JavaScript
  6901. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6902. *
  6903. * Licensed under the Apache License, Version 2.0 (the "License");
  6904. * you may not use this file except in compliance with the License.
  6905. * You may obtain a copy of the License at
  6906. *
  6907. * http://www.apache.org/licenses/LICENSE-2.0
  6908. *
  6909. * Unless required by applicable law or agreed to in writing, software
  6910. * distributed under the License is distributed on an "AS IS" BASIS,
  6911. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6912. * See the License for the specific language governing permissions and
  6913. * limitations under the License.
  6914. *
  6915. * speedy-descriptordb.js
  6916. * A database of binary descriptors in video memory
  6917. */
  6918. //
  6919. // A database of binary descriptors is a texture that stores
  6920. // a set of (descriptor: uint8_t[]) entries.
  6921. //
  6922. /** @type {number} we use RGBA8 textures to store the descriptors */
  6923. const DESCRIPTORDB_BYTESPERPIXEL = 4;
  6924. /** @type {number} texture size goes up to 16 MB */
6925. const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (OpenGL ES 3.0, on which WebGL2 is based, requires MAX_TEXTURE_SIZE to be at least 2048)
  6926. /**
  6927. * Utility for generating a database of binary descriptors in video memory
  6928. */
  6929. class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q
  6930. {
  6931. /**
  6932. * Create a database of binary descriptors
  6933. * @param {SpeedyTexture} texture output texture
  6934. * @param {Uint8Array[]} descriptors binary descriptors
  6935. * @param {number} descriptorSize in bytes, a multiple of 4
  6936. * @returns {SpeedyTexture} texture
  6937. */
  6938. static create(texture, descriptors, descriptorSize)
  6939. {
  6940. utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
  6941. const numberOfDescriptors = descriptors.length;
  6942. const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
  6943. // find an appropriate texture size
  6944. const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
  6945. const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
  6946. // setup texture parameters
  6947. const stride = 1 << log2stride;
  6948. const width = stride, height = stride; // we use powers-of-two
  6949. // are we within storage capacity?
  6950. const capacity = (width * height) / pixelsPerDescriptor;
  6951. if(numberOfDescriptors > capacity)
  6952. throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
  6953. // create texture data
  6954. const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
  6955. for(let i = 0; i < numberOfDescriptors; i++) {
  6956. const byteOffset = i * descriptorSize;
  6957. const descriptor = descriptors[i];
  6958. // validate input
  6959. utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
  6960. utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
  6961. // write data
  6962. data.set(descriptor, byteOffset);
  6963. }
  6964. // log data for further study
  6965. const MEGABYTE = 1048576;
  6966. const totalSize = numberOfDescriptors * descriptorSize;
  6967. utils/* Utils */.A.log(
  6968. `Creating a ${width}x${height} database of ${numberOfDescriptors} ` +
  6969. `${descriptorSize * 8}-bit descriptors ` +
  6970. `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`
  6971. );
  6972. // upload to the texture
  6973. texture.resize(width, height);
  6974. texture.upload(data);
  6975. return texture;
  6976. }
  6977. }
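/*
 * A worked example of the sizing logic above, for illustration. Storing 1000
 * descriptors of 32 bytes (256 bits) each:
 *
 *   pixelsPerDescriptor = 32 / 4 = 8 RGBA8 pixels per descriptor
 *   n = log2(8 * 1000) / 2 ~= 6.48, so log2stride = ceil(6.48) = 7
 *   stride = 2^7 = 128, i.e., a 128x128 texture with capacity 128*128 / 8 = 2048 descriptors
 *
 *   SpeedyDescriptorDB.create(texture, descriptors, 32);
 */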
  6978. ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
  6979. /*
  6980. * speedy-vision.js
  6981. * GPU-accelerated Computer Vision for JavaScript
  6982. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6983. *
  6984. * Licensed under the Apache License, Version 2.0 (the "License");
  6985. * you may not use this file except in compliance with the License.
  6986. * You may obtain a copy of the License at
  6987. *
  6988. * http://www.apache.org/licenses/LICENSE-2.0
  6989. *
  6990. * Unless required by applicable law or agreed to in writing, software
  6991. * distributed under the License is distributed on an "AS IS" BASIS,
  6992. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6993. * See the License for the specific language governing permissions and
  6994. * limitations under the License.
  6995. *
  6996. * speedy-lsh.js
  6997. * GPU-based LSH tables for fast matching of binary descriptors
  6998. */
  6999. /*
  7000. * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
  7001. * ------------------------------------------------
  7002. *
  7003. * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
  7004. * Indices of keypoint descriptors are stored in several tables, each with many
  7005. * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
  7006. * size to match the keypoints.
  7007. *
  7008. * Buckets in video memory may get full. Wouldn't it be cool if we could use a
  7009. * probabilistic approach to let us work within their storage capacity?
  7010. *
  7011. * Let there be n buckets in a table, each with storage capacity c (holding
  7012. * up to c elements). Buckets are numbered from 0 to n-1.
  7013. *
  7014. * We pick uniformly a random bucket to store a new element in the table. Let
  7015. * X be the chosen bucket. The probability that we'll store the new element in
  7016. * any particular bucket k is:
  7017. *
  7018. * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
  7019. *
  7020. * On average, each new element stored in the table inserts 1/n of an element
  7021. * in each bucket. If we add m new elements to the table, each bucket receives
  7022. * m/n elements, on average(*).
  7023. *
  7024. * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
  7025. * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
7026. * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
  7027. *
  7028. * Now let Yi be the number of elements inserted in bucket i in m additions to
  7029. * the table. We model Yi as Poisson(m/n), since on average, m additions to
  7030. * the table result in m/n new elements being inserted in bucket i. Buckets
  7031. * are picked independently. Hence, for all i, the probability that we insert
  7032. * q elements in bucket i in m additions to the table is:
  7033. *
  7034. * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
  7035. *
  7036. * Given that each bucket has storage capacity c, we require Yi <= c with a
  7037. * high probability p (say, p = 0.99). This means that, in m additions, we
  7038. * don't want to exceed the capacity c with high probability. So, let us find
  7039. * a (large) value of m such that:
  7040. *
  7041. * P(Yi <= c) >= p
  7042. *
  7043. * Sounds good! We can find the largest matching m using binary search.
  7044. *
  7045. * I don't think we need to enforce a high probability that ALL buckets stay
  7046. * within their capacity - n is large, we need to use the available space, and
  7047. * we have multiple tables anyway.
  7048. *
  7049. * In practice, the assumption that buckets are picked uniformly doesn't hold:
  7050. * keypoints that are nearby tend to have similar descriptors and buckets are
  7051. * picked according to those descriptors. Still, this model works well enough
  7052. * in practice and it is simple! That's what I like about it!
  7053. *
  7054. * ... now, how I actually do the matching is the theme of the next episode!
  7055. */
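/*
 * A minimal sketch of the capacity search described above, for illustration
 * only - the actual findTableCapacity() used by generateLSHProfiles() below is
 * defined elsewhere in this bundle and may differ in its details. Given a hash
 * of h bits (n = 2^h buckets per table), a bucket capacity c and a target
 * probability p, we look for the largest m such that P(Poisson(m/n) <= c) >= p:
 *
 *   function sketchTableCapacity(h, c, p)
 *   {
 *       const n = 1 << h;
 *       const ok = m => {
 *           const lambda = m / n;
 *           let term = Math.exp(-lambda), cdf = term; // q = 0
 *           for(let q = 1; q <= c; q++) {             // q = 1, 2, ..., c
 *               term *= lambda / q;
 *               cdf += term;
 *           }
 *           return cdf >= p;                          // P(Yi <= c) >= p ?
 *       };
 *       // ok(m) is monotonically decreasing in m, so a binary search applies
 *       let lo = 0, hi = n * (c + 8);                 // generous upper bound for p close to 1
 *       while(lo < hi) {
 *           const mid = (lo + hi + 1) >>> 1;
 *           if(ok(mid)) lo = mid; else hi = mid - 1;
 *       }
 *       return lo;
 *   }
 */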
  7056. /** @type {number} Default number of tables in a LSH data structure */
  7057. const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
  7058. /** @type {number} Default number of bits of a hash */
  7059. const LSH_DEFAULT_HASH_SIZE = 15;
  7060. /** @type {number[]} Acceptable number of tables for a LSH data structure */
  7061. const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32];
  7062. /** @type {number[]} Acceptable values for hashSize, in bits */
  7063. const LSH_ACCEPTABLE_HASH_SIZES = [10,11,12,13,14,15,16,17,18,19,20];
  7064. /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
  7065. const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32,64];
  7066. /**
  7067. * @typedef {Object} LSHProfile LSH profile
  7068. * @property {string} name name of the profile
  7069. * @property {number} capacity maximum number of keypoints that can be stored in such a table
  7070. * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
  7071. * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
  7072. * @property {number} bucketCapacity maximum number of entries of a bucket of a table
  7073. */
  7074. /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
  7075. const generateLSHProfiles = (t,h,p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [
  7076. {
  7077. name: 'x-small',
  7078. bucketCapacity: 1,
  7079. tableCount: t,
  7080. hashSize: h,
  7081. capacity: findTableCapacity(h, 1, p),
  7082. },
  7083. {
  7084. name: 'small',
  7085. bucketCapacity: 2,
  7086. tableCount: t,
  7087. hashSize: h,
  7088. capacity: findTableCapacity(h, 2, p),
  7089. },
  7090. {
  7091. name: 'small-plus',
  7092. bucketCapacity: 3,
  7093. tableCount: t,
  7094. hashSize: h,
  7095. capacity: findTableCapacity(h, 3, p),
  7096. },
  7097. {
  7098. name: 'medium',
  7099. bucketCapacity: 4,
  7100. tableCount: t,
  7101. hashSize: h,
  7102. capacity: findTableCapacity(h, 4, p),
  7103. },
  7104. {
  7105. name: 'medium-plus',
  7106. bucketCapacity: 5,
  7107. tableCount: t,
  7108. hashSize: h,
  7109. capacity: findTableCapacity(h, 5, p),
  7110. },
  7111. {
  7112. name: 'large',
  7113. bucketCapacity: 6,
  7114. tableCount: t,
  7115. hashSize: h,
  7116. capacity: findTableCapacity(h, 6, p),
  7117. },
  7118. {
  7119. name: 'x-large',
  7120. bucketCapacity: 8,
  7121. tableCount: t,
  7122. hashSize: h,
  7123. capacity: findTableCapacity(h, 8, p),
  7124. },
  7125. ];
  7126. //
  7127. // LSH hash sequences: random bits in increasing order
  7128. // We generate a few sequences (one for each table) supporting up to 16 hash bits
  7129. // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
  7130. //
  7131. /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
  7132. /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
  7133. /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
  7134. /** @type {number} maximum number of elements of a sequence */
  7135. const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
  7136. /** @type {number} number of sequences in a BitSequences object */
  7137. const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
  7138. /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
  7139. const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT)
  7140. .forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i+1) * LSH_SEQUENCE_MAXLEN).sort()),
  7141. seq);
  7142. /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
  7143. const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT)
  7144. .forEach(i => seq.subarray((i+1) * LSH_SEQUENCE_MAXLEN - p, (i+1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)),
  7145. seq);
  7146. /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
  7147. const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p,o) => ((p[o]=f(o)), p), {}))(h => ({
  7148. // for 256-bit descriptors
  7149. 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([
  7150. ...(utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))),
  7151. ...(utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))),
  7152. ...(utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))),
  7153. ].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  7154. // for 512-bit descriptors
  7155. 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([
  7156. ...(utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))),
  7157. ...(utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))),
  7158. ].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  7159. }));
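/*
 * Layout note: for a given hashSize h, each of the LSH_SEQUENCE_COUNT (= 32)
 * subsequences of LSH_SEQUENCE_MAXLEN (= 20) entries holds h bit indices in
 * ascending order (0-255 for 32-byte descriptors, 0-511 for 64-byte ones),
 * followed by 20 - h copies of the invalid marker 0xBADCAFE written by
 * padSequences(). Since one subsequence is generated per table, each LSH table
 * draws the h bits of its hash from its own subsequence.
 */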
  7160. //
  7161. // Misc
  7162. //
  7163. /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
  7164. const LSH_BYTESPERPIXEL = 4;
  7165. /** @type {function(number): number} next power of 2 */
  7166. const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  7167. /**
  7168. * GPU-based LSH tables for fast matching of binary descriptors
  7169. */
  7170. class SpeedyLSH
  7171. {
  7172. /**
  7173. * Constructor
  7174. * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
  7175. * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
  7176. * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
  7177. * @param {number} [tableCount] number of LSH tables, preferably a power of two
  7178. * @param {number} [hashSize] number of bits of a hash of a descriptor
  7179. * @param {number} [probability] probability of no discard events happening in the theoretical model
  7180. */
  7181. constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95)
  7182. {
  7183. const descriptorCount = descriptors.length;
  7184. const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
  7185. const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
  7186. // validate input
  7187. utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
  7188. utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
  7189. utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
  7190. utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
  7191. utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
  7192. /** @type {LSHProfile} LSH profile */
  7193. this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
  7194. /** @type {number} descriptor size, in bytes */
  7195. this._descriptorSize = descriptorSize;
  7196. /** @type {number} number of descriptors */
  7197. this._descriptorCount = descriptorCount;
  7198. /** @type {BitSequences} bit sequences */
  7199. this._sequences = this._pickSequences(this._descriptorSize);
  7200. /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
  7201. this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
  7202. /** @type {SpeedyTexture} a storage of descriptors */
  7203. this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
  7204. }
  7205. /**
  7206. * Descriptor size, in bytes
  7207. * @returns {number}
  7208. */
  7209. get descriptorSize()
  7210. {
  7211. return this._descriptorSize;
  7212. }
  7213. /**
  7214. * Number of descriptors stored in this LSH data structure
  7215. * @returns {number}
  7216. */
  7217. get descriptorCount()
  7218. {
  7219. return this._descriptorCount;
  7220. }
  7221. /**
  7222. * LSH bit sequences
  7223. * @returns {BitSequences}
  7224. */
  7225. get sequences()
  7226. {
  7227. return this._sequences;
  7228. }
  7229. /**
  7230. * Number of bits that make a hash
  7231. * @returns {number}
  7232. */
  7233. get hashSize()
  7234. {
  7235. return this._profile.hashSize;
  7236. }
  7237. /**
  7238. * Maximum number of descriptors that can be stored in a bucket of a table
  7239. * @returns {number}
  7240. */
  7241. get bucketCapacity()
  7242. {
  7243. return this._profile.bucketCapacity;
  7244. }
  7245. /**
  7246. * How many buckets per table do we have?
  7247. * @returns {number}
  7248. */
  7249. get bucketsPerTable()
  7250. {
  7251. return 1 << this._profile.hashSize;
  7252. }
  7253. /**
  7254. * Number of LSH tables
  7255. * @returns {number}
  7256. */
  7257. get tableCount()
  7258. {
  7259. return this._profile.tableCount;
  7260. }
  7261. /**
  7262. * Size of one LSH table, in bytes
  7263. * @returns {number}
  7264. */
  7265. get tableSize()
  7266. {
  7267. return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
  7268. }
  7269. /**
  7270. * Size of all LSH tables combined, in bytes
  7271. * @returns {number}
  7272. */
  7273. get totalSize()
  7274. {
  7275. // actually, the total memory in VRAM may be a bit larger than
  7276. // this value, depending on the actual size of the texture
  7277. return this.tableCount * this.tableSize;
  7278. }
  7279. /**
  7280. * LSH tables texture
  7281. * @returns {SpeedyDrawableTexture}
  7282. */
  7283. get tables()
  7284. {
  7285. return this._tables;
  7286. }
  7287. /**
  7288. * A collection of descriptors
  7289. * @returns {SpeedyDrawableTexture}
  7290. */
  7291. get descriptorDB()
  7292. {
  7293. return this._descriptorDB;
  7294. }
  7295. /**
  7296. * Pick the appropriate LSH sequences for a particular descriptor size
  7297. * @param {number} descriptorSize in bytes
  7298. * @returns {BitSequences}
  7299. */
  7300. _pickSequences(descriptorSize)
  7301. {
  7302. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
  7303. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
  7304. return LSH_SEQUENCES[this.hashSize][descriptorSize];
  7305. }
  7306. /**
  7307. * Create LSH tables
  7308. * @param {SpeedyTexture} texture output texture
  7309. * @param {BitSequences} sequences bit sequences
  7310. * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
  7311. * @param {number} descriptorSize in bytes
  7312. * @returns {SpeedyTexture} texture
  7313. */
  7314. _createStaticTables(texture, sequences, descriptors, descriptorSize)
  7315. {
  7316. const END_OF_LIST = 0xFFFFFFFF;
  7317. const profileName = this._profile.name;
  7318. const tableCapacity = this._profile.capacity;
  7319. const tableCount = this.tableCount;
  7320. const bucketsPerTable = this.bucketsPerTable;
  7321. const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
  7322. const hashSize = this.hashSize;
  7323. const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
  7324. const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
  7325. const textureHeight = Math.ceil(numberOfPixels / textureWidth);
  7326. const numberOfDescriptors = descriptors.length;
  7327. // validate input
  7328. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
  7329. utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
  7330. utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
  7331. // log
  7332. const MEGABYTE = 1048576;
  7333. utils/* Utils */.A.log(
  7334. `Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` +
  7335. `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` +
  7336. `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` +
7337. `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB).`
  7338. );
  7339. // warn the user if there are too many descriptors
  7340. if(numberOfDescriptors > tableCapacity) {
  7341. const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
  7342. utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
  7343. }
  7344. // create empty LSH tables
  7345. const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
  7346. const bytes = (new Uint8Array(buffer)).fill(0xFF);
  7347. const data = new DataView(buffer);
  7348. // shuffle the descriptors...
  7349. // it seems like a good idea to handle collisions of similar descriptors,
  7350. // which may be located next to each other in the array
  7351. const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
  7352. // for each descriptor
  7353. // do everything in little-endian format!
  7354. const numberOfDiscardedDescriptorsPerTable = (new Array(tableCount)).fill(0);
  7355. for(let i = 0; i < numberOfDescriptors; i++) {
  7356. const descriptorIndex = permutation[i]; //i;
  7357. const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
  7358. // for each table
  7359. for(let table = 0; table < tableCount; table++) {
  7360. // compute hash & memory addresses
  7361. const hash = hashes[table];
  7362. const tableByteOffset = table * bucketsPerTable * bucketSize;
  7363. const bucketByteOffset = tableByteOffset + hash * bucketSize;
  7364. // find the end of the list
  7365. let index = END_OF_LIST;
  7366. for(let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
  7367. const byteOffset = bucketByteOffset + entryByteOffset;
  7368. index = data.getUint32(byteOffset, true);
  7369. // add the keypoint
  7370. if(index == END_OF_LIST) {
  7371. data.setUint32(byteOffset, descriptorIndex, true);
  7372. break;
  7373. }
  7374. }
  7375. // note: if the bucket is full, we just discard the entry :\
  7376. // we give this event a probabilistic treatment (see above),
  7377. // so it happens with low probability
  7378. if(index != END_OF_LIST)
  7379. numberOfDiscardedDescriptorsPerTable[table]++;
  7380. }
  7381. }
  7382. // log data for further study
  7383. const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
  7384. const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
  7385. utils/* Utils */.A.log(
  7386. `When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` +
  7387. `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` +
  7388. `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` +
  7389. `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` +
  7390. `Minimum: ${Math.min(...profile).toFixed(2)}%. ` +
  7391. `Table capacity: ${tableCapacity}.`
  7392. );
  7393. // upload the LSH tables to the GPU
  7394. texture.resize(textureWidth, textureHeight);
  7395. texture.upload(bytes);
  7396. return texture;
  7397. }
  7398. /**
  7399. * Pick bits from a binary descriptor
  7400. * @param {Uint8Array} descriptor a single descriptor
  7401. * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
  7402. * @returns {number[]} hash code for each table
  7403. */
  7404. _hashCodes(descriptor, sequences)
  7405. {
  7406. const tableCount = this.tableCount;
  7407. const hashSize = this.hashSize;
  7408. const bucketsPerTable = this.bucketsPerTable;
  7409. const hashes = new Array(tableCount);
  7410. //const descriptorSize = descriptor.length;
  7411. // just to be sure...
  7412. utils/* Utils */.A.assert(
  7413. hashSize <= LSH_SEQUENCE_MAXLEN &&
  7414. sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount
  7415. );
  7416. // for each table
  7417. for(let table = 0; table < tableCount; table++) {
  7418. const offset = LSH_SEQUENCE_MAXLEN * table;
  7419. // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
  7420. let hash = 0;
  7421. for(let i = 0; i < hashSize; i++) {
  7422. let bit = sequences[offset + i];
  7423. let b = bit >>> 3;
  7424. let m = 1 << (bit & 7);
  7425. //Utils.assert(b < descriptorSize);
  7426. hash = (hash << 1) | ((descriptor[b] & m) != 0);
  7427. }
  7428. // validate & store
  7429. utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
  7430. hashes[table] = hash;
  7431. }
  7432. // done!
  7433. return hashes;
  7434. }
  7435. }
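// Editor's note: a minimal sketch of the bit-picking scheme used by _hashCodes()
// above, reduced to a single table. `bits` plays the role of one LSH sequence
// (hashSize bit indices into the descriptor). The function name and inputs are
// illustrative only and are not part of the library API.
function exampleLshHash(descriptor /* Uint8Array */, bits /* number[] */)
{
    let hash = 0;
    for(let i = 0; i < bits.length; i++) {
        const bit = bits[i];
        const byteIndex = bit >>> 3;    // which byte of the descriptor holds this bit
        const mask = 1 << (bit & 7);    // which bit within that byte
        hash = (hash << 1) | ((descriptor[byteIndex] & mask) != 0);
    }
    return hash; // an integer in [0, 2^bits.length)
}
// e.g., exampleLshHash(new Uint8Array([ 0b00000101 ]), [0, 1, 2]) picks bits 0..2
// of the first byte (1, 0, 1) and yields 0b101 == 5.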
  7436. /**
  7437. * Compute P(X <= k), where X ~ Poisson(lambda)
  7438. * @param {number} lambda positive number
  7439. * @param {number} k non-negative integer
  7440. * @returns {number}
  7441. */
  7442. function cumulativePoisson(lambda, k)
  7443. {
  7444. const exp = Math.exp(-lambda);
  7445. let sum = 1, fat = 1, pow = 1;
  7446. // k should be small!!!
  7447. for(let i = 1; i <= k; i++)
  7448. sum += (pow *= lambda) / (fat *= i);
  7449. return sum * exp;
  7450. }
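// Editor's note (worked example): for lambda = 1 and k = 2 the sum above is
// 1 + 1 + 1/2 = 2.5, so cumulativePoisson(1, 2) = 2.5 * e^(-1) ≈ 0.9197.
// In the LSH capacity model below, lambda is the expected number of descriptors
// per bucket (m / n) and k is the bucket capacity.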
  7451. /**
  7452. * Find the maximum number of keypoint descriptors that a table can hold
  7453. * @param {number} hashSize positive integer
  7454. * @param {number} bucketCapacity positive integer
  7455. * @param {number} [probability] probability of no discard events happening in the theoretical model
  7456. * @return {number} optimal table capacity
  7457. */
  7458. function findTableCapacity(hashSize, bucketCapacity, probability = 0.99)
  7459. {
7460. const n = 1 << hashSize; // number of buckets
  7461. const c = bucketCapacity;
  7462. const p = probability;
  7463. let l = 1, r = n * c; // watch for overflow!
  7464. let m = 0, pm = 0;
  7465. // binary search
  7466. while(l < r) {
  7467. m = Math.floor((l + r) / 2);
  7468. pm = cumulativePoisson(m / n, c);
  7469. if(pm > p) //if(1-pm < 1-p)
  7470. l = m + 1;
  7471. else
  7472. r = m;
  7473. }
  7474. return m;
  7475. }
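// Editor's note: findTableCapacity() binary-searches for (roughly) the largest
// descriptor count m for which cumulativePoisson(m / n, c) still exceeds p, i.e.
// the point beyond which the per-bucket no-overflow probability drops below p.
// A hypothetical call, shown only to illustrate the parameters:
//   const capacity = findTableCapacity(/* hashSize */ 15, /* bucketCapacity */ 4, 0.99);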
  7476. ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
  7477. /*
  7478. * speedy-vision.js
  7479. * GPU-accelerated Computer Vision for JavaScript
  7480. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7481. *
  7482. * Licensed under the Apache License, Version 2.0 (the "License");
  7483. * you may not use this file except in compliance with the License.
  7484. * You may obtain a copy of the License at
  7485. *
  7486. * http://www.apache.org/licenses/LICENSE-2.0
  7487. *
  7488. * Unless required by applicable law or agreed to in writing, software
  7489. * distributed under the License is distributed on an "AS IS" BASIS,
  7490. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7491. * See the License for the specific language governing permissions and
  7492. * limitations under the License.
  7493. *
  7494. * keypoints.js
  7495. * Facade for various keypoint detection algorithms
  7496. */
  7497. // FAST corner detector
  7498. const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl')
  7499. .withDefines({ 'FAST_TYPE': 916 })
  7500. .withArguments('corners', 'pyramid', 'lod', 'threshold');
  7501. // Harris corner detector
  7502. const harris = [1, 3, 5, 7].reduce((obj, win) => ((obj[win] =
  7503. (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl')
  7504. .withDefines({ 'WINDOW_SIZE': win })
  7505. .withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian')
  7506. ), obj), {});
  7507. const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl')
  7508. .withArguments('corners', 'iterationNumber');
  7509. const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl')
  7510. .withArguments('corners', 'maxScore', 'quality');
  7511. // Subpixel refinement
  7512. const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl')
  7513. .withDefines({ 'METHOD': 0 })
  7514. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7515. const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl')
  7516. .withDefines({ 'METHOD': 1 })
  7517. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7518. const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl')
  7519. .withDefines({ 'METHOD': 2 })
  7520. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7521. const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl')
  7522. .withDefines({ 'METHOD': 3 })
  7523. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7524. // Scale refinement
  7525. const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl')
  7526. .withDefines({ 'METHOD': 0 })
  7527. .withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7528. const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl')
  7529. .withDefines({ 'METHOD': 1 })
  7530. .withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7531. // Pixel allocation
  7532. const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl')
  7533. .withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  7534. const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl')
  7535. .withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  7536. const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl')
  7537. .withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7538. // ORB descriptors
  7539. const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl')
  7540. .withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
  7541. const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl')
  7542. .withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7543. // Non-maximum suppression
  7544. const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl')
  7545. .withDefines({ 'MULTISCALE': 0 })
  7546. .withArguments('image', 'lodStep');
  7547. const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl')
  7548. .withDefines({ 'MULTISCALE': 1 })
  7549. .withArguments('image', 'lodStep');
  7550. const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl')
  7551. .withArguments('corners');
  7552. const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl')
  7553. .withDefines({ 'USE_LAPLACIAN': 1 })
  7554. .withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
  7555. const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl')
  7556. .withDefines({ 'USE_LAPLACIAN': 0 })
  7557. .withArguments('corners', 'pyramid', 'lodStep');
  7558. const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl')
  7559. .withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
  7560. // Keypoint tracking & optical-flow
  7561. const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => ((obj[win] =
  7562. (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl')
  7563. .withDefines({ 'WINDOW_SIZE': win })
  7564. .withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength')
  7565. ), obj), {});
  7566. const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl')
  7567. .withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7568. // Brute-force matching
  7569. const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl')
  7570. .withDefines({ 'ENCODE_FILTERS': 0 });
  7571. const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl')
  7572. .withDefines({ 'ENCODE_FILTERS': 1 });
  7573. const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl')
  7574. .withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7575. const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl')
  7576. .withDefines({
  7577. 'DESCRIPTOR_SIZE': 32,
  7578. 'NUMBER_OF_KEYPOINTS_PER_PASS': 16,
  7579. })
  7580. .withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7581. const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl')
  7582. .withDefines({
  7583. 'DESCRIPTOR_SIZE': 64,
  7584. 'NUMBER_OF_KEYPOINTS_PER_PASS': 8,
  7585. })
  7586. .withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7587. // LSH-based KNN matching
  7588. const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl')
  7589. .withDefines({ 'ENCODE_FILTERS': 0 });
  7590. const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl')
  7591. .withDefines({ 'ENCODE_FILTERS': 1 });
  7592. const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => ((obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => ((obj[hashSize] = [0, 1, 2].reduce((obj, level) => ((obj[level] =
  7593. (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl')
  7594. .withDefines({
  7595. 'DESCRIPTOR_SIZE': descriptorSize,
  7596. 'HASH_SIZE': hashSize,
  7597. 'LEVEL': level,
  7598. 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
  7599. 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT,
  7600. })
  7601. .withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength')
  7602. ), obj), {})), obj), {})), obj), {});
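// Editor's note: the nested reduce above builds a lookup object indexed as
// lshKnn[descriptorSize][hashSize][level], with descriptorSize taken from
// LSH_ACCEPTABLE_DESCRIPTOR_SIZES, hashSize from LSH_ACCEPTABLE_HASH_SIZES and
// level in {0, 1, 2}. These programs are later declared (see the constructor of
// SpeedyProgramGroupKeypoints below) under names of the form
// `lshKnn${descriptorSize}h${hashSize}lv${level}`.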
  7603. const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl')
  7604. .withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7605. // Keypoint sorting
  7606. const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl')
  7607. .withDefines({ 'STAGE': 1 })
  7608. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7609. const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl')
  7610. .withDefines({ 'STAGE': 2 })
  7611. .withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
  7612. const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl')
  7613. .withDefines({ 'STAGE': 3 })
  7614. .withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
  7615. // Keypoint mixing
  7616. const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl')
  7617. .withDefines({ 'STAGE': 1 })
  7618. .withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
  7619. const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl')
  7620. .withDefines({ 'STAGE': 2 })
  7621. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7622. const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl')
  7623. .withDefines({ 'STAGE': 3 })
  7624. .withArguments('array', 'blockSize');
  7625. const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl')
  7626. .withDefines({ 'STAGE': 5 })
  7627. .withArguments('array');
  7628. const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl')
  7629. .withDefines({ 'STAGE': 4 })
  7630. .withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7631. // Keypoint encoding
  7632. const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl')
  7633. .withDefines({ 'FS_OUTPUT_TYPE': 2, 'STAGE': 1 })
  7634. .withArguments('corners');
  7635. const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl')
  7636. .withDefines({ 'FS_OUTPUT_TYPE': 2, 'FS_USE_CUSTOM_PRECISION': 1, 'STAGE': 2 })
  7637. .withArguments('lookupTable', 'blockSize', 'width', 'height');
  7638. const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl')
  7639. .withDefines({ 'STAGE': -1 })
  7640. .withArguments('lookupTable');
  7641. const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl')
  7642. .withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
  7643. const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl')
  7644. .withArguments('corners', 'imageSize');
  7645. const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl')
  7646. .withDefines({ 'MAX_ITERATIONS': 6 }) // dependent texture reads :(
  7647. .withArguments('offsetsImage', 'imageSize');
  7648. const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl')
  7649. .withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7650. const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl')
  7651. .withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7652. const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl')
  7653. .withArguments();
  7654. const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl')
  7655. .withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7656. const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl')
  7657. .withDefines({
  7658. // UBOs can hold at least 16KB of data;
  7659. // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
  7660. // according to the GL ES 3 reference.
  7661. // Each keypoint uses 16 bytes (vec4)
  7662. 'BUFFER_SIZE': 1024 //16384 / 16
  7663. })
  7664. .withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
  7665. // Geometric transformations
  7666. const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl')
  7667. .withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7668. // Keypoint filters
  7669. const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl')
  7670. .withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7671. const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl')
  7672. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7673. const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl')
  7674. .withDefines({ 'DESCRIPTOR_SIZE': 32 })
  7675. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7676. const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl')
  7677. .withDefines({ 'DESCRIPTOR_SIZE': 64 })
  7678. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7679. // Other utilities
  7680. const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl')
  7681. .withDefines({ 'PERMUTATION_MAXLEN': 2048 })
  7682. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7683. const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl')
  7684. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7685. /**
  7686. * SpeedyProgramGroupKeypoints
  7687. * Keypoint detection
  7688. */
  7689. class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup
  7690. {
  7691. /**
  7692. * Class constructor
  7693. * @param {SpeedyGPU} gpu
  7694. */
  7695. constructor(gpu)
  7696. {
  7697. super(gpu);
  7698. this
  7699. //
  7700. // FAST corner detector
  7701. //
  7702. .declare('fast9_16', fast9_16, {
  7703. ...this.program.usesPingpongRendering()
  7704. })
  7705. //
  7706. // Harris corner detector
  7707. //
  7708. .declare('harris1', harris[1], {
  7709. ...this.program.usesPingpongRendering()
  7710. })
  7711. .declare('harris3', harris[3], {
  7712. ...this.program.usesPingpongRendering()
  7713. })
  7714. .declare('harris5', harris[5], {
  7715. ...this.program.usesPingpongRendering()
  7716. })
  7717. .declare('harris7', harris[7], {
  7718. ...this.program.usesPingpongRendering()
  7719. })
  7720. .declare('harrisScoreFindMax', harrisScoreFindMax, {
  7721. ...this.program.usesPingpongRendering()
  7722. })
  7723. .declare('harrisScoreCutoff', harrisScoreCutoff)
  7724. //
  7725. // Subpixel refinement
  7726. //
  7727. .declare('subpixelQuadratic1d', subpixelQuadratic1d)
  7728. .declare('subpixelTaylor2d', subpixelTaylor2d)
  7729. .declare('subpixelBicubic', subpixelBicubic)
  7730. .declare('subpixelBilinear', subpixelBilinear)
  7731. //
  7732. // Scale refinement
  7733. //
  7734. .declare('refineScaleLoG', refineScaleLoG)
  7735. .declare('refineScaleFAST916', refineScaleFAST916)
  7736. //
  7737. // Pixel allocation
  7738. //
  7739. .declare('allocateDescriptors', allocateDescriptors)
  7740. .declare('allocateExtra', allocateExtra)
  7741. .declare('transferToExtra', transferToExtra)
  7742. //
  7743. // ORB descriptors
  7744. //
  7745. .declare('orbDescriptor', orbDescriptor)
  7746. .declare('orbOrientation', orbOrientation)
  7747. //
  7748. // Non-maximum suppression
  7749. //
  7750. .declare('nonmax', nonMaxSuppression)
  7751. .declare('pyrnonmax', multiscaleNonMaxSuppression)
  7752. .declare('nonmaxSpace', nonmaxSpace)
  7753. .declare('nonmaxScale', nonmaxScale)
  7754. .declare('nonmaxScaleSimple', nonmaxScaleSimple)
  7755. .declare('laplacian', laplacian)
  7756. //
  7757. // LK optical-flow
  7758. //
  7759. .declare('lk21', lk[21], {
  7760. ...this.program.usesPingpongRendering()
  7761. })
  7762. .declare('lk19', lk[19], {
  7763. ...this.program.usesPingpongRendering()
  7764. })
  7765. .declare('lk17', lk[17], {
  7766. ...this.program.usesPingpongRendering()
  7767. })
  7768. .declare('lk15', lk[15], {
  7769. ...this.program.usesPingpongRendering()
  7770. })
  7771. .declare('lk13', lk[13], {
  7772. ...this.program.usesPingpongRendering()
  7773. })
  7774. .declare('lk11', lk[11], {
  7775. ...this.program.usesPingpongRendering()
  7776. })
  7777. .declare('lk9', lk[9], {
  7778. ...this.program.usesPingpongRendering()
  7779. })
  7780. .declare('lk7', lk[7], {
  7781. ...this.program.usesPingpongRendering()
  7782. })
  7783. .declare('lk5', lk[5], {
  7784. ...this.program.usesPingpongRendering()
  7785. })
  7786. .declare('lk3', lk[3], {
  7787. ...this.program.usesPingpongRendering()
  7788. })
  7789. .declare('transferFlow', transferFlow)
  7790. //
  7791. // Brute-force KNN matching
  7792. //
  7793. .declare('bfMatcherInitCandidates', bfMatcherInitCandidates)
  7794. .declare('bfMatcherInitFilters', bfMatcherInitFilters)
  7795. .declare('bfMatcherTransfer', bfMatcherTransfer, {
  7796. ...this.program.usesPingpongRendering()
  7797. })
  7798. .declare('bfMatcher32', bfMatcher32, {
  7799. ...this.program.usesPingpongRendering()
  7800. })
  7801. .declare('bfMatcher64', bfMatcher64, {
  7802. ...this.program.usesPingpongRendering()
  7803. })
  7804. //
  7805. // LSH-based KNN matching
  7806. //
  7807. .declare('lshKnnInitCandidates', lshKnnInitCandidates)
  7808. .declare('lshKnnInitFilters', lshKnnInitFilters)
  7809. .declare('lshKnnTransfer', lshKnnTransfer, {
  7810. ...this.program.usesPingpongRendering()
  7811. })
  7812. //
  7813. // Keypoint sorting
  7814. //
  7815. .declare('sortCreatePermutation', sortCreatePermutation)
  7816. .declare('sortMergePermutation', sortMergePermutation, {
  7817. ...this.program.usesPingpongRendering()
  7818. })
  7819. .declare('sortApplyPermutation', sortApplyPermutation)
  7820. //
  7821. // Keypoint mixing
  7822. //
  7823. .declare('mixKeypointsPreInit', mixKeypointsPreInit)
  7824. .declare('mixKeypointsInit', mixKeypointsInit)
  7825. .declare('mixKeypointsSort', mixKeypointsSort, {
  7826. ...this.program.usesPingpongRendering()
  7827. })
  7828. .declare('mixKeypointsView', mixKeypointsView)
  7829. .declare('mixKeypointsApply', mixKeypointsApply)
  7830. //
  7831. // Keypoint encoders
  7832. //
  7833. .declare('encodeNullKeypoints', encodeNullKeypoints)
  7834. .declare('encodeKeypoints', encodeKeypoints)
  7835. .declare('initLookupTable', initLookupTable)
  7836. .declare('sortLookupTable', sortLookupTable, {
  7837. ...this.program.usesPingpongRendering()
  7838. })
  7839. .declare('viewLookupTable', viewLookupTable)
  7840. .declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets)
  7841. .declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, {
  7842. ...this.program.usesPingpongRendering()
  7843. })
  7844. .declare('encodeKeypointPositions', encodeKeypointPositions, {
  7845. ...this.program.usesPingpongRendering()
  7846. })
  7847. .declare('encodeKeypointProperties', encodeKeypointProperties)
  7848. .declare('transferOrientation', transferOrientation)
  7849. .declare('uploadKeypoints', uploadKeypoints, {
  7850. ...this.program.usesPingpongRendering()
  7851. })
  7852. //
  7853. // Geometric transformations
  7854. //
  7855. .declare('applyHomography', applyHomography)
  7856. //
  7857. // Keypoint filters
  7858. //
  7859. .declare('clipBorder', clipBorder)
  7860. .declare('distanceFilter', distanceFilter)
  7861. .declare('hammingDistanceFilter32', hammingDistanceFilter32)
  7862. .declare('hammingDistanceFilter64', hammingDistanceFilter64)
  7863. //
  7864. // Other utilities
  7865. //
  7866. .declare('shuffle', shuffle)
  7867. .declare('clip', clip)
  7868. ;
  7869. //
  7870. // LSH-based KNN matching
  7871. //
  7872. for(const descriptorSize of Object.keys(lshKnn)) {
  7873. for(const hashSize of Object.keys(lshKnn[descriptorSize])) {
  7874. for(const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
  7875. const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
  7876. this.declare(name, lshKnn[descriptorSize][hashSize][level], {
  7877. ...this.program.usesPingpongRendering()
  7878. });
  7879. }
  7880. }
  7881. }
  7882. }
  7883. }
  7884. ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
  7885. /*
  7886. * speedy-vision.js
  7887. * GPU-accelerated Computer Vision for JavaScript
  7888. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7889. *
  7890. * Licensed under the Apache License, Version 2.0 (the "License");
  7891. * you may not use this file except in compliance with the License.
  7892. * You may obtain a copy of the License at
  7893. *
  7894. * http://www.apache.org/licenses/LICENSE-2.0
  7895. *
  7896. * Unless required by applicable law or agreed to in writing, software
  7897. * distributed under the License is distributed on an "AS IS" BASIS,
  7898. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7899. * See the License for the specific language governing permissions and
  7900. * limitations under the License.
  7901. *
  7902. * pyramids.js
  7903. * Image pyramids
  7904. */
  7905. //
  7906. // Shaders
  7907. //
  7908. const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
  7909. const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
  7910. /**
  7911. * SpeedyProgramGroupPyramids
  7912. * Image pyramids
  7913. */
  7914. class SpeedyProgramGroupPyramids extends SpeedyProgramGroup
  7915. {
  7916. /**
  7917. * Class constructor
  7918. * @param {SpeedyGPU} gpu
  7919. */
  7920. constructor(gpu)
  7921. {
  7922. super(gpu);
  7923. this
  7924. // upsampling & downsampling
  7925. .declare('upsample2', upsample2)
  7926. .declare('downsample2', downsample2)
  7927. // separable kernels for gaussian smoothing
  7928. // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
  7929. // pick a = 0.4 for gaussian approximation (sigma = 1)
  7930. .declare('smoothX', (0,convolution.convX)([
  7931. 0.05, 0.25, 0.4, 0.25, 0.05
  7932. ]))
  7933. .declare('smoothY', (0,convolution.convY)([
  7934. 0.05, 0.25, 0.4, 0.25, 0.05
  7935. ]))
  7936. /*
  7937. .declare('reduce', conv2D([
  7938. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
  7939. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7940. 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
  7941. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7942. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
  7943. ]))
  7944. */
  7945. // smoothing for 2x image
  7946. // same rules as above with sum(k) = 2
  7947. .declare('smoothX2', (0,convolution.convX)([
  7948. 0.1, 0.5, 0.8, 0.5, 0.1
  7949. // NOTE: this would saturate the image, but we apply it
  7950. // on a 2x upsampled version with lots of zero pixels
  7951. ]))
  7952. .declare('smoothY2', (0,convolution.convY)([
  7953. 0.1, 0.5, 0.8, 0.5, 0.1
  7954. ], 1.0 / 2.0))
  7955. ;
  7956. }
  7957. }
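// Editor's note: a quick check of the kernel constraints mentioned in the
// constructor above, with a = 0.4, b = 0.25, c = 0.05 (the [c, b, a, b, c]
// kernel used by smoothX/smoothY):
//   a + 2*c       === 2*b   // 0.4 + 0.10 === 0.5
//   a + 2*b + 2*c === 1     // 0.4 + 0.50 + 0.10 === 1
// The 2x kernel [0.1, 0.5, 0.8, 0.5, 0.1] used by smoothX2/smoothY2 follows the
// same rules with sum(k) = 2, as noted above.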
  7958. ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
  7959. /*
  7960. * speedy-vision.js
  7961. * GPU-accelerated Computer Vision for JavaScript
  7962. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7963. *
  7964. * Licensed under the Apache License, Version 2.0 (the "License");
  7965. * you may not use this file except in compliance with the License.
  7966. * You may obtain a copy of the License at
  7967. *
  7968. * http://www.apache.org/licenses/LICENSE-2.0
  7969. *
  7970. * Unless required by applicable law or agreed to in writing, software
  7971. * distributed under the License is distributed on an "AS IS" BASIS,
  7972. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7973. * See the License for the specific language governing permissions and
  7974. * limitations under the License.
  7975. *
  7976. * transforms.js
  7977. * Geometric transformations
  7978. */
  7979. //
  7980. // Shaders
  7981. //
  7982. // Perspective warp
  7983. const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl')
  7984. .withArguments('image', 'inverseHomography');
  7985. // Resize image
  7986. const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl')
  7987. .withDefines({
  7988. 'INTERPOLATION_METHOD': 0 // Nearest neighbors
  7989. })
  7990. .withArguments('image');
  7991. const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl')
  7992. .withDefines({
  7993. 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
  7994. })
  7995. .withArguments('image');
  7996. // Additive mix (TODO create a new program group?)
  7997. const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl')
  7998. .withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
  7999. /**
  8000. * SpeedyProgramGroupTransforms
  8001. * Geometric transformations
  8002. */
  8003. class SpeedyProgramGroupTransforms extends SpeedyProgramGroup
  8004. {
  8005. /**
  8006. * Class constructor
  8007. * @param {SpeedyGPU} gpu
  8008. */
  8009. constructor(gpu)
  8010. {
  8011. super(gpu);
  8012. this
  8013. .declare('warpPerspective', warpPerspective)
  8014. .declare('resizeNearest', resizeNearest)
  8015. .declare('resizeBilinear', resizeBilinear)
  8016. .declare('additiveMix', additiveMix)
  8017. ;
  8018. }
  8019. }
  8020. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
  8021. /*
  8022. * speedy-vision.js
  8023. * GPU-accelerated Computer Vision for JavaScript
  8024. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8025. *
  8026. * Licensed under the Apache License, Version 2.0 (the "License");
  8027. * you may not use this file except in compliance with the License.
  8028. * You may obtain a copy of the License at
  8029. *
  8030. * http://www.apache.org/licenses/LICENSE-2.0
  8031. *
  8032. * Unless required by applicable law or agreed to in writing, software
  8033. * distributed under the License is distributed on an "AS IS" BASIS,
  8034. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8035. * See the License for the specific language governing permissions and
  8036. * limitations under the License.
  8037. *
  8038. * speedy-program-center.js
  8039. * An access point to all programs that run on the GPU
  8040. */
  8041. /**
8042. * An access point to all programs that run on the GPU
  8043. * All program groups can be accessed via this class
  8044. */
  8045. class SpeedyProgramCenter
  8046. {
  8047. /**
  8048. * Class constructor
  8049. * @param {SpeedyGPU} gpu reference to SpeedyGPU
  8050. */
  8051. constructor(gpu)
  8052. {
  8053. // Note: we instantiate the program groups lazily
  8054. /** @type {SpeedyGPU} reference to SpeedyGPU */
  8055. this._gpu = gpu;
  8056. /** @type {SpeedyProgramGroupFilters} image filters */
  8057. this._filters = null;
  8058. /** @type {SpeedyProgramGroupTransforms} geometric transformations */
  8059. this._transforms = null;
  8060. /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
  8061. this._pyramids = null;
  8062. /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
  8063. this._keypoints = null;
  8064. /** @type {SpeedyProgramGroupUtils} utility programs */
  8065. this._utils = null;
  8066. }
  8067. /**
  8068. * Image filters & convolutions
  8069. * @returns {SpeedyProgramGroupFilters}
  8070. */
  8071. get filters()
  8072. {
  8073. return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
  8074. }
  8075. /**
  8076. * Geometric transformations
  8077. * @returns {SpeedyProgramGroupTransforms}
  8078. */
  8079. get transforms()
  8080. {
  8081. return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
  8082. }
  8083. /**
  8084. * Image pyramids & scale-space
  8085. * @returns {SpeedyProgramGroupPyramids}
  8086. */
  8087. get pyramids()
  8088. {
  8089. return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
  8090. }
  8091. /**
  8092. * Keypoint detection & description
  8093. * @returns {SpeedyProgramGroupKeypoints}
  8094. */
  8095. get keypoints()
  8096. {
  8097. return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
  8098. }
  8099. /**
  8100. * Utility programs
  8101. * @returns {SpeedyProgramGroupUtils}
  8102. */
  8103. get utils()
  8104. {
  8105. return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
  8106. }
  8107. /**
  8108. * Release all programs from all groups. You'll
  8109. * no longer be able to use any of them.
  8110. * @returns {null}
  8111. */
  8112. release()
  8113. {
  8114. for(const key in this) {
  8115. if(Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
  8116. const group = this[key];
  8117. if(group instanceof SpeedyProgramGroup)
  8118. group.release();
  8119. }
  8120. }
  8121. return null;
  8122. }
  8123. }
  8124. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
  8125. /*
  8126. * speedy-vision.js
  8127. * GPU-accelerated Computer Vision for JavaScript
  8128. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8129. *
  8130. * Licensed under the Apache License, Version 2.0 (the "License");
  8131. * you may not use this file except in compliance with the License.
  8132. * You may obtain a copy of the License at
  8133. *
  8134. * http://www.apache.org/licenses/LICENSE-2.0
  8135. *
  8136. * Unless required by applicable law or agreed to in writing, software
  8137. * distributed under the License is distributed on an "AS IS" BASIS,
  8138. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8139. * See the License for the specific language governing permissions and
  8140. * limitations under the License.
  8141. *
  8142. * speedy-texture-pool.js
  8143. * Texture pool
  8144. */
  8145. // Constants
  8146. const DEFAULT_CAPACITY = 1024;
  8147. const BUCKET = Symbol('Bucket');
  8148. /*
  8149. === Heuristics to figure out the capacity of a texture pool ===
  8150. 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
  8151. 2. Figure out the average texture size in your application (say, 640x360 pixels).
  8152. 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
  8153. uses 4 bytes (RGBA format).
  8154. 4. Divide the maximum amount of VRAM by the average texture size in bytes
  8155. (say, 72). That's the capacity of the pool.
  8156. Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
  8157. Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  8158. */
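// Editor's note: the heuristic above, written out with its own example numbers
// (a 64 MB budget and 640x360 RGBA textures; purely illustrative):
//   const maxPoolVRAM = 64 * 1024 * 1024;                        // 1. VRAM budget, in bytes
//   const avgTextureBytes = 640 * 360 * 4;                       // 2-3. average texture size (921600 bytes)
//   const capacity = Math.floor(maxPoolVRAM / avgTextureBytes);  // 4. pool capacity ≈ 72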
  8159. /**
  8160. * @typedef {number} TextureBucketIndex index of a bucket in a pool
  8161. */
  8162. /**
  8163. * A bucket
  8164. */
  8165. class TextureBucket
  8166. {
  8167. /**
  8168. * Constructor
  8169. * @param {SpeedyDrawableTexture} texture managed texture
  8170. * @param {TextureBucketIndex} index index of this bucket
  8171. * @param {TextureBucketIndex} next index of the next bucket
  8172. */
  8173. constructor(texture, index, next)
  8174. {
  8175. /** @type {SpeedyDrawableTexture} managed texture */
  8176. this.texture = texture;
  8177. /** @type {TextureBucketIndex} index of this bucket */
  8178. this.index = index;
  8179. /** @type {TextureBucketIndex} index of the next bucket */
  8180. this.next = next;
  8181. /** @type {boolean} whether the texture is available or not */
  8182. this.free = true;
  8183. }
  8184. }
  8185. /**
  8186. * Texture pool
  8187. */
  8188. class SpeedyTexturePool
  8189. {
  8190. /**
  8191. * Constructor
  8192. * @param {SpeedyGPU} gpu
  8193. * @param {number} [capacity] number of textures in the pool
  8194. */
  8195. constructor(gpu, capacity = DEFAULT_CAPACITY)
  8196. {
  8197. utils/* Utils */.A.assert(capacity > 0);
  8198. /** @type {TextureBucket[]} buckets */
  8199. this._bucket = Array.from({ length: capacity }, (_, i) => new TextureBucket(null, i, i - 1));
  8200. /** @type {TextureBucketIndex} index of an available bucket */
  8201. this._head = capacity - 1;
  8202. /** @type {SpeedyGPU} GPU instance */
  8203. this._gpu = gpu;
  8204. }
  8205. /**
  8206. * Get a texture from the pool
  8207. * @returns {SpeedyDrawableTexture}
  8208. */
  8209. allocate()
  8210. {
  8211. if(this._head < 0)
  8212. throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
  8213. const bucket = this._bucket[this._head];
  8214. bucket.free = false;
  8215. this._head = bucket.next;
  8216. if(bucket.texture == null) // lazy instantiation
  8217. bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
  8218. return bucket.texture;
  8219. }
  8220. /**
  8221. * Put a texture back in the pool
  8222. * @param {SpeedyDrawableTexture} texture
  8223. * @returns {null}
  8224. */
  8225. free(texture)
  8226. {
  8227. const bucket = texture[BUCKET];
  8228. utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
  8229. bucket.next = this._head;
  8230. bucket.free = true;
  8231. this._head = bucket.index;
  8232. return null;
  8233. }
  8234. /**
  8235. * Release the texture pool
  8236. * @returns {null}
  8237. */
  8238. release()
  8239. {
  8240. for(let i = 0; i < this._bucket.length; i++) {
  8241. if(this._bucket[i].texture != null)
  8242. this._bucket[i].texture = this._bucket[i].texture.release();
  8243. }
  8244. return null;
  8245. }
  8246. /**
  8247. * Create a texture with a reference to a bucket
  8248. * @param {WebGL2RenderingContext} gl
  8249. * @param {TextureBucket} bucket
  8250. * @returns {SpeedyDrawableTexture}
  8251. */
  8252. static _createManagedTexture(gl, bucket)
  8253. {
  8254. const texture = new SpeedyDrawableTexture(gl, 1, 1);
  8255. return Object.defineProperty(texture, BUCKET, {
  8256. configurable: false,
  8257. enumerable: false,
  8258. writable: false,
  8259. value: bucket
  8260. });
  8261. }
  8262. }
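// Editor's note: a minimal usage sketch of the pool, assuming a SpeedyGPU
// instance named `gpu` is available (illustrative only, not library documentation):
//   const pool = new SpeedyTexturePool(gpu, 16); // up to 16 lazily-allocated textures
//   const tex = pool.allocate();                 // take a texture (throws OutOfMemoryError if exhausted)
//   /* ... render to tex ... */
//   pool.free(tex);                              // return it; double frees are rejected by an assertion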
  8263. // EXTERNAL MODULE: ./src/utils/types.js
  8264. var types = __nested_webpack_require_320900__(6467);
  8265. ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
  8266. /*
  8267. * speedy-vision.js
  8268. * GPU-accelerated Computer Vision for JavaScript
  8269. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8270. *
  8271. * Licensed under the Apache License, Version 2.0 (the "License");
  8272. * you may not use this file except in compliance with the License.
  8273. * You may obtain a copy of the License at
  8274. *
  8275. * http://www.apache.org/licenses/LICENSE-2.0
  8276. *
  8277. * Unless required by applicable law or agreed to in writing, software
  8278. * distributed under the License is distributed on an "AS IS" BASIS,
  8279. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8280. * See the License for the specific language governing permissions and
  8281. * limitations under the License.
  8282. *
  8283. * speedy-media-source.js
  8284. * Wrappers around <img>, <video>, <canvas>, etc.
  8285. */
  8286. /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
  8287. /** Internal token for protected constructors */
  8288. const PRIVATE_TOKEN = Symbol();
  8289. /**
  8290. * An abstract media source: a wrapper around native
  8291. * elements such as: HTMLImageElement, HTMLVideoElement,
  8292. * and so on
  8293. * @abstract
  8294. */
  8295. class SpeedyMediaSource
  8296. {
  8297. /**
  8298. * @protected Constructor
  8299. * @param {symbol} token
  8300. */
  8301. constructor(token)
  8302. {
  8303. // the constructor is not public
  8304. if(token !== PRIVATE_TOKEN)
  8305. throw new utils_errors/* IllegalOperationError */.Er();
  8306. /** @type {SpeedyMediaSourceNativeElement} underlying media object */
  8307. this._data = null;
  8308. }
  8309. /**
  8310. * Load a media source
  8311. * @param {SpeedyMediaSourceNativeElement} wrappedObject
  8312. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8313. */
  8314. static load(wrappedObject)
  8315. {
  8316. if(wrappedObject instanceof HTMLImageElement)
  8317. return SpeedyImageMediaSource.load(wrappedObject);
  8318. else if(wrappedObject instanceof HTMLVideoElement)
  8319. return SpeedyVideoMediaSource.load(wrappedObject);
  8320. else if(wrappedObject instanceof HTMLCanvasElement)
  8321. return SpeedyCanvasMediaSource.load(wrappedObject);
  8322. else if(typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas)
  8323. return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);
  8324. else if(wrappedObject instanceof ImageBitmap)
  8325. return SpeedyBitmapMediaSource.load(wrappedObject);
  8326. else if(wrappedObject instanceof ImageData)
  8327. return SpeedyDataMediaSource.load(wrappedObject);
  8328. else
  8329. throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
  8330. }
  8331. /**
  8332. * The underlying wrapped object
  8333. * @returns {SpeedyMediaSourceNativeElement}
  8334. */
  8335. get data()
  8336. {
  8337. return this._data;
  8338. }
  8339. /**
  8340. * Is the underlying media loaded?
  8341. * @returns {boolean}
  8342. */
  8343. isLoaded()
  8344. {
  8345. return this._data !== null;
  8346. }
  8347. /**
  8348. * The type of the underlying media source
  8349. * @abstract
  8350. * @returns {MediaType}
  8351. */
  8352. get type()
  8353. {
  8354. throw new utils_errors/* AbstractMethodError */.aQ();
  8355. }
  8356. /**
  8357. * Media width, in pixels
  8358. * @abstract
  8359. * @returns {number}
  8360. */
  8361. get width()
  8362. {
  8363. throw new utils_errors/* AbstractMethodError */.aQ();
  8364. }
  8365. /**
  8366. * Media height, in pixels
  8367. * @abstract
  8368. * @returns {number}
  8369. */
  8370. get height()
  8371. {
  8372. throw new utils_errors/* AbstractMethodError */.aQ();
  8373. }
  8374. /**
  8375. * Clone this media source
  8376. * @abstract
  8377. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8378. */
  8379. clone()
  8380. {
  8381. throw new utils_errors/* AbstractMethodError */.aQ();
  8382. }
  8383. /**
  8384. * Release resources associated with this object
  8385. * @returns {null}
  8386. */
  8387. release()
  8388. {
  8389. return (this._data = null);
  8390. }
  8391. /**
  8392. * Load the underlying media
  8393. * @abstract
  8394. * @param {SpeedyMediaSourceNativeElement} element
  8395. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8396. */
  8397. _load(element)
  8398. {
  8399. throw new utils_errors/* AbstractMethodError */.aQ();
  8400. }
  8401. /**
  8402. * Wait for an event to be triggered in an element
  8403. * @param {Element} element
  8404. * @param {string} eventName
  8405. * @param {number} [timeout] in ms
  8406. * @returns {SpeedyPromise<Element>}
  8407. */
  8408. static _waitUntil(element, eventName, timeout = 30000)
  8409. {
  8410. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  8411. utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
  8412. const timer = setTimeout(() => {
  8413. clear();
  8414. reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
  8415. }, timeout);
  8416. function clear()
  8417. {
  8418. clearTimeout(timer);
  8419. element.removeEventListener('error', handleError, false);
  8420. element.removeEventListener(eventName, handleSuccess, false);
  8421. }
  8422. function handleError()
  8423. {
  8424. const hasError = (element.error !== null && typeof element.error === 'object');
  8425. const error = hasError ? element.error : ({ code: -1, message: '' });
  8426. const info = `${error.message} (error code ${error.code})`;
  8427. clear();
  8428. reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
  8429. }
  8430. function handleSuccess()
  8431. {
  8432. clear();
  8433. resolve(element);
  8434. }
  8435. element.addEventListener('error', handleError, false);
  8436. element.addEventListener(eventName, handleSuccess, false);
  8437. });
  8438. }
  8439. }
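// Editor's note: SpeedyMediaSource.load() dispatches on the type of the wrapped
// element; a hypothetical usage sketch (the <img> query is illustrative only):
//   SpeedyMediaSource.load(document.querySelector('img')).then(source => {
//       console.log(source.type, source.width, source.height);
//   });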
  8440. /**
  8441. * Image media source:
  8442. * a wrapper around HTMLImageElement
  8443. */
  8444. class SpeedyImageMediaSource extends SpeedyMediaSource
  8445. {
  8446. /**
  8447. * @private Constructor
  8448. * @param {symbol} token
  8449. */
  8450. constructor(token)
  8451. {
  8452. super(token);
  8453. /** @type {HTMLImageElement} image element */
  8454. this._data = null;
  8455. }
  8456. /**
  8457. * The underlying wrapped object
  8458. * @returns {HTMLImageElement}
  8459. */
  8460. get data()
  8461. {
  8462. return this._data;
  8463. }
  8464. /**
  8465. * The type of the underlying media source
  8466. * @returns {MediaType}
  8467. */
  8468. get type()
  8469. {
  8470. return types/* MediaType */.zu.Image;
  8471. }
  8472. /**
  8473. * Media width, in pixels
  8474. * @returns {number}
  8475. */
  8476. get width()
  8477. {
  8478. return this._data ? this._data.naturalWidth : 0;
  8479. }
  8480. /**
  8481. * Media height, in pixels
  8482. * @returns {number}
  8483. */
  8484. get height()
  8485. {
  8486. return this._data ? this._data.naturalHeight : 0;
  8487. }
  8488. /**
  8489. * Clone this media source
  8490. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8491. */
  8492. clone()
  8493. {
  8494. if(this._data == null)
  8495. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8496. const newNode = /** @type {HTMLImageElement} */ ( this._data.cloneNode(true) );
  8497. return SpeedyImageMediaSource.load(newNode);
  8498. }
  8499. /**
  8500. * Load the underlying media
  8501. * @param {HTMLImageElement} image
  8502. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8503. */
  8504. _load(image)
  8505. {
  8506. if(this.isLoaded())
  8507. this.release();
  8508. if(image.complete && image.naturalWidth !== 0) { // already loaded?
  8509. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8510. this._data = image;
  8511. resolve(this);
  8512. });
  8513. }
  8514. else {
  8515. return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
  8516. this._data = image;
  8517. return this;
  8518. });
  8519. }
  8520. }
  8521. /**
  8522. * Load the underlying media
  8523. * @param {HTMLImageElement} image
  8524. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8525. */
  8526. static load(image)
  8527. {
  8528. return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
  8529. }
  8530. }
  8531. /**
  8532. * Video media source:
  8533. * a wrapper around HTMLVideoElement
  8534. */
  8535. class SpeedyVideoMediaSource extends SpeedyMediaSource
  8536. {
  8537. /**
  8538. * @private Constructor
  8539. * @param {symbol} token
  8540. */
  8541. constructor(token)
  8542. {
  8543. super(token);
  8544. /** @type {HTMLVideoElement} video element */
  8545. this._data = null;
  8546. }
  8547. /**
  8548. * The underlying wrapped object
  8549. * @returns {HTMLVideoElement}
  8550. */
  8551. get data()
  8552. {
  8553. return this._data;
  8554. }
  8555. /**
  8556. * The type of the underlying media source
  8557. * @returns {MediaType}
  8558. */
  8559. get type()
  8560. {
  8561. return types/* MediaType */.zu.Video;
  8562. }
  8563. /**
  8564. * Media width, in pixels
  8565. * @returns {number}
  8566. */
  8567. get width()
  8568. {
8569. // Warning: videoWidth & videoHeight may change at any time,
8570. // so these dimensions must not be cached
  8571. return this._data ? this._data.videoWidth : 0;
  8572. }
  8573. /**
  8574. * Media height, in pixels
  8575. * @returns {number}
  8576. */
  8577. get height()
  8578. {
  8579. return this._data ? this._data.videoHeight : 0;
  8580. }
  8581. /**
  8582. * Clone this media source
  8583. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8584. */
  8585. clone()
  8586. {
  8587. if(this._data == null)
  8588. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8589. const newNode = /** @type {HTMLVideoElement} */ ( this._data.cloneNode(true) );
  8590. return SpeedyVideoMediaSource.load(newNode);
  8591. }
  8592. /**
  8593. * Load the underlying media
  8594. * @param {HTMLVideoElement} video
  8595. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8596. */
  8597. _load(video)
  8598. {
  8599. if(this.isLoaded())
  8600. this.release();
  8601. if(video.readyState >= 4) { // already loaded?
  8602. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8603. this._data = video;
  8604. resolve(this);
  8605. });
  8606. }
  8607. else {
8608. // alternative: wait for 'canplay' and use readyState >= 3 instead
  8609. setTimeout(() => video.load());
  8610. return SpeedyMediaSource._waitUntil(video, 'canplaythrough').then(() => {
  8611. this._data = video;
  8612. return this;
  8613. });
  8614. }
  8615. }
  8616. /**
  8617. * Load the underlying media
  8618. * @param {HTMLVideoElement} video
  8619. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8620. */
  8621. static load(video)
  8622. {
  8623. return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
  8624. }
  8625. }
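/*
 * Note on video readiness: SpeedyVideoMediaSource._load() resolves right away
 * when readyState >= 4 (HAVE_ENOUGH_DATA); otherwise it calls video.load() and
 * waits for 'canplaythrough'. A minimal sketch of preparing a <video> so that
 * the wrapper sees it as ready (the file name is hypothetical):
 *
 *   const video = document.createElement('video');
 *   video.src = 'my-video.webm';
 *   video.muted = true;        // required for autoplay in most browsers
 *   video.playsInline = true;
 *   video.play().then(() => {
 *       // playback has started; the wrapper will still wait for
 *       // 'canplaythrough' if readyState is below 4
 *   });
 */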
  8626. /**
  8627. * Canvas media source:
  8628. * a wrapper around HTMLCanvasElement
  8629. */
  8630. class SpeedyCanvasMediaSource extends SpeedyMediaSource
  8631. {
  8632. /**
  8633. * @private Constructor
  8634. * @param {symbol} token
  8635. */
  8636. constructor(token)
  8637. {
  8638. super(token);
  8639. /** @type {HTMLCanvasElement} canvas element */
  8640. this._data = null;
  8641. }
  8642. /**
  8643. * The underlying wrapped object
  8644. * @returns {HTMLCanvasElement}
  8645. */
  8646. get data()
  8647. {
  8648. return this._data;
  8649. }
  8650. /**
  8651. * The type of the underlying media source
  8652. * @returns {MediaType}
  8653. */
  8654. get type()
  8655. {
  8656. return types/* MediaType */.zu.Canvas;
  8657. }
  8658. /**
  8659. * Media width, in pixels
  8660. * @returns {number}
  8661. */
  8662. get width()
  8663. {
  8664. return this._data ? this._data.width : 0;
  8665. }
  8666. /**
  8667. * Media height, in pixels
  8668. * @returns {number}
  8669. */
  8670. get height()
  8671. {
  8672. return this._data ? this._data.height : 0;
  8673. }
  8674. /**
  8675. * Clone this media source
  8676. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8677. */
  8678. clone()
  8679. {
  8680. if(this._data == null)
  8681. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8682. const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
  8683. const newContext = newCanvas.getContext('2d');
  8684. newContext.drawImage(this._data, 0, 0);
  8685. return SpeedyCanvasMediaSource.load(newCanvas);
  8686. }
  8687. /**
  8688. * Load the underlying media
  8689. * @param {HTMLCanvasElement} canvas
  8690. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8691. */
  8692. _load(canvas)
  8693. {
  8694. if(this.isLoaded())
  8695. this.release();
  8696. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8697. this._data = canvas;
  8698. resolve(this);
  8699. });
  8700. }
  8701. /**
  8702. * Load the underlying media
  8703. * @param {HTMLCanvasElement} canvas
  8704. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8705. */
  8706. static load(canvas)
  8707. {
  8708. return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
  8709. }
  8710. }
  8711. /**
  8712. * OffscreenCanvas media source:
  8713. * a wrapper around OffscreenCanvas
  8714. */
  8715. class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource
  8716. {
  8717. /**
  8718. * @private Constructor
  8719. * @param {symbol} token
  8720. */
  8721. constructor(token)
  8722. {
  8723. super(token);
  8724. /** @type {OffscreenCanvas} offscreen canvas element */
  8725. this._data = null;
  8726. }
  8727. /**
  8728. * The underlying wrapped object
  8729. * @returns {OffscreenCanvas}
  8730. */
  8731. get data()
  8732. {
  8733. return this._data;
  8734. }
  8735. /**
  8736. * The type of the underlying media source
  8737. * @returns {MediaType}
  8738. */
  8739. get type()
  8740. {
  8741. return types/* MediaType */.zu.OffscreenCanvas;
  8742. }
  8743. /**
  8744. * Media width, in pixels
  8745. * @returns {number}
  8746. */
  8747. get width()
  8748. {
  8749. return this._data ? this._data.width : 0;
  8750. }
  8751. /**
  8752. * Media height, in pixels
  8753. * @returns {number}
  8754. */
  8755. get height()
  8756. {
  8757. return this._data ? this._data.height : 0;
  8758. }
  8759. /**
  8760. * Clone this media source
  8761. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8762. */
  8763. clone()
  8764. {
  8765. if(this._data == null)
  8766. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8767. const newCanvas = new OffscreenCanvas(this.width, this.height);
  8768. const newContext = newCanvas.getContext('2d');
  8769. newContext.drawImage(this._data, 0, 0);
  8770. return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
  8771. }
  8772. /**
  8773. * Load the underlying media
  8774. * @param {OffscreenCanvas} offscreenCanvas
  8775. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8776. */
  8777. _load(offscreenCanvas)
  8778. {
  8779. if(this.isLoaded())
  8780. this.release();
  8781. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8782. this._data = offscreenCanvas;
  8783. resolve(this);
  8784. });
  8785. }
  8786. /**
  8787. * Load the underlying media
  8788. * @param {OffscreenCanvas} offscreenCanvas
  8789. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8790. */
  8791. static load(offscreenCanvas)
  8792. {
  8793. return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
  8794. }
  8795. }
  8796. /**
  8797. * Bitmap media source:
  8798. * a wrapper around ImageBitmap
  8799. */
  8800. class SpeedyBitmapMediaSource extends SpeedyMediaSource
  8801. {
  8802. /**
  8803. * @private Constructor
  8804. * @param {symbol} token
  8805. */
  8806. constructor(token)
  8807. {
  8808. super(token);
  8809. /** @type {ImageBitmap} image bitmap */
  8810. this._data = null;
  8811. }
  8812. /**
  8813. * The underlying wrapped object
  8814. * @returns {ImageBitmap}
  8815. */
  8816. get data()
  8817. {
  8818. return this._data;
  8819. }
  8820. /**
  8821. * The type of the underlying media source
  8822. * @returns {MediaType}
  8823. */
  8824. get type()
  8825. {
  8826. return types/* MediaType */.zu.Bitmap;
  8827. }
  8828. /**
  8829. * Media width, in pixels
  8830. * @returns {number}
  8831. */
  8832. get width()
  8833. {
  8834. return this._data ? this._data.width : 0;
  8835. }
  8836. /**
  8837. * Media height, in pixels
  8838. * @returns {number}
  8839. */
  8840. get height()
  8841. {
  8842. return this._data ? this._data.height : 0;
  8843. }
  8844. /**
  8845. * Clone this media source
  8846. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8847. */
  8848. clone()
  8849. {
  8850. if(this._data == null)
  8851. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8852. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  8853. createImageBitmap(this._data).then(
  8854. newBitmap => {
  8855. const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
  8856. newSource._load(newBitmap).then(resolve, reject);
  8857. },
  8858. reject
  8859. );
  8860. });
  8861. }
  8862. /**
  8863. * Release resources associated with this object
  8864. * @returns {null}
  8865. */
  8866. release()
  8867. {
  8868. if(this._data != null)
  8869. this._data.close();
  8870. return super.release();
  8871. }
  8872. /**
  8873. * Load the underlying media
  8874. * @param {ImageBitmap} bitmap
  8875. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8876. */
  8877. _load(bitmap)
  8878. {
  8879. if(this.isLoaded())
  8880. this.release();
  8881. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8882. this._data = bitmap;
  8883. resolve(this);
  8884. });
  8885. }
  8886. /**
  8887. * Load the underlying media
  8888. * @param {ImageBitmap} bitmap
  8889. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8890. */
  8891. static load(bitmap)
  8892. {
  8893. return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
  8894. }
  8895. }
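/*
 * Note on ImageBitmap lifetime: unlike the other media sources, this wrapper
 * owns pixel data that must be freed explicitly, which is why release() calls
 * ImageBitmap.close(). A minimal sketch of producing a bitmap to wrap (the
 * canvas is hypothetical):
 *
 *   const canvas = document.createElement('canvas');
 *   canvas.width = canvas.height = 256;
 *   createImageBitmap(canvas).then(bitmap => {
 *       // ... wrap and use the bitmap ...
 *       bitmap.close(); // free the pixel data when done
 *   });
 */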
  8896. /**
  8897. * Data media source:
  8898. * a wrapper around ImageData
  8899. */
  8900. class SpeedyDataMediaSource extends SpeedyMediaSource
  8901. {
  8902. /**
  8903. * @private Constructor
  8904. * @param {symbol} token
  8905. */
  8906. constructor(token)
  8907. {
  8908. super(token);
  8909. /** @type {ImageData} image data */
  8910. this._data = null;
  8911. }
  8912. /**
  8913. * The underlying wrapped object
  8914. * @returns {ImageData}
  8915. */
  8916. get data()
  8917. {
  8918. return this._data;
  8919. }
  8920. /**
  8921. * The type of the underlying media source
  8922. * @returns {MediaType}
  8923. */
  8924. get type()
  8925. {
  8926. return types/* MediaType */.zu.Data;
  8927. }
  8928. /**
  8929. * Media width, in pixels
  8930. * @returns {number}
  8931. */
  8932. get width()
  8933. {
  8934. return this._data ? this._data.width : 0;
  8935. }
  8936. /**
  8937. * Media height, in pixels
  8938. * @returns {number}
  8939. */
  8940. get height()
  8941. {
  8942. return this._data ? this._data.height : 0;
  8943. }
  8944. /**
  8945. * Clone this media source
  8946. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8947. */
  8948. clone()
  8949. {
  8950. if(this._data == null)
  8951. throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8952. const imageDataCopy = new ImageData(
  8953. new Uint8ClampedArray(this._data.data),
  8954. this._data.width,
  8955. this._data.height
  8956. )
  8957. return SpeedyDataMediaSource.load(imageDataCopy);
  8958. }
  8959. /**
  8960. * Load the underlying media
  8961. * @param {ImageData} imageData
  8962. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8963. */
  8964. _load(imageData)
  8965. {
  8966. if(this.isLoaded())
  8967. this.release();
  8968. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8969. this._data = imageData;
  8970. resolve(this);
  8971. });
  8972. }
  8973. /**
  8974. * Load the underlying media
  8975. * @param {ImageData} imageData
  8976. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8977. */
  8978. static load(imageData)
  8979. {
  8980. return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
  8981. }
  8982. }
  8983. // EXTERNAL MODULE: ./src/utils/observable.js
  8984. var observable = __nested_webpack_require_320900__(4109);
  8985. ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
  8986. /*
  8987. * speedy-vision.js
  8988. * GPU-accelerated Computer Vision for JavaScript
  8989. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8990. *
  8991. * Licensed under the Apache License, Version 2.0 (the "License");
  8992. * you may not use this file except in compliance with the License.
  8993. * You may obtain a copy of the License at
  8994. *
  8995. * http://www.apache.org/licenses/LICENSE-2.0
  8996. *
  8997. * Unless required by applicable law or agreed to in writing, software
  8998. * distributed under the License is distributed on an "AS IS" BASIS,
  8999. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9000. * See the License for the specific language governing permissions and
  9001. * limitations under the License.
  9002. *
  9003. * speedy-gpu.js
  9004. * GPU-accelerated routines for Computer Vision
  9005. */
  9006. /**
  9007. * GPU-accelerated routines for Computer Vision
  9008. */
  9009. class SpeedyGPU extends observable/* Observable */.c
  9010. {
  9011. /**
  9012. * Constructor
  9013. */
  9014. constructor()
  9015. {
  9016. super();
  9017. /** @type {SpeedyGL} cached reference */
  9018. this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
  9019. /** @type {SpeedyProgramCenter} GPU-based programs */
  9020. this._programs = new SpeedyProgramCenter(this);
  9021. /** @type {SpeedyTexturePool} texture pool */
  9022. this._texturePool = new SpeedyTexturePool(this);
  9023. // recreate the state if necessary
  9024. this._speedyGL.subscribe(this._reset, this);
  9025. }
  9026. /**
  9027. * Access point to all GPU programs
  9028. * @returns {SpeedyProgramCenter}
  9029. */
  9030. get programs()
  9031. {
  9032. return this._programs;
  9033. }
  9034. /**
  9035. * The WebGL Rendering Context
  9036. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  9037. * @returns {WebGL2RenderingContext}
  9038. */
  9039. get gl()
  9040. {
  9041. return this._speedyGL.gl;
  9042. }
  9043. /**
  9044. * Internal canvas
  9045. * @returns {HTMLCanvasElement}
  9046. */
  9047. get canvas()
  9048. {
  9049. return this._speedyGL.canvas;
  9050. }
  9051. /**
  9052. * Texture pool
  9053. * @returns {SpeedyTexturePool}
  9054. */
  9055. get texturePool()
  9056. {
  9057. return this._texturePool;
  9058. }
  9059. /**
  9060. * Renders a texture to the canvas
  9061. * @param {SpeedyTexture} texture
  9062. * @returns {HTMLCanvasElement} returned for convenience
  9063. */
  9064. renderToCanvas(texture)
  9065. {
  9066. const width = texture.width;
  9067. const height = texture.height;
  9068. const canvas = this.canvas;
  9069. // do we need to resize the canvas?
  9070. if(width > canvas.width || height > canvas.height) {
  9071. utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
  9072. canvas.width = width;
  9073. canvas.height = height;
  9074. }
  9075. // render
  9076. this.programs.utils.renderToCanvas.outputs(width, height, null);
  9077. this.programs.utils.renderToCanvas(texture);
  9078. // done!
  9079. return canvas;
  9080. }
  9081. /**
  9082. * Upload an image to the GPU
  9083. * @param {SpeedyMediaSource} source
  9084. * @param {SpeedyTexture} outputTexture
  9085. * @returns {SpeedyTexture} outputTexture
  9086. */
  9087. upload(source, outputTexture)
  9088. {
  9089. return outputTexture.upload(source.data, source.width, source.height);
  9090. }
  9091. /**
  9092. * Releases resources
  9093. * @returns {null}
  9094. */
  9095. release()
  9096. {
  9097. utils/* Utils */.A.assert(!this.isReleased());
  9098. // release internal components
  9099. this._programs = this._programs.release();
  9100. this._texturePool = this._texturePool.release();
  9101. // unsubscribe
  9102. this._speedyGL.unsubscribe(this._reset);
  9103. return null;
  9104. }
  9105. /**
  9106. * Has this SpeedyGPU been released?
  9107. * @returns {boolean}
  9108. */
  9109. isReleased()
  9110. {
  9111. return this._programs == null;
  9112. }
  9113. /**
  9114. * Lose & restore the WebGL context (useful for testing purposes)
  9115. * @return {SpeedyPromise<void>} resolves as soon as the context is restored
  9116. */
  9117. loseAndRestoreWebGLContext()
  9118. {
  9119. return this._speedyGL.loseAndRestoreContext().then(() => void(0));
  9120. }
  9121. /**
  9122. * Reset the internal state
  9123. * (called on context reset)
  9124. */
  9125. _reset()
  9126. {
  9127. if(this.isReleased())
  9128. return;
  9129. this._programs = new SpeedyProgramCenter(this);
  9130. this._texturePool = new SpeedyTexturePool(this);
  9131. this._notify();
  9132. }
  9133. }
  9134. ;// CONCATENATED MODULE: ./src/core/speedy-size.js
  9135. /*
  9136. * speedy-vision.js
  9137. * GPU-accelerated Computer Vision for JavaScript
  9138. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9139. *
  9140. * Licensed under the Apache License, Version 2.0 (the "License");
  9141. * you may not use this file except in compliance with the License.
  9142. * You may obtain a copy of the License at
  9143. *
  9144. * http://www.apache.org/licenses/LICENSE-2.0
  9145. *
  9146. * Unless required by applicable law or agreed to in writing, software
  9147. * distributed under the License is distributed on an "AS IS" BASIS,
  9148. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9149. * See the License for the specific language governing permissions and
  9150. * limitations under the License.
  9151. *
  9152. * speedy-size.js
  9153. * Size of a rectangle
  9154. */
  9155. /**
  9156. * Size of a rectangle
  9157. */
  9158. class SpeedySize
  9159. {
  9160. /**
  9161. * Constructor
  9162. * @param {number} width non-negative number
  9163. * @param {number} height non-negative number
  9164. */
  9165. constructor(width, height)
  9166. {
  9167. /** @type {number} width */
  9168. this._width = Math.max(0, +width);
  9169. /** @type {number} height */
  9170. this._height = Math.max(0, +height);
  9171. }
  9172. //
  9173. // ===== METHODS =====
  9174. //
  9175. /**
  9176. * Width
  9177. * @returns {number}
  9178. */
  9179. get width()
  9180. {
  9181. return this._width;
  9182. }
  9183. /**
  9184. * Width
  9185. * @param {number} value
  9186. */
  9187. set width(value)
  9188. {
  9189. this._width = Math.max(0, +value);
  9190. }
  9191. /**
  9192. * Height
  9193. * @returns {number}
  9194. */
  9195. get height()
  9196. {
  9197. return this._height;
  9198. }
  9199. /**
  9200. * Height
  9201. * @param {number} value
  9202. */
  9203. set height(value)
  9204. {
  9205. this._height = Math.max(0, +value);
  9206. }
  9207. /**
  9208. * Convert to string
  9209. * @returns {string}
  9210. */
  9211. toString()
  9212. {
  9213. return `SpeedySize(${this.width}, ${this.height})`;
  9214. }
  9215. /**
  9216. * Is this size equal to anotherSize?
  9217. * @param {SpeedySize} anotherSize
  9218. * @returns {boolean}
  9219. */
  9220. equals(anotherSize)
  9221. {
  9222. return this.width === anotherSize.width && this.height === anotherSize.height;
  9223. }
  9224. /**
  9225. * The area of the rectangle
  9226. * @returns {number}
  9227. */
  9228. area()
  9229. {
  9230. return this.width * this.height;
  9231. }
  9232. }
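/*
 * Usage sketch for SpeedySize, assuming the bundle exposes the usual
 * speedy-vision factory Speedy.Size() (an assumption; values illustrative):
 *
 *   const size = Speedy.Size(640, 480);
 *   size.area();                        // 307200
 *   size.equals(Speedy.Size(640, 480)); // true
 *   size.toString();                    // "SpeedySize(640, 480)"
 */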
  9233. ;// CONCATENATED MODULE: ./src/core/speedy-media.js
  9234. /*
  9235. * speedy-vision.js
  9236. * GPU-accelerated Computer Vision for JavaScript
  9237. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9238. *
  9239. * Licensed under the Apache License, Version 2.0 (the "License");
  9240. * you may not use this file except in compliance with the License.
  9241. * You may obtain a copy of the License at
  9242. *
  9243. * http://www.apache.org/licenses/LICENSE-2.0
  9244. *
  9245. * Unless required by applicable law or agreed to in writing, software
  9246. * distributed under the License is distributed on an "AS IS" BASIS,
  9247. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9248. * See the License for the specific language governing permissions and
  9249. * limitations under the License.
  9250. *
  9251. * speedy-media.js
  9252. * SpeedyMedia implementation
  9253. */
  9254. /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  9255. /**
  9256. * @typedef {object} SpeedyMediaOptions
  9257. * @property {ImageFormat} [format] default is RGBA
  9258. */
  9259. /** A helper used to keep the constructor of SpeedyMedia private */
  9260. const speedy_media_PRIVATE_TOKEN = Symbol();
  9261. /**
  9262. * SpeedyMedia encapsulates a media element
  9263. * (e.g., image, video, canvas)
  9264. */
  9265. class SpeedyMedia
  9266. {
  9267. /**
  9268. * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
  9269. * @param {symbol} token
  9270. * @param {SpeedyMediaSource} source
  9271. * @param {SpeedyMediaOptions} [options] options object
  9272. */
  9273. constructor(token, source, options = {})
  9274. {
  9275. // private constructor
  9276. if(token !== speedy_media_PRIVATE_TOKEN)
  9277. throw new utils_errors/* IllegalOperationError */.Er();
  9278. /** @type {SpeedyMediaSource} media source */
  9279. this._source = source;
  9280. /** @type {ImageFormat} format */
  9281. this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
  9282. /** @type {SpeedyMediaOptions} options */
  9283. this._options = Object.freeze({ ...options, format: this._format });
  9284. // validate
  9285. if(!source.isLoaded())
  9286. throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);
  9287. else if(this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY)
  9288. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
  9289. }
  9290. /**
  9291. * Load a media source
  9292. * Will wait until the HTML media source is loaded
  9293. * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
  9294. * @param {SpeedyMediaOptions} [options] options object
  9295. * @param {boolean} [log] show log message?
  9296. * @returns {SpeedyPromise<SpeedyMedia>}
  9297. */
  9298. static load(mediaSource, options = {}, log = true)
  9299. {
  9300. return SpeedyMediaSource.load(mediaSource).then(source => {
  9301. utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
  9302. // FIXME user could pass an invalid format in options if ImageFormat is made public
  9303. const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
  9304. // show log message
  9305. if(log)
  9306. utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
  9307. // done!
  9308. return media;
  9309. });
  9310. }
  9311. /**
  9312. * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
  9313. * @returns {SpeedyMediaSourceNativeElement} the media element
  9314. */
  9315. get source()
  9316. {
  9317. return this._source ? this._source.data : null;
  9318. }
  9319. /**
  9320. * The type of the media attached to this SpeedyMedia object
  9321. * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
  9322. */
  9323. get type()
  9324. {
  9325. if(this.isReleased())
  9326. return 'unknown';
  9327. switch(this._source.type) {
  9328. case types/* MediaType */.zu.Image:
  9329. return 'image';
  9330. case types/* MediaType */.zu.Video:
  9331. return 'video';
  9332. case types/* MediaType */.zu.Canvas:
  9333. return 'canvas';
  9334. case types/* MediaType */.zu.OffscreenCanvas:
  9335. return 'offscreen-canvas';
  9336. case types/* MediaType */.zu.Bitmap:
  9337. return 'bitmap';
  9338. case types/* MediaType */.zu.Data:
  9339. return 'data';
  9340. default: // this shouldn't happen
  9341. return 'unknown';
  9342. }
  9343. }
  9344. /**
  9345. * Gets the width of the media
  9346. * @returns {number} media width
  9347. */
  9348. get width()
  9349. {
  9350. return this._source ? this._source.width : 0;
  9351. }
  9352. /**
  9353. * Gets the height of the media
  9354. * @returns {number} media height
  9355. */
  9356. get height()
  9357. {
  9358. return this._source ? this._source.height : 0;
  9359. }
  9360. /**
  9361. * The size of this media, in pixels
  9362. * @returns {SpeedySize}
  9363. */
  9364. get size()
  9365. {
  9366. return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
  9367. }
  9368. /**
  9369. * Returns a read-only object featuring advanced options
  9370. * related to this SpeedyMedia object
  9371. * @returns {SpeedyMediaOptions}
  9372. */
  9373. get options()
  9374. {
  9375. return this._options;
  9376. }
  9377. /**
  9378. * Releases resources associated with this media
  9379. * @returns {null}
  9380. */
  9381. release()
  9382. {
  9383. if(!this.isReleased()) {
  9384. utils/* Utils */.A.log('Releasing SpeedyMedia object...');
  9385. this._source = this._source.release();
  9386. }
  9387. return null;
  9388. }
  9389. /**
  9390. * Has this media been released?
  9391. * @returns {boolean}
  9392. */
  9393. isReleased()
  9394. {
  9395. return this._source == null;
  9396. }
  9397. /**
  9398. * Clones the SpeedyMedia object
  9399. * @returns {SpeedyPromise<SpeedyMedia>} a clone object
  9400. */
  9401. clone()
  9402. {
  9403. // has the media been released?
  9404. if(this.isReleased())
  9405. throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
  9406. // clone the object
  9407. const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
  9408. // done!
  9409. return speedy_promise/* SpeedyPromise */.i.resolve(clone);
  9410. }
  9411. /**
  9412. * Converts the media to an ImageBitmap
  9413. * @returns {SpeedyPromise<ImageBitmap>}
  9414. */
  9415. toBitmap()
  9416. {
  9417. if(this.isReleased())
  9418. throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');
  9419. else if(!this._source.isLoaded())
  9420. throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');
  9421. else if(this._source.type == types/* MediaType */.zu.Bitmap)
  9422. return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);
  9423. else
  9424. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
  9425. }
  9426. }
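/*
 * Usage sketch for SpeedyMedia, assuming the bundle exposes the usual
 * speedy-vision entry point Speedy.load() (an assumption); the element id
 * is hypothetical:
 *
 *   const img = document.getElementById('my-image');
 *   Speedy.load(img).then(media => {
 *       console.log(media.type, media.width, media.height); // e.g. "image", 640, 480
 *       return media.toBitmap().then(bitmap => {
 *           // ... use the ImageBitmap ...
 *           media.release(); // free resources when done
 *       });
 *   });
 */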
  9427. ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
  9428. /*
  9429. * speedy-vision.js
  9430. * GPU-accelerated Computer Vision for JavaScript
  9431. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9432. *
  9433. * Licensed under the Apache License, Version 2.0 (the "License");
  9434. * you may not use this file except in compliance with the License.
  9435. * You may obtain a copy of the License at
  9436. *
  9437. * http://www.apache.org/licenses/LICENSE-2.0
  9438. *
  9439. * Unless required by applicable law or agreed to in writing, software
  9440. * distributed under the License is distributed on an "AS IS" BASIS,
  9441. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9442. * See the License for the specific language governing permissions and
  9443. * limitations under the License.
  9444. *
  9445. * fps-counter.js
  9446. * A FPS counter
  9447. */
  9448. /** @const {number} update interval in milliseconds */
  9449. const UPDATE_INTERVAL = 500;
  9450. /** @type {FPSCounter|null} Singleton */
  9451. let instance = null;
  9452. /**
  9453. * FPS counter
  9454. */
  9455. class FPSCounter
  9456. {
  9457. /**
  9458. * Creates a new FPSCounter
  9459. * @private
  9460. */
  9461. constructor()
  9462. {
  9463. /** @type {number} current FPS rate */
  9464. this._fps = 60;
  9465. /** @type {number} frame counter */
  9466. this._frames = 0;
  9467. /** @type {number} update interval in milliseconds */
  9468. this._updateInterval = UPDATE_INTERVAL;
  9469. /** @type {number} time of the last update */
  9470. this._lastUpdate = performance.now();
  9471. /** @type {function(): void} bound update function */
  9472. this._boundUpdate = this._update.bind(this);
  9473. // this should never happen...
  9474. if(instance !== null)
  9475. throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
  9476. // start FPS counter
  9477. this._boundUpdate();
  9478. }
  9479. /**
  9480. * Gets an instance of the FPS counter.
  9481. * We use lazy loading, i.e., we will not
  9482. * create a FPS counter unless we need to!
  9483. * @returns {FPSCounter}
  9484. */
  9485. static get instance()
  9486. {
  9487. if(instance === null)
  9488. instance = new FPSCounter();
  9489. return instance;
  9490. }
  9491. /**
  9492. * Get the FPS rate
  9493. * @returns {number} frames per second
  9494. */
  9495. get fps()
  9496. {
  9497. return this._fps;
  9498. }
  9499. /**
  9500. * Updates the FPS counter
  9501. */
  9502. _update()
  9503. {
  9504. const now = performance.now();
  9505. const deltaTime = now - this._lastUpdate;
  9506. if(deltaTime >= this._updateInterval) {
  9507. this._fps = Math.round(this._frames / (deltaTime * 0.001));
  9508. this._frames = 0;
  9509. this._lastUpdate = now;
  9510. }
  9511. this._frames++;
  9512. requestAnimationFrame(this._boundUpdate);
  9513. }
  9514. }
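/*
 * Usage sketch: the counter recomputes the rate every UPDATE_INTERVAL ms as
 * frames / elapsed seconds. Assuming the bundle exposes it as Speedy.fps
 * (as speedy-vision does; an assumption here), it can be polled from a
 * render loop:
 *
 *   function render() {
 *       // ... draw a frame ...
 *       console.log(`${Speedy.fps} FPS`);
 *       requestAnimationFrame(render);
 *   }
 *   requestAnimationFrame(render);
 */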
  9515. ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
  9516. /*
  9517. * speedy-vision.js
  9518. * GPU-accelerated Computer Vision for JavaScript
  9519. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9520. *
  9521. * Licensed under the Apache License, Version 2.0 (the "License");
  9522. * you may not use this file except in compliance with the License.
  9523. * You may obtain a copy of the License at
  9524. *
  9525. * http://www.apache.org/licenses/LICENSE-2.0
  9526. *
  9527. * Unless required by applicable law or agreed to in writing, software
  9528. * distributed under the License is distributed on an "AS IS" BASIS,
  9529. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9530. * See the License for the specific language governing permissions and
  9531. * limitations under the License.
  9532. *
  9533. * speedy-vector.js
  9534. * Vectors
  9535. */
  9536. /**
  9537. * 2D vector of floating-point numbers
  9538. */
  9539. class SpeedyVector2
  9540. {
  9541. /**
  9542. * Create a 2D vector
  9543. * @param {number} x
  9544. * @param {number} y
  9545. */
  9546. constructor(x, y)
  9547. {
  9548. /** @type {number} x coordinate */
  9549. this._x = +x;
  9550. /** @type {number} y coordinate */
  9551. this._y = +y;
  9552. }
  9553. //
  9554. // ===== METHODS =====
  9555. //
  9556. /**
  9557. * x-coordinate
  9558. * @returns {number}
  9559. */
  9560. get x()
  9561. {
  9562. return this._x;
  9563. }
  9564. /**
  9565. * x-coordinate
  9566. * @param {number} value
  9567. */
  9568. set x(value)
  9569. {
  9570. this._x = +value;
  9571. }
  9572. /**
  9573. * y-coordinate
  9574. * @returns {number}
  9575. */
  9576. get y()
  9577. {
  9578. return this._y;
  9579. }
  9580. /**
  9581. * y-coordinate
  9582. * @param {number} value
  9583. */
  9584. set y(value)
  9585. {
  9586. this._y = +value;
  9587. }
  9588. /**
  9589. * Convert to string
  9590. * @returns {string}
  9591. */
  9592. toString()
  9593. {
  9594. return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  9595. }
  9596. /**
  9597. * Is this vector equal to v?
  9598. * @param {SpeedyVector2} v
  9599. * @returns {boolean}
  9600. */
  9601. equals(v)
  9602. {
  9603. return this.x === v.x && this.y === v.y;
  9604. }
  9605. /**
  9606. * Dot product between this vector and another vector
  9607. * @param {SpeedyVector2} v another vector
  9608. * @returns {number}
  9609. */
  9610. dot(v)
  9611. {
  9612. return this.x * v.x + this.y * v.y;
  9613. }
  9614. /**
  9615. * The distance between this vector and another vector
  9616. * @param {SpeedyVector2} v another vector
  9617. * @returns {number}
  9618. */
  9619. distanceTo(v)
  9620. {
  9621. const dx = this.x - v.x;
  9622. const dy = this.y - v.y;
  9623. return Math.sqrt(dx * dx + dy * dy);
  9624. }
  9625. /**
  9626. * Euclidean norm
  9627. * @returns {number}
  9628. */
  9629. length()
  9630. {
  9631. return Math.sqrt(this.x * this.x + this.y * this.y);
  9632. }
  9633. /**
  9634. * Returns a normalized version of this vector
  9635. * @returns {SpeedyVector2}
  9636. */
  9637. normalized()
  9638. {
  9639. const len = this.length();
  9640. if(len > 0.0)
  9641. return new SpeedyVector2(this.x / len, this.y / len);
  9642. else
  9643. return new SpeedyVector2(0.0, 0.0);
  9644. }
  9645. /**
  9646. * Returns a copy of this vector translated by offset
  9647. * @param {SpeedyVector2} offset
  9648. * @returns {SpeedyVector2}
  9649. */
  9650. plus(offset)
  9651. {
  9652. return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
  9653. }
  9654. /**
  9655. * Returns a copy of this vector translated by -offset
  9656. * @param {SpeedyVector2} offset
  9657. * @returns {SpeedyVector2}
  9658. */
  9659. minus(offset)
  9660. {
  9661. return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
  9662. }
  9663. /**
  9664. * Returns a copy of this vector scaled by a scalar
  9665. * @param {number} scalar
  9666. * @returns {SpeedyVector2}
  9667. */
  9668. times(scalar)
  9669. {
  9670. return new SpeedyVector2(this.x * scalar, this.y * scalar);
  9671. }
  9672. }
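/*
 * Usage sketch for SpeedyVector2, assuming the usual public factory
 * Speedy.Vector2() (an assumption; values illustrative):
 *
 *   const v = Speedy.Vector2(3, 4);
 *   v.length();                    // 5
 *   v.normalized();                // SpeedyVector2(0.60000, 0.80000)
 *   v.plus(Speedy.Vector2(1, 1));  // SpeedyVector2(4.00000, 5.00000)
 *   v.dot(Speedy.Vector2(2, 0));   // 6
 */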
  9673. ;// CONCATENATED MODULE: ./src/core/speedy-point.js
  9674. /*
  9675. * speedy-vision.js
  9676. * GPU-accelerated Computer Vision for JavaScript
  9677. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9678. *
  9679. * Licensed under the Apache License, Version 2.0 (the "License");
  9680. * you may not use this file except in compliance with the License.
  9681. * You may obtain a copy of the License at
  9682. *
  9683. * http://www.apache.org/licenses/LICENSE-2.0
  9684. *
  9685. * Unless required by applicable law or agreed to in writing, software
  9686. * distributed under the License is distributed on an "AS IS" BASIS,
  9687. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9688. * See the License for the specific language governing permissions and
  9689. * limitations under the License.
  9690. *
  9691. * speedy-point.js
  9692. * Points in space
  9693. */
  9694. /**
  9695. * 2D point
  9696. */
  9697. class SpeedyPoint2
  9698. {
  9699. /**
  9700. * Create a 2D point
  9701. * @param {number} x
  9702. * @param {number} y
  9703. */
  9704. constructor(x, y)
  9705. {
  9706. /** @type {number} x coordinate */
  9707. this._x = +x;
  9708. /** @type {number} y coordinate */
  9709. this._y = +y;
  9710. }
  9711. //
  9712. // ===== METHODS =====
  9713. //
  9714. /**
  9715. * x-coordinate
  9716. * @returns {number}
  9717. */
  9718. get x()
  9719. {
  9720. return this._x;
  9721. }
  9722. /**
  9723. * x-coordinate
  9724. * @param {number} value
  9725. */
  9726. set x(value)
  9727. {
  9728. this._x = +value;
  9729. }
  9730. /**
  9731. * y-coordinate
  9732. * @returns {number}
  9733. */
  9734. get y()
  9735. {
  9736. return this._y;
  9737. }
  9738. /**
  9739. * y-coordinate
  9740. * @param {number} value
  9741. */
  9742. set y(value)
  9743. {
  9744. this._y = +value;
  9745. }
  9746. /**
  9747. * Convert to string
  9748. * @returns {string}
  9749. */
  9750. toString()
  9751. {
  9752. return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  9753. }
  9754. /**
  9755. * Add a vector to this point
  9756. * @param {SpeedyVector2} v
  9757. * @returns {SpeedyPoint2}
  9758. */
  9759. plus(v)
  9760. {
  9761. return new SpeedyPoint2(this.x + v.x, this.y + v.y);
  9762. }
  9763. /**
  9764. * Subtracts a point p from this point
  9765. * @param {SpeedyPoint2} p
  9766. * @returns {SpeedyVector2}
  9767. */
  9768. minus(p)
  9769. {
  9770. return new SpeedyVector2(this.x - p.x, this.y - p.y);
  9771. }
  9772. /**
  9773. * Is this point equal to p?
  9774. * @param {SpeedyPoint2} p
  9775. * @returns {boolean}
  9776. */
  9777. equals(p)
  9778. {
  9779. return this.x === p.x && this.y === p.y;
  9780. }
  9781. }
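/*
 * Usage sketch for SpeedyPoint2, assuming the usual public factories
 * Speedy.Point2() and Speedy.Vector2() (an assumption): subtracting two
 * points yields the displacement vector, and adding it back recovers the
 * second point.
 *
 *   const p = Speedy.Point2(10, 20);
 *   const q = Speedy.Point2(13, 24);
 *   const d = q.minus(p);          // SpeedyVector2(3.00000, 4.00000)
 *   p.plus(d).equals(q);           // true
 */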
  9782. // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
  9783. var speedy_matrix_expr = __nested_webpack_require_320900__(4292);
  9784. // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
  9785. var speedy_matrix_wasm = __nested_webpack_require_320900__(4247);
  9786. // EXTERNAL MODULE: ./src/core/speedy-matrix.js
  9787. var speedy_matrix = __nested_webpack_require_320900__(3286);
  9788. ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
  9789. /*
  9790. * speedy-vision.js
  9791. * GPU-accelerated Computer Vision for JavaScript
  9792. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9793. *
  9794. * Licensed under the Apache License, Version 2.0 (the "License");
  9795. * you may not use this file except in compliance with the License.
  9796. * You may obtain a copy of the License at
  9797. *
  9798. * http://www.apache.org/licenses/LICENSE-2.0
  9799. *
  9800. * Unless required by applicable law or agreed to in writing, software
  9801. * distributed under the License is distributed on an "AS IS" BASIS,
  9802. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9803. * See the License for the specific language governing permissions and
  9804. * limitations under the License.
  9805. *
  9806. * speedy-matrix-factory.js
  9807. * A factory of matrices
  9808. */
  9809. /**
  9810. * Matrix routines
  9811. */
  9812. class SpeedyMatrixFactory extends Function
  9813. {
  9814. /**
  9815. * Constructor
  9816. */
  9817. constructor()
  9818. {
  9819. // This factory can be invoked as a function
  9820. super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
  9821. return this.bind(this);
  9822. }
  9823. /**
  9824. * @private
  9825. *
  9826. * Create a new matrix filled with the specified size and entries
  9827. * @param {number} rows
  9828. * @param {number} [columns]
  9829. * @param {number[]} [entries] in column-major format
  9830. * @returns {SpeedyMatrix}
  9831. */
  9832. _create(rows, columns = rows, entries = [])
  9833. {
  9834. return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
  9835. }
  9836. /**
  9837. * @private
  9838. *
  9839. * Evaluate an expression synchronously and store the result in a new matrix
  9840. * @param {SpeedyMatrixExpr} expr matrix expression
  9841. * @returns {SpeedyMatrix}
  9842. */
  9843. _from(expr)
  9844. {
  9845. return speedy_matrix.SpeedyMatrix.From(expr);
  9846. }
  9847. /**
  9848. * Create a new matrix filled with zeros with the specified size
  9849. * @param {number} rows
  9850. * @param {number} [columns]
  9851. * @returns {SpeedyMatrix}
  9852. */
  9853. Zeros(rows, columns = rows)
  9854. {
  9855. return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
  9856. }
  9857. /**
  9858. * Create a new matrix filled with ones with the specified size
  9859. * @param {number} rows
  9860. * @param {number} [columns]
  9861. * @returns {SpeedyMatrix}
  9862. */
  9863. Ones(rows, columns = rows)
  9864. {
  9865. return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
  9866. }
  9867. /**
  9868. * Create an identity matrix with the specified size
  9869. * @param {number} rows
  9870. * @param {number} [columns]
  9871. * @returns {SpeedyMatrix}
  9872. */
  9873. Eye(rows, columns = rows)
  9874. {
  9875. return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
  9876. }
  9877. /**
  9878. * Returns a promise that resolves immediately if the WebAssembly routines
  9879. * are ready to be used, or as soon as they do become ready
  9880. * @returns {SpeedyPromise<void>}
  9881. */
  9882. ready()
  9883. {
  9884. return speedy_matrix.SpeedyMatrix.ready();
  9885. }
  9886. /**
  9887. * QR decomposition
  9888. * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
  9889. * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
  9890. * @param {SpeedyMatrix} mat is m x n, input
  9891. * @param {object} [options]
  9892. * @param {'reduced'|'full'} [options.mode]
  9893. * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
  9894. */
  9895. qr(Q, R, mat, { mode = 'reduced' } = {})
  9896. {
  9897. const A = mat, m = mat.rows, n = mat.columns;
  9898. // validate shapes & mode
  9899. if(mode == 'reduced') {
  9900. if(Q.rows != m || Q.columns != n || R.rows != n || R.columns != n)
  9901. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
  9902. }
  9903. else if(mode == 'full') {
  9904. if(Q.rows != m || Q.columns != m || R.rows != m || R.columns != n)
  9905. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
  9906. }
  9907. else
  9908. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
  9909. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  9910. // allocate matrices
  9911. const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
  9912. const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
  9913. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  9914. // copy input matrices to WASM memory
  9915. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  9916. // run the WASM routine
  9917. if(mode == 'reduced')
  9918. wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);
  9919. else
  9920. wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
  9921. // copy output matrices from WASM memory
  9922. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
  9923. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
  9924. // deallocate matrices
  9925. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  9926. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
  9927. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
  9928. // done!
  9929. return [Q, R];
  9930. });
  9931. }
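/*
 * Usage sketch for qr(), assuming this factory is exposed publicly as
 * Speedy.Matrix, as in speedy-vision (an assumption); entries are
 * illustrative and given in column-major order:
 *
 *   const A = Speedy.Matrix(3, 2, [1, 2, 3,  4, 5, 6]); // 3x2 input
 *   const Q = Speedy.Matrix.Zeros(3, 2);                // reduced: m x n
 *   const R = Speedy.Matrix.Zeros(2, 2);                //          n x n
 *   Speedy.Matrix.qr(Q, R, A).then(([Q, R]) => {
 *       // A ≈ Q * R, with orthonormal columns in Q and R upper triangular
 *   });
 */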
  9932. /**
  9933. * Solve a possibly overdetermined system of linear
  9934. * equations Ax = b for x using ordinary least squares
  9935. * @param {SpeedyMatrix} solution n x 1, output
  9936. * @param {SpeedyMatrix} A m x n, m >= n, input
9937. * @param {SpeedyMatrix} b m x 1, input
  9938. * @param {object} [options]
  9939. * @param {'qr'} [options.method] method of resolution
  9940. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9941. */
  9942. ols(solution, A, b, { method = 'qr' } = {})
  9943. {
  9944. const m = A.rows, n = A.columns;
  9945. const x = solution;
  9946. // validate shapes
  9947. if(m < n || n == 0)
  9948. throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);
  9949. else if(b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1)
  9950. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9951. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  9952. // allocate matrices
  9953. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  9954. const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
  9955. const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
  9956. // copy input matrices to WASM memory
  9957. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  9958. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
  9959. // run the WASM routine
  9960. switch(method) {
  9961. case 'qr':
  9962. wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
  9963. break;
  9964. default:
  9965. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9966. }
  9967. // copy output matrix from WASM memory
  9968. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
  9969. // deallocate matrices
  9970. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
  9971. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
  9972. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  9973. // done!
  9974. return solution;
  9975. });
  9976. }
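/*
 * Usage sketch for ols(): fit y ≈ a*x + c to three samples by solving the
 * overdetermined system A [a c]^T = y in the least-squares sense (assuming
 * the public alias Speedy.Matrix; data is illustrative):
 *
 *   // rows of A are [x_i, 1]; entries are column-major
 *   const A = Speedy.Matrix(3, 2, [0, 1, 2,  1, 1, 1]);
 *   const y = Speedy.Matrix(3, 1, [1, 3, 5]);
 *   const x = Speedy.Matrix.Zeros(2, 1);
 *   Speedy.Matrix.ols(x, A, y).then(x => {
 *       // x now holds [a, c] ≈ [2, 1]
 *   });
 */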
  9977. /**
  9978. * Solve a system of linear equations Ax = b for x
  9979. * @param {SpeedyMatrix} solution m x 1, output
  9980. * @param {SpeedyMatrix} A m x m, input
9981. * @param {SpeedyMatrix} b m x 1, input
  9982. * @param {object} [options]
  9983. * @param {'qr'} [options.method] method of resolution
  9984. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9985. */
  9986. solve(solution, A, b, { method = 'qr' } = {})
  9987. {
  9988. const m = A.rows, n = A.columns;
  9989. const x = solution;
  9990. // validate shapes
  9991. if(m != n)
  9992. throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over or underdetermined system of equations`);
  9993. else if(b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1)
  9994. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9995. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  9996. // select method
  9997. switch(method) {
  9998. case 'qr':
  9999. return this.ols(x, A, b, { method });
  10000. /*case 'lu':
  10001. break;*/
  10002. default:
  10003. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  10004. }
  10005. });
  10006. }
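/*
 * Usage sketch for solve() on a square 2x2 system (assuming the public alias
 * Speedy.Matrix; entries are column-major and illustrative):
 *
 *   // 2x + y = 5,  x + 3y = 10
 *   const A = Speedy.Matrix(2, 2, [2, 1,  1, 3]);
 *   const b = Speedy.Matrix(2, 1, [5, 10]);
 *   const x = Speedy.Matrix.Zeros(2, 1);
 *   Speedy.Matrix.solve(x, A, b).then(x => {
 *       // x ≈ [1, 3]
 *   });
 */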
  10007. /**
  10008. * Compute a perspective transformation using 4 correspondences of points
  10009. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  10010. * @param {SpeedyMatrix} src 2x4 input points - source coordinates
  10011. * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
  10012. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  10013. */
  10014. perspective(homography, src, dest)
  10015. {
  10016. // validate shapes
  10017. if(src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4)
  10018. throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);
  10019. else if(homography.rows != 3 || homography.columns != 3)
  10020. throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
  10021. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10022. // allocate matrices
  10023. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  10024. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10025. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10026. // copy input matrices to WASM memory
  10027. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10028. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  10029. // run the WASM routine
  10030. wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
  10031. // copy output matrix from WASM memory
  10032. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  10033. // deallocate matrices
  10034. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10035. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10036. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  10037. // done!
  10038. return homography;
  10039. });
  10040. }
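/*
 * Usage sketch for perspective(): map the corners of a 100x100 square to an
 * arbitrary quadrilateral (assuming the public alias Speedy.Matrix; the
 * coordinates are illustrative; points are stored column-major as x,y pairs):
 *
 *   const src  = Speedy.Matrix(2, 4, [0,0,  100,0,  100,100,  0,100]);
 *   const dest = Speedy.Matrix(2, 4, [10,5,  120,15,  110,130,  0,95]);
 *   const H = Speedy.Matrix.Zeros(3, 3);
 *   Speedy.Matrix.perspective(H, src, dest).then(H => {
 *       // H maps each src point to the corresponding dest point
 *   });
 */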
  10041. /**
  10042. * Compute a perspective transformation using n >= 4 correspondences of points
  10043. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  10044. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  10045. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  10046. * @param {object} [options]
  10047. * @param {'default'|'pransac'} [options.method] method of computation
  10048. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  10049. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  10050. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  10051. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  10052. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  10053. */
  10054. findHomography(homography, src, dest, {
  10055. method = 'default',
  10056. mask = null,
  10057. reprojectionError = 3,
  10058. numberOfHypotheses = 512,
  10059. bundleSize = 128,
  10060. } = {})
  10061. {
  10062. // validate shapes
  10063. if(src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns)
  10064. throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);
  10065. else if(homography.rows != 3 || homography.columns != 3)
  10066. throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);
  10067. else if(mask != null && (mask.rows != 1 || mask.columns != src.columns))
  10068. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  10069. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10070. // allocate matrices
  10071. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  10072. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10073. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10074. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  10075. // copy input matrices to WASM memory
  10076. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10077. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  10078. // run the WASM routine
  10079. switch(method) {
  10080. case 'pransac':
  10081. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  10082. wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  10083. break;
  10084. case 'default':
  10085. case 'dlt': // obsolete
  10086. wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
  10087. break;
  10088. default:
  10089. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
  10090. }
  10091. // copy output matrices from WASM memory
  10092. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  10093. if(mask != null)
  10094. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  10095. // deallocate matrices
  10096. if(mask != null)
  10097. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  10098. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10099. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10100. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  10101. // done!
  10102. return homography;
  10103. });
  10104. }
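/*
 * Usage sketch for findHomography() with the PRANSAC method, which tolerates
 * outliers among the correspondences (assuming the public alias Speedy.Matrix;
 * srcPoints and destPoints are hypothetical 2 x n matrices of matched points):
 *
 *   const H = Speedy.Matrix.Zeros(3, 3);
 *   const mask = Speedy.Matrix.Zeros(1, srcPoints.columns); // inlier mask
 *   Speedy.Matrix.findHomography(H, srcPoints, destPoints, {
 *       method: 'pransac',
 *       mask,
 *       reprojectionError: 3 // in pixels
 *   }).then(H => {
 *       // the i-th entry of mask is 1 if the i-th correspondence is an inlier
 *   });
 */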
  10105. /**
  10106. * Apply a perspective transformation to a set of 2D points
  10107. * @param {SpeedyMatrix} dest 2 x n output matrix
  10108. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  10109. * @param {SpeedyMatrix} transform 3x3 homography matrix
  10110. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  10111. */
  10112. applyPerspectiveTransform(dest, src, transform)
  10113. {
  10114. // validate shapes
  10115. if(src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
  10116. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  10117. else if(transform.rows != 3 || transform.columns != 3)
  10118. throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
  10119. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10120. // allocate matrices
  10121. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  10122. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10123. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10124. // copy input matrices to WASM memory
  10125. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10126. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  10127. // run the WASM routine
  10128. wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
  10129. // copy output matrix from WASM memory
  10130. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  10131. // deallocate matrices
  10132. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10133. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10134. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  10135. // done!
  10136. return dest;
  10137. });
  10138. }
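/*
 * Usage sketch for applyPerspectiveTransform(): warp a set of 2D points by a
 * 3x3 homography H, e.g., one computed with perspective() or findHomography()
 * above (assuming the public alias Speedy.Matrix; points are illustrative):
 *
 *   const pts = Speedy.Matrix(2, 3, [0,0,  50,50,  100,0]);
 *   const out = Speedy.Matrix.Zeros(2, 3);
 *   Speedy.Matrix.applyPerspectiveTransform(out, pts, H).then(out => {
 *       // out holds the transformed points, one column per input point
 *   });
 */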
  10139. /**
  10140. * Compute an affine transform using 3 correspondences of points
  10141. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  10142. * @param {SpeedyMatrix} src 2x3 input points - source coordinates
  10143. * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
10144. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to transform
  10145. */
  10146. affine(transform, src, dest)
  10147. {
  10148. // validate shapes
  10149. if(src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3)
  10150. throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);
  10151. else if(transform.rows != 2 || transform.columns != 3)
  10152. throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
  10153. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10154. // allocate matrices
  10155. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  10156. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10157. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10158. // copy input matrices to WASM memory
  10159. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10160. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  10161. // run the WASM routine
  10162. wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
  10163. // copy output matrix from WASM memory
  10164. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  10165. // deallocate matrices
  10166. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10167. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10168. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  10169. // done!
  10170. return transform;
  10171. });
  10172. }
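/*
 * Usage sketch for affine(): compute the 2x3 transform mapping three source
 * points to three destination points (assuming the public alias Speedy.Matrix;
 * coordinates are illustrative and stored column-major):
 *
 *   const src  = Speedy.Matrix(2, 3, [0,0,  1,0,  0,1]);
 *   const dest = Speedy.Matrix(2, 3, [10,10,  12,10,  10,13]);
 *   const T = Speedy.Matrix.Zeros(2, 3);
 *   Speedy.Matrix.affine(T, src, dest).then(T => {
 *       // T = [ 2 0 10 ; 0 3 10 ]: scale by (2, 3), translate by (10, 10)
 *   });
 */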
  10173. /**
  10174. * Compute an affine transformation using n >= 3 correspondences of points
  10175. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  10176. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  10177. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  10178. * @param {object} [options]
  10179. * @param {'default'|'pransac'} [options.method] method of computation
  10180. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  10181. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  10182. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  10183. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  10184. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
  10185. */
  10186. findAffineTransform(transform, src, dest, {
  10187. method = 'default',
  10188. mask = null,
  10189. reprojectionError = 3,
  10190. numberOfHypotheses = 512,
  10191. bundleSize = 128,
  10192. } = {})
  10193. {
  10194. // validate shapes
  10195. if(src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns)
  10196. throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);
  10197. else if(transform.rows != 2 || transform.columns != 3)
  10198. throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);
  10199. else if(mask != null && (mask.rows != 1 || mask.columns != src.columns))
  10200. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  10201. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10202. // allocate matrices
  10203. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  10204. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10205. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10206. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  10207. // copy input matrices to WASM memory
  10208. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10209. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  10210. // run the WASM routine
  10211. switch(method) {
  10212. case 'pransac':
  10213. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  10214. wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  10215. break;
  10216. case 'default':
  10217. wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
  10218. break;
  10219. default:
  10220. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
  10221. }
  10222. // copy output matrices from WASM memory
  10223. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  10224. if(mask != null)
  10225. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  10226. // deallocate matrices
  10227. if(mask != null)
  10228. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  10229. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10230. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10231. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  10232. // done!
  10233. return transform;
  10234. });
  10235. }
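/*
 * [Editor's note] Illustrative sketch: robust estimation with the 'pransac'
 * method. Assumes srcPoints and destPoints are 2 x n matrices of corresponding
 * points and that this factory is exposed as Speedy.Matrix; the option values
 * below are arbitrary examples.
 *
 *   const n = srcPoints.columns;
 *   const transform = Speedy.Matrix.Zeros(2, 3);
 *   const mask = Speedy.Matrix.Zeros(1, n);              // 1 for inliers, 0 for outliers
 *   Speedy.Matrix.findAffineTransform(transform, srcPoints, destPoints, {
 *       method: 'pransac',
 *       mask: mask,
 *       reprojectionError: 2,
 *       numberOfHypotheses: 512,
 *       bundleSize: 128
 *   }).then(() => console.log(transform.toString(), mask.toString()));
 */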
  10236. /**
  10237. * Apply an affine transformation to a set of 2D points
  10238. * @param {SpeedyMatrix} dest 2 x n output matrix
  10239. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  10240. * @param {SpeedyMatrix} transform 2x3 affine transform
  10241. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  10242. */
  10243. applyAffineTransform(dest, src, transform)
  10244. {
  10245. // validate shapes
  10246. if(src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
  10247. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  10248. else if(transform.rows != 2 || transform.columns != 3)
  10249. throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
  10250. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({wasm, memory}) => {
  10251. // allocate matrices
  10252. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  10253. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  10254. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  10255. // copy input matrices to WASM memory
  10256. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  10257. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  10258. // run the WASM routine
  10259. wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
  10260. // copy output matrix from WASM memory
  10261. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  10262. // deallocate matrices
  10263. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  10264. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  10265. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  10266. // done!
  10267. return dest;
  10268. });
  10269. }
  10270. }
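/*
 * [Editor's note] Illustrative sketch: applying a 2x3 affine transform to a
 * batch of points, assuming the factory is exposed as Speedy.Matrix.
 *
 *   const affine = Speedy.Matrix(2, 3, [1,0, 0,1, 10,20]);  // translation by (10,20)
 *   const src = Speedy.Matrix(2, 2, [0,0, 1,1]);
 *   const dest = Speedy.Matrix.Zeros(2, 2);
 *   Speedy.Matrix.applyAffineTransform(dest, src, affine)
 *         .then(() => console.log(dest.toString()));        // expect (10,20) and (11,21)
 */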
  10271. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
  10272. /*
  10273. * speedy-vision.js
  10274. * GPU-accelerated Computer Vision for JavaScript
  10275. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10276. *
  10277. * Licensed under the Apache License, Version 2.0 (the "License");
  10278. * you may not use this file except in compliance with the License.
  10279. * You may obtain a copy of the License at
  10280. *
  10281. * http://www.apache.org/licenses/LICENSE-2.0
  10282. *
  10283. * Unless required by applicable law or agreed to in writing, software
  10284. * distributed under the License is distributed on an "AS IS" BASIS,
  10285. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10286. * See the License for the specific language governing permissions and
  10287. * limitations under the License.
  10288. *
  10289. * pipeline-message.js
  10290. * A message that is shared between nodes of a pipeline
  10291. */
  10292. /**
  10293. * Types of messages
  10294. * @enum {Symbol}
  10295. */
  10296. const SpeedyPipelineMessageType = Object.freeze({
  10297. Nothing: Symbol('Nothing'),
  10298. Image: Symbol('Image'),
  10299. Keypoints: Symbol('Keypoints'),
  10300. Vector2: Symbol('Vector2'),
  10301. LSHTables: Symbol('LSHTables'),
  10302. KeypointMatches: Symbol('KeypointMatches'),
  10303. });
  10304. /**
  10305. * Diagnostic data
  10306. * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
  10307. */
  10308. /**
  10309. * A message that is shared between nodes of a pipeline
  10310. * @abstract
  10311. */
  10312. class SpeedyPipelineMessage
  10313. {
  10314. /**
  10315. * Constructor
  10316. * @param {SpeedyPipelineMessageType} type message type
  10317. */
  10318. constructor(type)
  10319. {
  10320. /** @type {SpeedyPipelineMessageType} message type */
  10321. this._type = type;
  10322. }
  10323. /**
  10324. * Message type
  10325. * @returns {SpeedyPipelineMessageType}
  10326. */
  10327. get type()
  10328. {
  10329. return this._type;
  10330. }
  10331. /**
  10332. * Checks if the type of this message is equal to parameter type
  10333. * @param {SpeedyPipelineMessageType} type
  10334. * @returns {boolean}
  10335. */
  10336. hasType(type)
  10337. {
  10338. return this._type === type;
  10339. }
  10340. /**
  10341. * Is this an empty message?
  10342. * @returns {boolean}
  10343. */
  10344. isEmpty()
  10345. {
  10346. return this.hasType(SpeedyPipelineMessageType.Nothing);
  10347. }
  10348. /**
  10349. * Convert to string
  10350. * @returns {string}
  10351. */
  10352. toString()
  10353. {
  10354. const type = Object.keys(SpeedyPipelineMessageType).find(
  10355. type => SpeedyPipelineMessageType[type] === this.type
  10356. );
  10357. return `message of type ${type}`;
  10358. }
  10359. /**
  10360. * Inspect this message for debugging purposes
  10361. * @param {SpeedyGPU} gpu
  10362. * @returns {SpeedyPipelineMessageDiagnosticData}
  10363. */
  10364. inspect(gpu)
  10365. {
  10366. throw new utils_errors/* AbstractMethodError */.aQ();
  10367. }
  10368. /**
  10369. * Set parameters
  10370. * @abstract
  10371. * @param {...any} args
  10372. * @returns {SpeedyPipelineMessage} this message
  10373. */
  10374. set(...args)
  10375. {
  10376. throw new utils_errors/* AbstractMethodError */.aQ();
  10377. }
  10378. /**
  10379. * Create a message of the specified type
  10380. * @param {SpeedyPipelineMessageType} type
  10381. * @returns {SpeedyPipelineMessage}
  10382. */
  10383. static create(type)
  10384. {
  10385. return createMessage(type);
  10386. }
  10387. }
  10388. /**
  10389. * An empty message carrying nothing
  10390. */
  10391. class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage
  10392. {
  10393. /**
  10394. * Constructor
  10395. */
  10396. constructor()
  10397. {
  10398. super(SpeedyPipelineMessageType.Nothing);
  10399. }
  10400. /**
  10401. * Set parameters
  10402. * @returns {SpeedyPipelineMessage} this message
  10403. */
  10404. set()
  10405. {
  10406. return this;
  10407. }
  10408. /**
  10409. * Inspect this message for debugging purposes
  10410. * @param {SpeedyGPU} gpu
  10411. * @returns {SpeedyPipelineMessageDiagnosticData}
  10412. */
  10413. inspect(gpu)
  10414. {
  10415. return {
  10416. type: this.constructor.name
  10417. };
  10418. }
  10419. }
  10420. /**
  10421. * A message transporting an image
  10422. */
  10423. class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage
  10424. {
  10425. /**
  10426. * Constructor
  10427. */
  10428. constructor()
  10429. {
  10430. super(SpeedyPipelineMessageType.Image);
  10431. /** @type {SpeedyDrawableTexture} the image we carry */
  10432. this._image = null;
  10433. /** @type {ImageFormat} image format */
  10434. this._format = types/* ImageFormat */.f5.RGBA;
  10435. }
  10436. /**
  10437. * Set parameters
  10438. * @param {SpeedyDrawableTexture} image the image we carry
  10439. * @param {ImageFormat} [format] image format
  10440. * @returns {SpeedyPipelineMessage} this message
  10441. */
  10442. set(image, format = types/* ImageFormat */.f5.RGBA)
  10443. {
  10444. // set parameters
  10445. this._image = image;
  10446. this._format = format;
  10447. // done!
  10448. return this;
  10449. }
  10450. /**
  10451. * Inspect this message for debugging purposes
  10452. * @param {SpeedyGPU} gpu
  10453. * @returns {SpeedyPipelineMessageDiagnosticData}
  10454. */
  10455. inspect(gpu)
  10456. {
  10457. const formatName = Object.keys(types/* ImageFormat */.f5).find(
  10458. format => types/* ImageFormat */.f5[format] === this.format
  10459. );
  10460. return {
  10461. type: this.constructor.name,
  10462. format: String(formatName),
  10463. imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
  10464. image: this.image ? '<image data>' /* possibly MBs of data */ : '',
  10465. hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
  10466. };
  10467. }
  10468. /**
  10469. * The image we carry
  10470. * @returns {SpeedyDrawableTexture}
  10471. */
  10472. get image()
  10473. {
  10474. return this._image;
  10475. }
  10476. /**
  10477. * Image format
  10478. * @returns {ImageFormat}
  10479. */
  10480. get format()
  10481. {
  10482. return this._format;
  10483. }
  10484. }
  10485. /**
  10486. * A message transporting keypoints
  10487. */
  10488. class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage
  10489. {
  10490. /**
  10491. * Constructor
  10492. */
  10493. constructor()
  10494. {
  10495. super(SpeedyPipelineMessageType.Keypoints);
  10496. /** @type {SpeedyDrawableTexture} encoded keypoints */
  10497. this._encodedKeypoints = null;
  10498. /** @type {number} descriptor size in bytes */
  10499. this._descriptorSize = 0;
  10500. /** @type {number} extra size in bytes */
  10501. this._extraSize = 0;
  10502. /** @type {number} encoder length */
  10503. this._encoderLength = 1;
  10504. }
  10505. /**
  10506. * Set parameters
  10507. * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
  10508. * @param {number} descriptorSize in bytes
  10509. * @param {number} extraSize in bytes
  10510. * @param {number} encoderLength positive integer
  10511. * @returns {SpeedyPipelineMessage} this message
  10512. */
  10513. set(encodedKeypoints, descriptorSize, extraSize, encoderLength)
  10514. {
  10515. // set parameters
  10516. this._encodedKeypoints = encodedKeypoints;
  10517. this._descriptorSize = descriptorSize | 0;
  10518. this._extraSize = extraSize | 0;
  10519. this._encoderLength = encoderLength | 0;
  10520. // validate
  10521. utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
  10522. utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
  10523. utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
  10524. // done!
  10525. return this;
  10526. }
  10527. /**
  10528. * Inspect this message for debugging purposes
  10529. * @param {SpeedyGPU} gpu
  10530. * @returns {SpeedyPipelineMessageDiagnosticData}
  10531. */
  10532. inspect(gpu)
  10533. {
  10534. return {
  10535. type: this.constructor.name,
  10536. descriptorSize: this.descriptorSize,
  10537. extraSize: this.extraSize,
  10538. encoderLength: this.encoderLength,
  10539. encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
  10540. encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : '',
  10541. };
  10542. }
  10543. /**
  10544. * Encoded keypoints
  10545. * @returns {SpeedyDrawableTexture}
  10546. */
  10547. get encodedKeypoints()
  10548. {
  10549. return this._encodedKeypoints;
  10550. }
  10551. /**
  10552. * Descriptor size, in bytes
  10553. * @returns {number}
  10554. */
  10555. get descriptorSize()
  10556. {
  10557. return this._descriptorSize;
  10558. }
  10559. /**
  10560. * Extra size, in bytes
  10561. * @returns {number}
  10562. */
  10563. get extraSize()
  10564. {
  10565. return this._extraSize;
  10566. }
  10567. /**
  10568. * Encoder length
  10569. * @returns {number}
  10570. */
  10571. get encoderLength()
  10572. {
  10573. return this._encoderLength;
  10574. }
  10575. }
10576. /**
  10577. * A message transporting a set of 2D vectors
  10578. */
  10579. class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage
  10580. {
  10581. /**
  10582. * Constructor
  10583. */
  10584. constructor()
  10585. {
  10586. super(SpeedyPipelineMessageType.Vector2);
  10587. /** @type {SpeedyDrawableTexture} the set of vectors */
  10588. this._vectors = null;
  10589. }
  10590. /**
  10591. * Set parameters
  10592. * @param {SpeedyDrawableTexture} vectors the set of vectors
  10593. * @returns {SpeedyPipelineMessage} this message
  10594. */
  10595. set(vectors)
  10596. {
  10597. // set parameters
  10598. this._vectors = vectors;
  10599. // done!
  10600. return this;
  10601. }
  10602. /**
  10603. * Inspect this message for debugging purposes
  10604. * @param {SpeedyGPU} gpu
  10605. * @returns {SpeedyPipelineMessageDiagnosticData}
  10606. */
  10607. inspect(gpu)
  10608. {
  10609. return {
  10610. type: this.constructor.name,
  10611. vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
  10612. vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
  10613. };
  10614. }
  10615. /**
  10616. * The set of vectors
  10617. * @returns {SpeedyDrawableTexture}
  10618. */
  10619. get vectors()
  10620. {
  10621. return this._vectors;
  10622. }
  10623. }
  10624. /**
  10625. * A message transporting LSH tables
  10626. */
  10627. class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage
  10628. {
  10629. /**
  10630. * Constructor
  10631. */
  10632. constructor()
  10633. {
  10634. super(SpeedyPipelineMessageType.LSHTables);
  10635. /** @type {SpeedyLSH} LSH data structure */
  10636. this._lsh = null;
  10637. }
  10638. /**
  10639. * Set parameters
  10640. * @param {SpeedyLSH} lsh
  10641. * @returns {SpeedyPipelineMessage} this message
  10642. */
  10643. set(lsh)
  10644. {
  10645. // set parameters
  10646. this._lsh = lsh;
  10647. // done!
  10648. return this;
  10649. }
  10650. /**
  10651. * Inspect this message for debugging purposes
  10652. * @param {SpeedyGPU} gpu
  10653. * @returns {SpeedyPipelineMessageDiagnosticData}
  10654. */
  10655. inspect(gpu)
  10656. {
  10657. return {
  10658. type: this.constructor.name,
  10659. lsh: '<LSH tables>'
  10660. };
  10661. }
  10662. /**
  10663. * LSH data structure
  10664. * @returns {SpeedyLSH}
  10665. */
  10666. get lsh()
  10667. {
  10668. return this._lsh;
  10669. }
  10670. }
10671. /**
  10672. * A message transporting a set of keypoint matches
  10673. */
  10674. class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage
  10675. {
  10676. /**
  10677. * Constructor
  10678. */
  10679. constructor()
  10680. {
  10681. super(SpeedyPipelineMessageType.KeypointMatches);
  10682. /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
  10683. this._encodedMatches = null;
  10684. /** @type {number} number of matches per keypoint */
  10685. this._matchesPerKeypoint = 1;
  10686. }
  10687. /**
  10688. * Set parameters
  10689. * @param {SpeedyDrawableTexture} encodedMatches
  10690. * @param {number} matchesPerKeypoint
  10691. * @returns {SpeedyPipelineMessage} this message
  10692. */
  10693. set(encodedMatches, matchesPerKeypoint)
  10694. {
  10695. // set parameters
  10696. this._encodedMatches = encodedMatches;
  10697. this._matchesPerKeypoint = matchesPerKeypoint | 0;
  10698. // validate
  10699. utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
  10700. // done!
  10701. return this;
  10702. }
  10703. /**
  10704. * Inspect this message for debugging purposes
  10705. * @param {SpeedyGPU} gpu
  10706. * @returns {SpeedyPipelineMessageDiagnosticData}
  10707. */
  10708. inspect(gpu)
  10709. {
  10710. return {
  10711. type: this.constructor.name,
  10712. matchesPerKeypoint: this.matchesPerKeypoint,
  10713. encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
  10714. encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
  10715. };
  10716. }
  10717. /**
  10718. * The matches
  10719. * @returns {SpeedyDrawableTexture}
  10720. */
  10721. get encodedMatches()
  10722. {
  10723. return this._encodedMatches;
  10724. }
  10725. /**
  10726. * Number of matches per keypoint
  10727. * @returns {number}
  10728. */
  10729. get matchesPerKeypoint()
  10730. {
  10731. return this._matchesPerKeypoint;
  10732. }
  10733. }
  10734. //
  10735. // Utilities
  10736. //
  10737. /** Map message type to message class */
  10738. const MESSAGE_CLASS = Object.freeze({
  10739. [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
  10740. [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
  10741. [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
  10742. [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
  10743. [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
  10744. [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches,
  10745. });
  10746. /**
  10747. * Create a message of the specified type
  10748. * @param {SpeedyPipelineMessageType} type
  10749. * @returns {SpeedyPipelineMessage}
  10750. */
  10751. function createMessage(type)
  10752. {
  10753. //return Reflect.construct(MESSAGE_CLASS[type], []);
  10754. return new MESSAGE_CLASS[
  10755. // error TS2538: Type 'Symbol' cannot be used as an index type.
10756. // as a workaround, cast the Symbol to any
  10757. /** @type {any} */ ( type )
  10758. ];
  10759. }
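/*
 * [Editor's note] Illustrative sketch: how pipeline code obtains and fills a
 * message. `texture` is a placeholder for a SpeedyDrawableTexture; ImageFormat
 * is the unbundled name of the types/* ImageFormat *\/ enum used above.
 *
 *   const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image);
 *   msg.set(texture, ImageFormat.RGBA);            // an image message carries a texture + format
 *   msg.hasType(SpeedyPipelineMessageType.Image);  // true
 *   msg.isEmpty();                                 // false
 */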
  10760. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
  10761. /*
  10762. * speedy-vision.js
  10763. * GPU-accelerated Computer Vision for JavaScript
  10764. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10765. *
  10766. * Licensed under the Apache License, Version 2.0 (the "License");
  10767. * you may not use this file except in compliance with the License.
  10768. * You may obtain a copy of the License at
  10769. *
  10770. * http://www.apache.org/licenses/LICENSE-2.0
  10771. *
  10772. * Unless required by applicable law or agreed to in writing, software
  10773. * distributed under the License is distributed on an "AS IS" BASIS,
  10774. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10775. * See the License for the specific language governing permissions and
  10776. * limitations under the License.
  10777. *
  10778. * pipeline-portspec.js
  10779. * Specification (requirements) of a port of a node of a pipeline
  10780. */
  10781. /**
  10782. * A message constraint is a message validation predicate
  10783. * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
  10784. */
  10785. /**
  10786. * A validation predicate that validates all messages
  10787. * @type {SpeedyPipelineMessageConstraint}
  10788. */
  10789. const always = message => true;
  10790. /**
  10791. * Specification (requirements) of a port of a node of a pipeline
  10792. */
  10793. class SpeedyPipelinePortSpec
  10794. {
  10795. /**
  10796. * Constructor
  10797. * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
  10798. * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
  10799. */
  10800. constructor(expectedMessageType, messageConstraint = always)
  10801. {
  10802. /** @type {SpeedyPipelineMessageType} expected message type */
  10803. this._expectedMessageType = expectedMessageType;
  10804. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10805. this._isValidMessage = (typeof messageConstraint === 'function') ? messageConstraint : always;
  10806. // expect a valid type
  10807. utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
  10808. }
  10809. /**
  10810. * Checks if two specs have the same expected type
  10811. * @param {SpeedyPipelinePortSpec} spec
  10812. * @returns {boolean}
  10813. */
  10814. isCompatibleWith(spec)
  10815. {
  10816. return this._expectedMessageType == spec._expectedMessageType;
  10817. }
  10818. /**
  10819. * Is the given message accepted by a port that abides by this specification?
  10820. * @param {SpeedyPipelineMessage} message
  10821. * @returns {boolean}
  10822. */
  10823. accepts(message)
  10824. {
  10825. return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
  10826. }
  10827. /**
  10828. * Convert to string
  10829. * @returns {string}
  10830. */
  10831. toString()
  10832. {
  10833. const type = Object.keys(SpeedyPipelineMessageType).find(
  10834. type => SpeedyPipelineMessageType[type] === this._expectedMessageType
  10835. );
  10836. return `Port expects ${type} satisfying ${this._isValidMessage}`;
  10837. }
  10838. /**
  10839. * Expected message type
  10840. * @returns {SpeedyPipelineMessageType}
  10841. */
  10842. get expectedMessageType()
  10843. {
  10844. return this._expectedMessageType;
  10845. }
  10846. }
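/*
 * [Editor's note] Illustrative sketch: a port spec that only accepts keypoint
 * messages carrying no descriptors. The constraint and `someKeypointMessage`
 * are arbitrary examples.
 *
 *   const spec = new SpeedyPipelinePortSpec(
 *       SpeedyPipelineMessageType.Keypoints,
 *       message => message.descriptorSize == 0
 *   );
 *   spec.accepts(someKeypointMessage);   // true only if both the type and the constraint match
 */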
  10847. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
  10848. /*
  10849. * speedy-vision.js
  10850. * GPU-accelerated Computer Vision for JavaScript
  10851. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10852. *
  10853. * Licensed under the Apache License, Version 2.0 (the "License");
  10854. * you may not use this file except in compliance with the License.
  10855. * You may obtain a copy of the License at
  10856. *
  10857. * http://www.apache.org/licenses/LICENSE-2.0
  10858. *
  10859. * Unless required by applicable law or agreed to in writing, software
  10860. * distributed under the License is distributed on an "AS IS" BASIS,
  10861. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10862. * See the License for the specific language governing permissions and
  10863. * limitations under the License.
  10864. *
  10865. * pipeline-port.js
  10866. * Port of a node of a pipeline
  10867. */
  10868. // Constants
  10869. const DEFAULT_INPUT_PORT_NAME = 'in';
  10870. const DEFAULT_OUTPUT_PORT_NAME = 'out';
  10871. const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
  10872. const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
  10873. /**
  10874. * Diagnostic data
  10875. * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
  10876. */
  10877. /**
  10878. * Port of a node of a pipeline
  10879. * @abstract
  10880. */
  10881. class SpeedyPipelinePort
  10882. {
  10883. /**
  10884. * Constructor
  10885. * @param {string} name the name of this port
  10886. * @param {SpeedyPipelinePortSpec} spec port specification
  10887. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10888. */
  10889. constructor(name, spec, node)
  10890. {
  10891. /** @type {string} the name of this port */
  10892. this._name = String(name);
  10893. /** @type {SpeedyPipelinePortSpec} the specification of this port */
  10894. this._spec = spec;
  10895. /** @type {SpeedyPipelineNode} the node to which this port belongs */
  10896. this._node = node;
  10897. /** @type {SpeedyPipelineMessage} the message located in this port */
  10898. this._message = EMPTY_MESSAGE;
  10899. // check if we've got an acceptable port name
  10900. utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
  10901. }
  10902. /**
  10903. * The name of this port
  10904. * @returns {string}
  10905. */
  10906. get name()
  10907. {
  10908. return this._name;
  10909. }
  10910. /**
  10911. * The node to which this port belongs
  10912. * @returns {SpeedyPipelineNode}
  10913. */
  10914. get node()
  10915. {
  10916. return this._node;
  10917. }
  10918. /**
  10919. * Connect this port to another
  10920. * @abstract
  10921. * @param {SpeedyPipelinePort} port
  10922. */
  10923. connectTo(port)
  10924. {
  10925. throw new utils_errors/* AbstractMethodError */.aQ();
  10926. }
  10927. /**
  10928. * Is this an input port?
  10929. * @abstract
  10930. * @returns {boolean}
  10931. */
  10932. isInputPort()
  10933. {
  10934. throw new utils_errors/* AbstractMethodError */.aQ();
  10935. }
  10936. /**
  10937. * Is this an output port?
  10938. * @returns {boolean}
  10939. */
  10940. isOutputPort()
  10941. {
  10942. return !this.isInputPort();
  10943. }
  10944. /**
  10945. * Clear the message stored in this port
  10946. */
  10947. clearMessage()
  10948. {
  10949. this._message = EMPTY_MESSAGE;
  10950. }
  10951. /**
  10952. * Is there a valid message located in this port?
  10953. * @returns {boolean}
  10954. */
  10955. hasMessage()
  10956. {
  10957. return !this._message.isEmpty();
  10958. }
  10959. /**
  10960. * Read the message that is in this port
  10961. * @returns {SpeedyPipelineMessage}
  10962. */
  10963. read()
  10964. {
  10965. if(this._message.isEmpty())
  10966. throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
  10967. return this._message;
  10968. }
  10969. /**
  10970. * Write a message to this port
  10971. * @param {SpeedyPipelineMessage} message
  10972. */
  10973. write(message)
  10974. {
  10975. throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
  10976. }
  10977. /**
  10978. * Inspect this port for debugging purposes
  10979. * @param {SpeedyGPU} gpu
  10980. * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
  10981. */
  10982. inspect(gpu)
  10983. {
  10984. return this._message.inspect(gpu);
  10985. }
  10986. /**
  10987. * Default port name
  10988. * @abstract
  10989. * @returns {string}
  10990. */
  10991. static get DEFAULT_NAME()
  10992. {
  10993. throw new utils_errors/* AbstractMethodError */.aQ();
  10994. }
  10995. }
  10996. /**
  10997. * Output port
  10998. */
  10999. class SpeedyPipelineOutputPort extends SpeedyPipelinePort
  11000. {
  11001. /**
  11002. * Constructor
  11003. * @param {string} name the name of this port
  11004. * @param {SpeedyPipelinePortSpec} spec port specification
  11005. * @param {SpeedyPipelineNode} node the node to which this port belongs
  11006. */
  11007. constructor(name, spec, node)
  11008. {
  11009. super(name, spec, node);
  11010. /** @type {SpeedyPipelineMessage} cached message */
  11011. this._cachedMessage = null;
  11012. }
  11013. /**
  11014. * Connect this port to another
  11015. * @param {SpeedyPipelineInputPort} port
  11016. */
  11017. connectTo(port)
  11018. {
  11019. if(!port.isInputPort())
  11020. throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
  11021. port.connectTo(this);
  11022. }
  11023. /**
  11024. * Is this an input port?
  11025. * @returns {boolean}
  11026. */
  11027. isInputPort()
  11028. {
  11029. return false;
  11030. }
  11031. /**
  11032. * Write a message to this port
  11033. * @param {SpeedyPipelineMessage} message
  11034. */
  11035. write(message)
  11036. {
  11037. if(!this._spec.accepts(message))
  11038. throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
  11039. this._message = message;
  11040. }
  11041. /**
  11042. * Write a message to this port using a cached message object
  11043. * @param {...any} args to be passed to SpeedyPipelineMessage.set()
  11044. */
  11045. swrite(...args)
  11046. {
  11047. if(this._cachedMessage == null)
  11048. this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
  11049. this.write(this._cachedMessage.set(...args));
  11050. }
  11051. /**
  11052. * Default port name
  11053. * @returns {string}
  11054. */
  11055. static get DEFAULT_NAME()
  11056. {
  11057. return DEFAULT_OUTPUT_PORT_NAME;
  11058. }
  11059. }
  11060. /**
  11061. * Input port
  11062. */
  11063. class SpeedyPipelineInputPort extends SpeedyPipelinePort
  11064. {
  11065. /**
  11066. * Constructor
  11067. * @param {string} name the name of this port
  11068. * @param {SpeedyPipelinePortSpec} spec port specification
  11069. * @param {SpeedyPipelineNode} node the node to which this port belongs
  11070. */
  11071. constructor(name, spec, node)
  11072. {
  11073. super(name, spec, node);
  11074. /** @type {SpeedyPipelineOutputPort|null} incoming link */
  11075. this._incomingLink = null;
  11076. }
  11077. /**
  11078. * Incoming link
  11079. * @returns {SpeedyPipelineOutputPort|null}
  11080. */
  11081. get incomingLink()
  11082. {
  11083. return this._incomingLink;
  11084. }
  11085. /**
  11086. * Connect this port to another
  11087. * @param {SpeedyPipelineOutputPort} port
  11088. */
  11089. connectTo(port)
  11090. {
  11091. if(!port.isOutputPort())
  11092. throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);
  11093. else if(!this._spec.isCompatibleWith(port._spec))
  11094. throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
  11095. this._incomingLink = port;
  11096. }
  11097. /**
  11098. * Unlink this port
  11099. */
  11100. disconnect()
  11101. {
  11102. this._incomingLink = null;
  11103. }
  11104. /**
  11105. * Is this an input port?
  11106. * @returns {boolean}
  11107. */
  11108. isInputPort()
  11109. {
  11110. return true;
  11111. }
  11112. /**
  11113. * Receive a message using the incoming link
  11114. * @param {string} [nodeName]
  11115. * @returns {SpeedyPipelineMessage}
  11116. */
  11117. pullMessage(nodeName = '')
  11118. {
  11119. const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
  11120. if(this._incomingLink == null)
  11121. throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
  11122. const message = this._incomingLink.read();
  11123. if(!this._spec.accepts(message))
  11124. throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
  11125. return (this._message = message);
  11126. }
  11127. /**
  11128. * Default port name
  11129. * @returns {string}
  11130. */
  11131. static get DEFAULT_NAME()
  11132. {
  11133. return DEFAULT_INPUT_PORT_NAME;
  11134. }
  11135. }
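/*
 * [Editor's note] Illustrative sketch: wiring two nodes together. `source` and
 * `greyscale` stand for hypothetical node instances; an output port delegates to
 * the input port's connectTo(), as implemented above.
 *
 *   source.output().connectTo(greyscale.input());
 *   // during execution, greyscale.input().pullMessage() reads what source wrote
 */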
  11136. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
  11137. /*
  11138. * speedy-vision.js
  11139. * GPU-accelerated Computer Vision for JavaScript
  11140. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11141. *
  11142. * Licensed under the Apache License, Version 2.0 (the "License");
  11143. * you may not use this file except in compliance with the License.
  11144. * You may obtain a copy of the License at
  11145. *
  11146. * http://www.apache.org/licenses/LICENSE-2.0
  11147. *
  11148. * Unless required by applicable law or agreed to in writing, software
  11149. * distributed under the License is distributed on an "AS IS" BASIS,
  11150. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11151. * See the License for the specific language governing permissions and
  11152. * limitations under the License.
  11153. *
  11154. * pipeline-portbuilder.js
  11155. * Builder of a port of a node of a pipeline
  11156. */
  11157. /**
  11158. * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
  11159. */
  11160. /**
  11161. * Builder of a port of a node of a pipeline
  11162. */
  11163. class SpeedyPipelinePortBuilder
  11164. {
  11165. /**
  11166. * Constructor
  11167. * @param {typeof SpeedyPipelinePort} portClass input or output?
  11168. * @param {string} portName
  11169. */
  11170. constructor(portClass, portName)
  11171. {
  11172. /** @type {typeof SpeedyPipelinePort} input or output? */
  11173. this._class = portClass;
  11174. /** @type {string} port name */
  11175. this._name = String(portName);
  11176. /** @type {SpeedyPipelineMessageType} accepted message type */
  11177. this._type = SpeedyPipelineMessageType.Nothing;
  11178. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  11179. this._messageConstraint = undefined;
  11180. }
  11181. /**
  11182. * Declare that the new port expects a certain type of message
  11183. * @param {SpeedyPipelineMessageType} type expected type
  11184. * @returns {SpeedyPipelinePortBuilder} this builder
  11185. */
  11186. expects(type)
  11187. {
  11188. utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
  11189. utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
  11190. this._type = type;
  11191. return this;
  11192. }
  11193. /**
  11194. * Declare that the new port expects messages satisfying a constraint
  11195. * @param {SpeedyPipelineMessageConstraint} constraint
  11196. * @returns {SpeedyPipelinePortBuilder} this builder
  11197. */
  11198. satisfying(constraint)
  11199. {
  11200. utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
  11201. utils/* Utils */.A.assert(this._messageConstraint === undefined);
  11202. utils/* Utils */.A.assert(typeof constraint === 'function');
  11203. this._messageConstraint = constraint;
  11204. return this;
  11205. }
  11206. /**
  11207. * Build a port
  11208. * @param {SpeedyPipelineNode} node the node to which the new port will belong
  11209. * @returns {SpeedyPipelinePort}
  11210. */
  11211. build(node)
  11212. {
  11213. const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
  11214. return Reflect.construct(this._class, [this._name, spec, node]);
  11215. }
  11216. }
  11217. /**
  11218. * Creates a builder for an input port
  11219. * @param {string} [portName]
  11220. * @returns {SpeedyPipelinePortBuilder}
  11221. */
  11222. function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME)
  11223. {
  11224. return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
  11225. }
  11226. /**
  11227. * Creates a builder for an output port
  11228. * @param {string} [portName]
  11229. * @returns {SpeedyPipelinePortBuilder}
  11230. */
  11231. function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME)
  11232. {
  11233. return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
  11234. }
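/*
 * [Editor's note] Illustrative sketch: how a node declares its ports with these
 * builders (see SpeedyPipelineNode below). The constraint is an arbitrary example.
 *
 *   const portBuilders = [
 *       InputPort().expects(SpeedyPipelineMessageType.Image),
 *       OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
 *                   .satisfying(message => message.encoderLength > 0)
 *   ];
 *   // later, the node does: portBuilders.map(builder => builder.build(node))
 */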
  11235. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
  11236. /*
  11237. * speedy-vision.js
  11238. * GPU-accelerated Computer Vision for JavaScript
  11239. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11240. *
  11241. * Licensed under the Apache License, Version 2.0 (the "License");
  11242. * you may not use this file except in compliance with the License.
  11243. * You may obtain a copy of the License at
  11244. *
  11245. * http://www.apache.org/licenses/LICENSE-2.0
  11246. *
  11247. * Unless required by applicable law or agreed to in writing, software
  11248. * distributed under the License is distributed on an "AS IS" BASIS,
  11249. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11250. * See the License for the specific language governing permissions and
  11251. * limitations under the License.
  11252. *
  11253. * pipeline-node.js
  11254. * Node of a pipeline
  11255. */
  11256. /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
  11257. /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
  11258. /** Generate a random name for a node */
11259. const generateRandomName = () => Math.random().toString(16).slice(2);
  11260. /** Create an empty input port dictionary */
  11261. const createInputPortDictionary = () => /** @type {InputPortDictionary} */ ( Object.create(null) );
  11262. /** Create an empty output port dictionary */
  11263. const createOutputPortDictionary = () => /** @type {OutputPortDictionary} */ ( Object.create(null) );
  11264. /**
  11265. * Map an array of input ports to an InputPortDictionary whose keys are their names
  11266. * @param {SpeedyPipelineInputPort[]} ports
  11267. * @returns {InputPortDictionary}
  11268. */
  11269. function InputPortDictionary(ports)
  11270. {
  11271. return ports.reduce((dict, port) => ((dict[port.name] = port), dict), createInputPortDictionary());
  11272. }
  11273. /**
  11274. * Map an array of output ports to an OutputPortDictionary whose keys are their names
  11275. * @param {SpeedyPipelineOutputPort[]} ports
  11276. * @returns {OutputPortDictionary}
  11277. */
  11278. function OutputPortDictionary(ports)
  11279. {
  11280. return ports.reduce((dict, port) => ((dict[port.name] = port), dict), createOutputPortDictionary());
  11281. }
  11282. /** A flag used for debugging purposes */
  11283. let _texView = false;
  11284. /**
  11285. * Node of a pipeline
  11286. * @abstract
  11287. */
  11288. class SpeedyPipelineNode
  11289. {
  11290. /**
  11291. * Constructor
  11292. * @param {string} [name] the name of this node
  11293. * @param {number} [texCount] number of work textures
  11294. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  11295. */
  11296. constructor(name = generateRandomName(), texCount = 0, portBuilders = [])
  11297. {
  11298. /** @type {string} the name of this node */
  11299. this._name = String(name);
  11300. /** @type {SpeedyDrawableTexture[]} work texture(s) */
  11301. this._tex = (new Array(texCount)).fill(null);
  11302. // build the ports
  11303. const ports = portBuilders.map(builder => builder.build(this));
  11304. const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ ( ports.filter(port => port.isInputPort()) );
  11305. const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ ( ports.filter(port => port.isOutputPort()) );
  11306. /** @type {InputPortDictionary} input ports */
  11307. this._inputPorts = InputPortDictionary(inputPorts);
  11308. /** @type {OutputPortDictionary} output ports */
  11309. this._outputPorts = OutputPortDictionary(outputPorts);
  11310. // validate
  11311. if(this._name.length == 0)
  11312. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);
  11313. else if(portBuilders.length == 0)
  11314. throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
  11315. }
  11316. /**
  11317. * The name of this node
  11318. * @returns {string}
  11319. */
  11320. get name()
  11321. {
  11322. return this._name;
  11323. }
  11324. /**
  11325. * Name and type of this node
  11326. * @returns {string}
  11327. */
  11328. get fullName()
  11329. {
  11330. return `${this.constructor.name}[${this.name}]`;
  11331. }
  11332. /**
  11333. * Find input port by name
  11334. * @param {string} [portName]
  11335. * @returns {SpeedyPipelineInputPort}
  11336. */
  11337. input(portName = SpeedyPipelineInputPort.DEFAULT_NAME)
  11338. {
  11339. if(portName in this._inputPorts)
  11340. return this._inputPorts[portName];
  11341. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
  11342. }
  11343. /**
  11344. * Find output port by name
  11345. * @param {string} [portName]
  11346. * @returns {SpeedyPipelineOutputPort}
  11347. */
  11348. output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME)
  11349. {
  11350. if(portName in this._outputPorts)
  11351. return this._outputPorts[portName];
  11352. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
  11353. }
  11354. /**
  11355. * Get data from the input ports and execute
  11356. * the task that this node is supposed to!
  11357. * @param {SpeedyGPU} gpu
  11358. * @returns {void|SpeedyPromise<void>}
  11359. */
  11360. execute(gpu)
  11361. {
  11362. let portName;
  11363. // clear output ports
  11364. for(portName in this._outputPorts)
  11365. this._outputPorts[portName].clearMessage();
  11366. // let the input ports receive what is due
  11367. for(portName in this._inputPorts)
  11368. this._inputPorts[portName].pullMessage(this.fullName);
  11369. // run the task
  11370. const runTask = this._run(gpu);
  11371. if(typeof runTask === 'undefined')
  11372. return void(this._finishExecution(gpu));
  11373. else
  11374. return runTask.then(() => this._finishExecution(gpu));
  11375. }
  11376. /**
  11377. * Finish the execution of this node;
  11378. * to be called after execute()
  11379. * @param {SpeedyGPU} gpu
  11380. */
  11381. _finishExecution(gpu)
  11382. {
  11383. // ensure that no output ports are empty
  11384. for(const portName in this._outputPorts) {
  11385. utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
  11386. }
11387. // log diagnostic data for this node / pipeline
  11388. if(settings/* Settings */.w.logging === 'diagnostic') {
  11389. utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
  11390. // Inspecting the data has performance implications.
  11391. // It is for diagnostic purposes only, not meant to be done in production!
  11392. for(const portName in this._inputPorts)
  11393. utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
  11394. for(const portName in this._outputPorts)
  11395. utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
  11396. }
  11397. }
  11398. /**
  11399. * Run the specific task of this node
  11400. * @abstract
  11401. * @param {SpeedyGPU} gpu
  11402. * @returns {void|SpeedyPromise<void>}
  11403. */
  11404. _run(gpu)
  11405. {
  11406. throw new utils_errors/* AbstractMethodError */.aQ();
  11407. }
  11408. /**
  11409. * Initializes this node
  11410. * @param {SpeedyGPU} gpu
  11411. */
  11412. init(gpu)
  11413. {
  11414. gpu.subscribe(this._allocateWorkTextures, this, gpu);
  11415. this._allocateWorkTextures(gpu);
  11416. }
  11417. /**
  11418. * Releases this node
  11419. * @param {SpeedyGPU} gpu
  11420. */
  11421. release(gpu)
  11422. {
  11423. this._deallocateWorkTextures(gpu);
  11424. gpu.unsubscribe(this._allocateWorkTextures, this);
  11425. }
  11426. /**
  11427. * Clear all ports
  11428. */
  11429. clearPorts()
  11430. {
  11431. let portName;
  11432. for(portName in this._inputPorts)
  11433. this._inputPorts[portName].clearMessage();
  11434. for(portName in this._outputPorts)
  11435. this._outputPorts[portName].clearMessage();
  11436. }
  11437. /**
  11438. * Find all nodes that feed input to this node
  11439. * @returns {SpeedyPipelineNode[]}
  11440. */
  11441. inputNodes()
  11442. {
  11443. const nodes = [];
  11444. for(const portName in this._inputPorts) {
  11445. const port = this._inputPorts[portName];
  11446. if(port.incomingLink != null)
  11447. nodes.push(port.incomingLink.node);
  11448. }
  11449. return nodes;
  11450. }
  11451. /**
  11452. * Is this a source of the pipeline?
  11453. * @returns {boolean}
  11454. */
  11455. isSource()
  11456. {
  11457. return false;
  11458. }
  11459. /**
  11460. * Is this a sink of the pipeline?
  11461. * @returns {boolean}
  11462. */
  11463. isSink()
  11464. {
  11465. return false;
  11466. // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
  11467. //return Object.keys(this._outputPorts).length == 0;
  11468. }
  11469. /**
  11470. * Allocate work texture(s)
  11471. * @param {SpeedyGPU} gpu
  11472. */
  11473. _allocateWorkTextures(gpu)
  11474. {
  11475. for(let j = 0; j < this._tex.length; j++)
  11476. this._tex[j] = gpu.texturePool.allocate();
  11477. }
  11478. /**
  11479. * Deallocate work texture(s)
  11480. * @param {SpeedyGPU} gpu
  11481. */
  11482. _deallocateWorkTextures(gpu)
  11483. {
  11484. for(let j = this._tex.length - 1; j >= 0; j--)
  11485. this._tex[j] = gpu.texturePool.free(this._tex[j]);
  11486. }
  11487. /**
  11488. * Visually inspect a texture for debugging purposes
  11489. * @param {SpeedyGPU} gpu
  11490. * @param {SpeedyDrawableTexture} texture
  11491. */
  11492. _visualize(gpu, texture)
  11493. {
  11494. const canvas = gpu.renderToCanvas(texture);
  11495. if(!_texView) {
  11496. document.body.appendChild(canvas);
  11497. _texView = true;
  11498. }
  11499. }
  11500. }
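/*
 * [Editor's note] Illustrative sketch: the minimal shape of a concrete node.
 * The class name and the GPU work are hypothetical; the pattern (declare ports
 * in the constructor, read inputs and swrite outputs in _run) is the one used
 * by the nodes of this pipeline.
 *
 *   class MyImageFilterNode extends SpeedyPipelineNode
 *   {
 *       constructor(name = undefined)
 *       {
 *           super(name, 1, [
 *               InputPort().expects(SpeedyPipelineMessageType.Image),
 *               OutputPort().expects(SpeedyPipelineMessageType.Image)
 *           ]);
 *       }
 *
 *       _run(gpu)
 *       {
 *           const { image, format } = this.input().read();
 *           // ... render the filtered image to this._tex[0] using gpu programs ...
 *           this.output().swrite(this._tex[0], format);
 *       }
 *   }
 */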
  11501. /**
  11502. * Source node (a node with no input ports)
  11503. * @abstract
  11504. */
  11505. class SpeedyPipelineSourceNode extends SpeedyPipelineNode
  11506. {
  11507. /**
  11508. * Constructor
  11509. * @param {string} [name] the name of this node
  11510. * @param {number} [texCount] number of work textures
  11511. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  11512. */
  11513. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  11514. {
  11515. super(name, texCount, portBuilders);
  11516. utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
  11517. }
  11518. /**
  11519. * Is this a source of the pipeline?
  11520. * @returns {boolean}
  11521. */
  11522. isSource()
  11523. {
  11524. return true;
  11525. }
  11526. }
  11527. /**
  11528. * Sink node (a node with no output ports)
  11529. * @abstract
  11530. */
  11531. class SpeedyPipelineSinkNode extends SpeedyPipelineNode
  11532. {
  11533. /**
  11534. * Constructor
  11535. * @param {string} [name] the name of this node
  11536. * @param {number} [texCount] number of work textures
  11537. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  11538. */
  11539. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  11540. {
  11541. super(name, texCount, portBuilders);
  11542. utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
  11543. }
  11544. /**
  11545. * Export data from this node to the user
  11546. * @abstract
  11547. * @returns {SpeedyPromise<any>}
  11548. */
  11549. export()
  11550. {
  11551. throw new utils_errors/* AbstractMethodError */.aQ();
  11552. }
  11553. /**
  11554. * Is this a sink of the pipeline?
  11555. * @returns {boolean}
  11556. */
  11557. isSink()
  11558. {
  11559. return true;
  11560. }
  11561. }
  11562. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
  11563. /*
  11564. * speedy-vision.js
  11565. * GPU-accelerated Computer Vision for JavaScript
  11566. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11567. *
  11568. * Licensed under the Apache License, Version 2.0 (the "License");
  11569. * you may not use this file except in compliance with the License.
  11570. * You may obtain a copy of the License at
  11571. *
  11572. * http://www.apache.org/licenses/LICENSE-2.0
  11573. *
  11574. * Unless required by applicable law or agreed to in writing, software
  11575. * distributed under the License is distributed on an "AS IS" BASIS,
  11576. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11577. * See the License for the specific language governing permissions and
  11578. * limitations under the License.
  11579. *
11580. * speedy-keypoint-match.js
  11581. * A match between two keypoint descriptors
  11582. */
  11583. // Constants
  11584. const MATCH_NOT_FOUND = -1;
  11585. /**
  11586. * A match between two keypoint descriptors
  11587. */
  11588. class SpeedyKeypointMatch
  11589. {
  11590. /**
  11591. * Constructor
  11592. * @param {number} index index of the stored keypoint, a non-negative integer
  11593. * @param {number} distance a measure of the quality of the match, a non-negative number
  11594. */
  11595. constructor(index, distance)
  11596. {
  11597. const isValid = distance < globals.MATCH_MAX_DISTANCE;
  11598. /** @type {number} index of the stored keypoint */
  11599. this._index = isValid ? (index | 0) : MATCH_NOT_FOUND;
  11600. /** @type {number} a measure of the quality of the match */
  11601. this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
  11602. // done!
  11603. return Object.freeze(this);
  11604. }
  11605. /**
  11606. * The index of the stored keypoint
  11607. * @returns {number}
  11608. */
  11609. get index()
  11610. {
  11611. return this._index;
  11612. }
  11613. /**
  11614. * A measure of the quality of the match (lower values indicate better matches)
  11615. * @returns {number}
  11616. */
  11617. get distance()
  11618. {
  11619. return this._distance;
  11620. }
  11621. /**
  11622. * A string representation of the keypoint match
  11623. * @returns {string}
  11624. */
  11625. toString()
  11626. {
  11627. return `SpeedyKeypointMatch(${this.index},${this.distance})`;
  11628. }
  11629. }
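/*
 * [Editor's note] Illustrative sketch: consuming matches. `matchedKeypoint` is a
 * placeholder for a SpeedyMatchedKeypoint (defined below); an index equal to
 * MATCH_NOT_FOUND (-1) means no valid match was found.
 *
 *   for(const match of matchedKeypoint.matches) {
 *       if(match.index != MATCH_NOT_FOUND)
 *           console.log(`matched stored keypoint #${match.index} with distance ${match.distance}`);
 *   }
 */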
  11630. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
  11631. /*
  11632. * speedy-vision.js
  11633. * GPU-accelerated Computer Vision for JavaScript
  11634. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11635. *
  11636. * Licensed under the Apache License, Version 2.0 (the "License");
  11637. * you may not use this file except in compliance with the License.
  11638. * You may obtain a copy of the License at
  11639. *
  11640. * http://www.apache.org/licenses/LICENSE-2.0
  11641. *
  11642. * Unless required by applicable law or agreed to in writing, software
  11643. * distributed under the License is distributed on an "AS IS" BASIS,
  11644. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11645. * See the License for the specific language governing permissions and
  11646. * limitations under the License.
  11647. *
  11648. * speedy-keypoint.js
  11649. * Keypoint class
  11650. */
  11651. /**
  11652. * Represents a keypoint
  11653. */
  11654. class SpeedyKeypoint
  11655. {
  11656. /**
  11657. * Constructor
  11658. * @param {number} x X position
  11659. * @param {number} y Y position
  11660. * @param {number} [lod] Level-of-detail
  11661. * @param {number} [rotation] Rotation in radians
  11662. * @param {number} [score] Cornerness measure
  11663. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11664. */
  11665. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null)
  11666. {
  11667. /** @type {SpeedyPoint2} keypoint position */
  11668. this._position = new SpeedyPoint2(+x, +y);
  11669. /** @type {number} level of detail */
  11670. this._lod = +lod;
  11671. /** @type {number} rotation in radians */
  11672. this._rotation = +rotation;
  11673. /** @type {number} a cornerness measure */
  11674. this._score = +score;
  11675. /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
  11676. this._descriptor = descriptor;
  11677. }
  11678. /**
  11679. * Converts this keypoint to a descriptive string
  11680. * @returns {string}
  11681. */
  11682. toString()
  11683. {
  11684. return `SpeedyKeypoint(${this.x},${this.y})`;
  11685. }
  11686. /**
  11687. * The position of this keypoint
  11688. * @returns {SpeedyPoint2}
  11689. */
  11690. get position()
  11691. {
  11692. return this._position;
  11693. }
  11694. /**
  11695. * The x-position of this keypoint
  11696. * @returns {number}
  11697. */
  11698. get x()
  11699. {
  11700. return this._position.x;
  11701. }
  11702. /**
  11703. * The x-position of this keypoint
  11704. * @param {number} value
  11705. */
  11706. set x(value)
  11707. {
  11708. this._position.x = +value;
  11709. }
  11710. /**
  11711. * The y-position of this keypoint
  11712. * @returns {number}
  11713. */
  11714. get y()
  11715. {
  11716. return this._position.y;
  11717. }
  11718. /**
  11719. * The y-position of this keypoint
  11720. * @param {number} value
  11721. */
  11722. set y(value)
  11723. {
  11724. this._position.y = +value;
  11725. }
  11726. /**
  11727. * The pyramid level-of-detail from which this keypoint was extracted
  11728. * @returns {number}
  11729. */
  11730. get lod()
  11731. {
  11732. return this._lod;
  11733. }
  11734. /**
  11735. * Scale: 2^lod
  11736. * @returns {number}
  11737. */
  11738. get scale()
  11739. {
  11740. return Math.pow(2, this._lod);
  11741. }
  11742. /**
  11743. * The orientation of the keypoint, in radians
  11744. * @returns {number} Angle in radians
  11745. */
  11746. get rotation()
  11747. {
  11748. return this._rotation;
  11749. }
  11750. /**
  11751. * Score: a cornerness measure
  11752. * @returns {number} Score
  11753. */
  11754. get score()
  11755. {
  11756. return this._score;
  11757. }
  11758. /**
  11759. * Keypoint descriptor
11760. * @returns {SpeedyKeypointDescriptor|null}
  11761. */
  11762. get descriptor()
  11763. {
  11764. return this._descriptor;
  11765. }
  11766. }
  11767. /**
  11768. * Represents a tracked keypoint
  11769. */
  11770. class SpeedyTrackedKeypoint extends SpeedyKeypoint
  11771. {
  11772. /**
  11773. * Constructor
  11774. * @param {number} x X position
  11775. * @param {number} y Y position
  11776. * @param {number} [lod] Level-of-detail
  11777. * @param {number} [rotation] Rotation in radians
  11778. * @param {number} [score] Cornerness measure
  11779. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11780. * @param {SpeedyVector2} [flow] flow vector
  11781. */
  11782. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0,0))
  11783. {
  11784. super(x, y, lod, rotation, score, descriptor);
  11785. /** @type {SpeedyVector2} flow vector */
  11786. this._flow = flow;
  11787. }
  11788. /**
  11789. * Flow vector
  11790. * @returns {SpeedyVector2}
  11791. */
  11792. get flow()
  11793. {
  11794. return this._flow;
  11795. }
  11796. }
  11797. /**
  11798. * Represents a matched keypoint
  11799. */
  11800. class SpeedyMatchedKeypoint extends SpeedyKeypoint
  11801. {
  11802. /**
  11803. * Constructor
  11804. * @param {number} x X position
  11805. * @param {number} y Y position
  11806. * @param {number} [lod] Level-of-detail
  11807. * @param {number} [rotation] Rotation in radians
  11808. * @param {number} [score] Cornerness measure
  11809. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11810. * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
  11811. */
  11812. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = [])
  11813. {
  11814. super(x, y, lod, rotation, score, descriptor);
  11815. /** @type {SpeedyKeypointMatch[]} keypoint matches */
  11816. this._matches = matches;
  11817. }
  11818. /**
  11819. * Keypoint matches
  11820. * @returns {SpeedyKeypointMatch[]}
  11821. */
  11822. get matches()
  11823. {
  11824. return this._matches;
  11825. }
  11826. }
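/*
 * A minimal usage sketch of the keypoint classes above. Descriptors, flow vectors
 * and matches are normally produced by pipeline nodes, not built by hand; the
 * values below are made up for illustration only.
 *
 *   const kp = new SpeedyKeypoint(320, 240, 1, Math.PI / 4, 75);
 *   console.log(kp.toString());  // "SpeedyKeypoint(320,240)"
 *   console.log(kp.scale);       // 2^lod = 2
 *   console.log(kp.rotation);    // ~0.785 radians
 *
 *   const tracked = new SpeedyTrackedKeypoint(320, 240, 0, 0, 75, null, new SpeedyVector2(2, -1));
 *   console.log(tracked.flow);   // flow vector estimated by a tracker
 */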
  11827. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
  11828. /*
  11829. * speedy-vision.js
  11830. * GPU-accelerated Computer Vision for JavaScript
  11831. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11832. *
  11833. * Licensed under the Apache License, Version 2.0 (the "License");
  11834. * you may not use this file except in compliance with the License.
  11835. * You may obtain a copy of the License at
  11836. *
  11837. * http://www.apache.org/licenses/LICENSE-2.0
  11838. *
  11839. * Unless required by applicable law or agreed to in writing, software
  11840. * distributed under the License is distributed on an "AS IS" BASIS,
  11841. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11842. * See the License for the specific language governing permissions and
  11843. * limitations under the License.
  11844. *
  11845. * pipeline.js
  11846. * A pipeline is a network of nodes in which data flows to a sink
  11847. */
  11848. /**
  11849. * A dictionary indexed by the names of the sink nodes
  11850. * @typedef {Object<string,any>} SpeedyPipelineOutput
  11851. */
  11852. /** @type {SpeedyGPU} shared GPU programs & textures */
  11853. let gpu = null;
  11854. /** @type {number} gpu reference count */
  11855. let referenceCount = 0;
  11856. /**
  11857. * A pipeline is a network of nodes in which data flows to a sink
  11858. */
  11859. class SpeedyPipeline
  11860. {
  11861. /**
  11862. * Constructor
  11863. */
  11864. constructor()
  11865. {
  11866. /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
  11867. this._nodes = [];
  11868. /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
  11869. this._sequence = [];
  11870. /** @type {boolean} are we running the pipeline at this moment? */
  11871. this._busy = false;
  11872. }
  11873. /**
  11874. * Find a node by its name
  11875. * @template T extends SpeedyPipelineNode
  11876. * @param {string} name
  11877. * @returns {T|null}
  11878. */
  11879. node(name)
  11880. {
  11881. for(let i = 0, n = this._nodes.length; i < n; i++) {
  11882. if(this._nodes[i].name === name)
  11883. return this._nodes[i];
  11884. }
  11885. return null;
  11886. }
  11887. /**
  11888. * Initialize the pipeline
  11889. * @param {...SpeedyPipelineNode} nodes
  11890. * @returns {SpeedyPipeline} this pipeline
  11891. */
  11892. init(...nodes)
  11893. {
  11894. // validate
  11895. if(this._nodes.length > 0)
  11896. throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);
  11897. else if(nodes.length == 0)
  11898. throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
  11899. // create a GPU instance and increase the reference count
  11900. if(0 == referenceCount++) {
  11901. utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
  11902. gpu = new SpeedyGPU();
  11903. }
  11904. // add nodes to the network
  11905. for(let i = 0; i < nodes.length; i++) {
  11906. const node = nodes[i];
  11907. if(!this._nodes.includes(node))
  11908. this._nodes.push(node);
  11909. }
  11910. // generate the sequence of nodes
  11911. this._sequence = SpeedyPipeline._tsort(this._nodes);
  11912. SpeedyPipeline._validateSequence(this._sequence);
  11913. // initialize nodes
  11914. for(let i = 0; i < this._sequence.length; i++)
  11915. this._sequence[i].init(gpu);
  11916. // done!
  11917. return this;
  11918. }
  11919. /**
  11920. * Release the resources associated with this pipeline
  11921. * @returns {null}
  11922. */
  11923. release()
  11924. {
  11925. if(this._nodes.length == 0)
  11926. throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
  11927. // release nodes
  11928. for(let i = this._sequence.length - 1; i >= 0; i--)
  11929. this._sequence[i].release(gpu);
  11930. this._sequence.length = 0;
  11931. this._nodes.length = 0;
  11932. // decrease reference count and release GPU if necessary
  11933. if(0 == --referenceCount)
  11934. gpu = gpu.release();
  11935. // done!
  11936. return null;
  11937. }
  11938. /**
  11939. * Run the pipeline
  11940. * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
  11941. */
  11942. run()
  11943. {
  11944. utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
  11945. // is the pipeline busy?
  11946. if(this._busy) {
  11947. // if so, we need to wait 'til it finishes
  11948. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  11949. setTimeout(() => this.run().then(resolve, reject), 0);
  11950. });
  11951. }
  11952. else {
  11953. // the pipeline is now busy and won't accept concurrent tasks
  11954. // (we allocate textures using a single pool)
  11955. this._busy = true;
  11956. }
  11957. // find the sinks
  11958. const sinks = /** @type {SpeedyPipelineSinkNode[]} */ ( this._sequence.filter(node => node.isSink()) );
  11959. // create output template
  11960. const template = SpeedyPipeline._createOutputTemplate(sinks);
  11961. // diagnostic log
  11962. if(settings/* Settings */.w.logging === 'diagnostic')
  11963. utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
  11964. // run the pipeline
  11965. return SpeedyPipeline._runSequence(this._sequence).then(() =>
  11966. // export results
  11967. speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
  11968. // aggregate results by the names of the sinks
  11969. results.reduce((obj, val, idx) => ((obj[sinks[idx].name] = val), obj), template)
  11970. )
  11971. ).finally(() => {
  11972. // clear all ports
  11973. for(let i = this._sequence.length - 1; i >= 0; i--)
  11974. this._sequence[i].clearPorts();
  11975. // the pipeline is no longer busy
  11976. this._busy = false;
  11977. // diagnostic log
  11978. if(settings/* Settings */.w.logging === 'diagnostic') {
  11979. utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
  11980. Object.keys(template).forEach(entry => {
  11981. utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
  11982. });
  11983. }
  11984. }).turbocharge();
  11985. }
  11986. /**
  11987. * @internal
  11988. *
  11989. * GPU instance
  11990. * @returns {SpeedyGPU}
  11991. */
  11992. get _gpu()
  11993. {
  11994. return gpu;
  11995. }
  11996. /**
  11997. * Execute the tasks of a sequence of nodes
  11998. * @param {SpeedyPipelineNode[]} sequence sequence of nodes
  11999. * @param {number} [i] in [0,n)
  12000. * @param {number} [n] number of nodes
  12001. * @returns {SpeedyPromise<void>}
  12002. */
  12003. static _runSequence(sequence, i = 0, n = sequence.length)
  12004. {
  12005. for(; i < n; i++) {
  12006. const runTask = sequence[i].execute(gpu);
  12007. // this call greatly improves performance when downloading pixel data using PBOs
  12008. gpu.gl.flush();
  12009. if(typeof runTask !== 'undefined')
  12010. return runTask.then(() => SpeedyPipeline._runSequence(sequence, i+1, n));
  12011. }
  12012. return speedy_promise/* SpeedyPromise */.i.resolve();
  12013. }
  12014. /**
  12015. * Topological sorting
  12016. * @param {SpeedyPipelineNode[]} nodes
  12017. * @returns {SpeedyPipelineNode[]}
  12018. */
  12019. static _tsort(nodes)
  12020. {
  12021. /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
  12022. const outlinks = SpeedyPipeline._outlinks(nodes);
  12023. const stack = nodes.map(node => /** @type {StackNode} */ ([ node, false ]) );
  12024. const trash = new Set();
  12025. const sorted = new Array(nodes.length);
  12026. let j = sorted.length;
  12027. while(stack.length > 0) {
  12028. const [ node, done ] = stack.pop();
  12029. if(!done) {
  12030. if(!trash.has(node)) {
  12031. const outnodes = outlinks.get(node);
  12032. trash.add(node);
  12033. stack.push([ node, true ]);
  12034. stack.push(...(outnodes.map(node => /** @type {StackNode} */ ([ node, false ]) )));
  12035. if(outnodes.some(node => trash.has(node) && !sorted.includes(node)))
  12036. throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
  12037. }
  12038. }
  12039. else
  12040. sorted[--j] = node;
  12041. }
  12042. return sorted;
  12043. }
  12044. /**
  12045. * Figure out the outgoing links of all nodes
  12046. * @param {SpeedyPipelineNode[]} nodes
  12047. * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
  12048. */
  12049. static _outlinks(nodes)
  12050. {
  12051. const outlinks = new Map();
  12052. for(let k = 0; k < nodes.length; k++)
  12053. outlinks.set(nodes[k], []);
  12054. for(let i = 0; i < nodes.length; i++) {
  12055. const to = nodes[i];
  12056. const inputs = to.inputNodes();
  12057. for(let j = 0; j < inputs.length; j++) {
  12058. const from = inputs[j];
  12059. const links = outlinks.get(from);
  12060. if(!links)
  12061. throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
  12062. if(!links.includes(to))
  12063. links.push(to);
  12064. }
  12065. }
  12066. return outlinks;
  12067. }
  12068. /**
  12069. * Generate the output template by aggregating the names of the sinks
  12070. * @param {SpeedyPipelineNode[]} [sinks]
  12071. * @returns {SpeedyPipelineOutput}
  12072. */
  12073. static _createOutputTemplate(sinks = [])
  12074. {
  12075. const template = Object.create(null);
  12076. for(let i = sinks.length - 1; i >= 0; i--)
  12077. template[sinks[i].name] = null;
  12078. return template;
  12079. }
  12080. /**
  12081. * Validate a sequence of nodes
  12082. * @param {SpeedyPipelineNode[]} sequence
  12083. */
  12084. static _validateSequence(sequence)
  12085. {
  12086. if(sequence.length == 0)
  12087. throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);
  12088. else if(!sequence[0].isSource())
  12089. throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);
  12090. else if(!sequence.find(node => node.isSink()))
  12091. throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
  12092. }
  12093. }
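/*
 * A minimal lifecycle sketch: init() -> run() -> release(). The image source/sink
 * nodes and the port-connection call (output().connectTo(input())) are defined
 * elsewhere in this file and are assumptions here, shown for illustration only.
 *
 *   const pipeline = new SpeedyPipeline();
 *   const source = new SpeedyPipelineNodeImageSource('my-source');
 *   const sink = new SpeedyPipelineNodeImageSink('my-image');
 *
 *   source.output().connectTo(sink.input()); // assumed connection API
 *   pipeline.init(source, sink);             // builds & validates the node network
 *
 *   source.media = someSpeedyMedia;          // hypothetical SpeedyMedia instance
 *   pipeline.run().then(result => {
 *       console.log(result['my-image']);     // results are indexed by sink names
 *       pipeline.release();                  // free GPU resources when done
 *   });
 */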
  12094. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
  12095. /*
  12096. * speedy-vision.js
  12097. * GPU-accelerated Computer Vision for JavaScript
  12098. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12099. *
  12100. * Licensed under the Apache License, Version 2.0 (the "License");
  12101. * you may not use this file except in compliance with the License.
  12102. * You may obtain a copy of the License at
  12103. *
  12104. * http://www.apache.org/licenses/LICENSE-2.0
  12105. *
  12106. * Unless required by applicable law or agreed to in writing, software
  12107. * distributed under the License is distributed on an "AS IS" BASIS,
  12108. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12109. * See the License for the specific language governing permissions and
  12110. * limitations under the License.
  12111. *
  12112. * image-input.js
  12113. * Gets an image into a pipeline
  12114. */
  12115. // Constants
  12116. const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
  12117. /**
  12118. * Gets an image into a pipeline
  12119. */
  12120. class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode
  12121. {
  12122. /**
  12123. * Constructor
  12124. * @param {string} [name] name of the node
  12125. */
  12126. constructor(name = undefined)
  12127. {
  12128. super(name, UPLOAD_BUFFER_SIZE, [
  12129. OutputPort().expects(SpeedyPipelineMessageType.Image)
  12130. ]);
  12131. /** @type {SpeedyMedia|null} source media */
  12132. this._media = null;
  12133. /** @type {number} texture index */
  12134. this._textureIndex = 0;
  12135. }
  12136. /**
  12137. * Source media
  12138. * @returns {SpeedyMedia|null}
  12139. */
  12140. get media()
  12141. {
  12142. return this._media;
  12143. }
  12144. /**
  12145. * Source media
  12146. * @param {SpeedyMedia|null} media
  12147. */
  12148. set media(media)
  12149. {
  12150. if(media !== null && !(media instanceof SpeedyMedia))
  12151. throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
  12152. this._media = media;
  12153. }
  12154. /**
  12155. * Run the specific task of this node
  12156. * @param {SpeedyGPU} gpu
  12157. * @returns {void|SpeedyPromise<void>}
  12158. */
  12159. _run(gpu)
  12160. {
  12161. if(this._media == null)
  12162. throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
  12163. // use round-robin to mitigate WebGL's implicit synchronization
  12164. // and maybe minimize texture upload times
  12165. this._textureIndex = (this._textureIndex + 1) % this._tex.length;
  12166. // upload texture
  12167. const outputTexture = this._tex[this._textureIndex];
  12168. gpu.upload(this._media._source, outputTexture);
  12169. this.output().swrite(outputTexture, this._media._format);
  12170. }
  12171. }
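/*
 * A minimal usage sketch: an image source must be given a SpeedyMedia before the
 * pipeline runs, otherwise _run() throws. The round-robin over UPLOAD_BUFFER_SIZE
 * textures is internal and requires no user action. someSpeedyMedia is hypothetical.
 *
 *   const source = new SpeedyPipelineNodeImageSource('webcam');
 *   source.media = someSpeedyMedia; // e.g., a SpeedyMedia wrapping a <video>
 *   // source.media = 42;           // would throw IllegalArgumentError: not a SpeedyMedia
 */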
  12172. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
  12173. /*
  12174. * speedy-vision.js
  12175. * GPU-accelerated Computer Vision for JavaScript
  12176. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12177. *
  12178. * Licensed under the Apache License, Version 2.0 (the "License");
  12179. * you may not use this file except in compliance with the License.
  12180. * You may obtain a copy of the License at
  12181. *
  12182. * http://www.apache.org/licenses/LICENSE-2.0
  12183. *
  12184. * Unless required by applicable law or agreed to in writing, software
  12185. * distributed under the License is distributed on an "AS IS" BASIS,
  12186. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12187. * See the License for the specific language governing permissions and
  12188. * limitations under the License.
  12189. *
  12190. * image-output.js
  12191. * Gets an image out of a pipeline
  12192. */
  12193. /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
  12194. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
  12195. const DEFAULT_MEDIA_TYPE = "bitmap";
  12196. /**
  12197. * Gets an image out of a pipeline
  12198. */
  12199. class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode
  12200. {
  12201. /**
  12202. * Constructor
  12203. * @param {string} [name] name of the node
  12204. */
  12205. constructor(name = 'image')
  12206. {
  12207. super(name, 0, [
  12208. InputPort().expects(SpeedyPipelineMessageType.Image)
  12209. ]);
  12210. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
  12211. this._mediaType = DEFAULT_MEDIA_TYPE;
  12212. /** @type {ImageBitmap} output bitmap */
  12213. this._bitmap = null;
  12214. /** @type {ImageData} output pixel data */
  12215. this._data = null;
  12216. /** @type {ImageFormat} output format */
  12217. this._format = types/* ImageFormat */.f5.RGBA;
  12218. /** @type {SpeedyTextureReader} texture reader */
  12219. this._textureReader = new SpeedyTextureReader(1);
  12220. }
  12221. /**
  12222. * The media type that is exported from this node
  12223. * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
  12224. */
  12225. get mediaType()
  12226. {
  12227. return this._mediaType;
  12228. }
  12229. /**
  12230. * The media type that is exported from this node
  12231. * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
  12232. */
  12233. set mediaType(value)
  12234. {
  12235. if(value != 'bitmap' && value != 'data')
  12236. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
  12237. this._mediaType = value;
  12238. }
  12239. /**
  12240. * Initializes this node
  12241. * @param {SpeedyGPU} gpu
  12242. */
  12243. init(gpu)
  12244. {
  12245. super.init(gpu);
  12246. this._textureReader.init(gpu);
  12247. }
  12248. /**
  12249. * Releases this node
  12250. * @param {SpeedyGPU} gpu
  12251. */
  12252. release(gpu)
  12253. {
  12254. this._textureReader.release(gpu);
  12255. super.release(gpu);
  12256. }
  12257. /**
  12258. * Export data from this node to the user
  12259. * @returns {SpeedyPromise<SpeedyMedia>}
  12260. */
  12261. export()
  12262. {
  12263. const bitmapOrData = (this._mediaType != 'data') ? this._bitmap : this._data;
  12264. utils/* Utils */.A.assert(bitmapOrData != null);
  12265. return SpeedyMedia.load(bitmapOrData, { format: this._format }, false);
  12266. }
  12267. /**
  12268. * Run the specific task of this node
  12269. * @param {SpeedyGPU} gpu
  12270. * @returns {void|SpeedyPromise<void>}
  12271. */
  12272. _run(gpu)
  12273. {
  12274. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12275. if(this._mediaType != 'data') {
  12276. /* Create an ImageBitmap (default) */
  12277. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  12278. const canvas = gpu.renderToCanvas(image);
  12279. createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
  12280. this._bitmap = bitmap;
  12281. this._format = format;
  12282. this._data = null;
  12283. resolve();
  12284. });
  12285. });
  12286. }
  12287. else {
  12288. /* Create an ImageData */
  12289. return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
  12290. const dataArray = new Uint8ClampedArray(pixels.buffer);
  12291. this._data = new ImageData(dataArray, image.width, image.height);
  12292. this._format = format;
  12293. this._bitmap = null;
  12294. });
  12295. }
  12296. }
  12297. }
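/*
 * A minimal usage sketch: choosing what the image sink exports. With the default
 * mediaType ("bitmap"), export() resolves to a SpeedyMedia backed by an ImageBitmap;
 * with "data", it is backed by an ImageData holding the pixels read back from the GPU.
 *
 *   const sink = new SpeedyPipelineNodeImageSink('out');
 *   sink.mediaType = 'data';       // read back pixel data instead of an ImageBitmap
 *   // sink.mediaType = 'canvas';  // would throw: only 'bitmap' and 'data' are accepted
 *
 *   // after pipeline.run(), the exported media is available as result['out']
 */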
  12298. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
  12299. /*
  12300. * speedy-vision.js
  12301. * GPU-accelerated Computer Vision for JavaScript
  12302. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12303. *
  12304. * Licensed under the Apache License, Version 2.0 (the "License");
  12305. * you may not use this file except in compliance with the License.
  12306. * You may obtain a copy of the License at
  12307. *
  12308. * http://www.apache.org/licenses/LICENSE-2.0
  12309. *
  12310. * Unless required by applicable law or agreed to in writing, software
  12311. * distributed under the License is distributed on an "AS IS" BASIS,
  12312. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12313. * See the License for the specific language governing permissions and
  12314. * limitations under the License.
  12315. *
  12316. * multiplexer.js
  12317. * Image multiplexer
  12318. */
  12319. /** @type {string[]} the names of the input ports indexed by their number */
  12320. const INPUT_PORT = [ 'in0', 'in1' ];
  12321. /**
  12322. * Image multiplexer
  12323. */
  12324. class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode
  12325. {
  12326. /**
  12327. * Constructor
  12328. * @param {string} [name] name of the node
  12329. */
  12330. constructor(name = undefined)
  12331. {
  12332. super(name, 0, [
  12333. ...(INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image))),
  12334. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12335. ]);
  12336. /** @type {number} which port should be linked to the output? */
  12337. this._port = 0;
  12338. }
  12339. /**
  12340. * The number of the port that should be linked to the output
  12341. * @returns {number}
  12342. */
  12343. get port()
  12344. {
  12345. return this._port;
  12346. }
  12347. /**
  12348. * The number of the port that should be linked to the output
  12349. * @param {number} port
  12350. */
  12351. set port(port)
  12352. {
  12353. if(port < 0 || port >= INPUT_PORT.length)
  12354. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  12355. this._port = port | 0;
  12356. }
  12357. /**
  12358. * Run the specific task of this node
  12359. * @param {SpeedyGPU} gpu
  12360. * @returns {void|SpeedyPromise<void>}
  12361. */
  12362. _run(gpu)
  12363. {
  12364. const message = this.input(INPUT_PORT[this._port]).read();
  12365. this.output().write(message);
  12366. }
  12367. }
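/*
 * A minimal usage sketch: the multiplexer forwards exactly one of its two inputs
 * ('in0' or 'in1') to the output; switching is just a matter of setting the port
 * number between runs.
 *
 *   const mux = new SpeedyPipelineNodeImageMultiplexer('mux');
 *   mux.port = 0;      // forward whatever is linked to 'in0'
 *   // ...later...
 *   mux.port = 1;      // forward 'in1' on the next run
 *   // mux.port = 2;   // would throw IllegalArgumentError: invalid port
 */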
  12368. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
  12369. /*
  12370. * speedy-vision.js
  12371. * GPU-accelerated Computer Vision for JavaScript
  12372. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12373. *
  12374. * Licensed under the Apache License, Version 2.0 (the "License");
  12375. * you may not use this file except in compliance with the License.
  12376. * You may obtain a copy of the License at
  12377. *
  12378. * http://www.apache.org/licenses/LICENSE-2.0
  12379. *
  12380. * Unless required by applicable law or agreed to in writing, software
  12381. * distributed under the License is distributed on an "AS IS" BASIS,
  12382. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12383. * See the License for the specific language governing permissions and
  12384. * limitations under the License.
  12385. *
  12386. * buffer.js
  12387. * Image Buffer
  12388. */
  12389. /**
  12390. * Image Buffer: a node with memory.
  12391. * At time t, it outputs the image received at time t-1
  12392. */
  12393. class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode
  12394. {
  12395. /**
  12396. * Constructor
  12397. * @param {string} [name] name of the node
  12398. */
  12399. constructor(name = undefined)
  12400. {
  12401. super(name, 2, [
  12402. InputPort().expects(SpeedyPipelineMessageType.Image),
  12403. OutputPort().expects(SpeedyPipelineMessageType.Image)
  12404. ]);
  12405. /** @type {number} current page: 0 or 1 */
  12406. this._pageIndex = 0;
  12407. /** @type {boolean} first run? */
  12408. this._initialized = false;
  12409. /** @type {ImageFormat} previous image format */
  12410. this._previousFormat = types/* ImageFormat */.f5.RGBA;
  12411. /** @type {boolean} frozen buffer? */
  12412. this._frozen = false;
  12413. }
  12414. /**
  12415. * A frozen buffer discards the input, effectively increasing the buffering time
  12416. * @returns {boolean}
  12417. */
  12418. get frozen()
  12419. {
  12420. return this._frozen;
  12421. }
  12422. /**
  12423. * A frozen buffer discards the input, effectively increasing the buffering time
  12424. * @param {boolean} value
  12425. */
  12426. set frozen(value)
  12427. {
  12428. this._frozen = Boolean(value);
  12429. }
  12430. /**
  12431. * Releases this node
  12432. * @param {SpeedyGPU} gpu
  12433. */
  12434. release(gpu)
  12435. {
  12436. this._initialized = false;
  12437. super.release(gpu);
  12438. }
  12439. /**
  12440. * Run the specific task of this node
  12441. * @param {SpeedyGPU} gpu
  12442. * @returns {void|SpeedyPromise<void>}
  12443. */
  12444. _run(gpu)
  12445. {
  12446. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12447. const previousFormat = this._previousFormat;
  12448. const page = this._tex;
  12449. const previousInputTexture = page[1 - this._pageIndex];
  12450. const outputTexture = page[this._pageIndex];
  12451. // can't store pyramids
  12452. if(image.hasMipmaps())
  12453. throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
  12454. // bufferize
  12455. if(!this._frozen || !this._initialized) {
  12456. // store input
  12457. this._previousFormat = format;
  12458. previousInputTexture.resize(image.width, image.height);
  12459. image.copyTo(previousInputTexture);
  12460. // page flipping
  12461. this._pageIndex = 1 - this._pageIndex;
  12462. }
  12463. // first run?
  12464. if(!this._initialized) {
  12465. this._initialized = true;
  12466. this.output().swrite(previousInputTexture, format);
  12467. return;
  12468. }
  12469. // done!
  12470. this.output().swrite(outputTexture, previousFormat);
  12471. }
  12472. }
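/*
 * A minimal usage sketch: the buffer delays the image stream by one run, which is
 * useful for frame differencing or optical flow. On the very first run it echoes
 * the current input; freezing it keeps the stored frame, effectively increasing
 * the buffering time.
 *
 *   const buffer = new SpeedyPipelineNodeImageBuffer('previous-frame');
 *   buffer.frozen = true;   // hold on to the currently stored frame
 *   buffer.frozen = false;  // resume buffering on the next run
 */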
  12473. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
  12474. /*
  12475. * speedy-vision.js
  12476. * GPU-accelerated Computer Vision for JavaScript
  12477. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12478. *
  12479. * Licensed under the Apache License, Version 2.0 (the "License");
  12480. * you may not use this file except in compliance with the License.
  12481. * You may obtain a copy of the License at
  12482. *
  12483. * http://www.apache.org/licenses/LICENSE-2.0
  12484. *
  12485. * Unless required by applicable law or agreed to in writing, software
  12486. * distributed under the License is distributed on an "AS IS" BASIS,
  12487. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12488. * See the License for the specific language governing permissions and
  12489. * limitations under the License.
  12490. *
  12491. * pyramid.js
  12492. * Generate pyramid
  12493. */
  12494. // Constants
12495. const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; // supposing image size <= 8K = 2^13 (down to 1)
12496. const MAX_TEXTURES = 2 * MAX_LEVELS;
  12497. /**
  12498. * Generate pyramid
  12499. */
  12500. class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode
  12501. {
  12502. /**
  12503. * Constructor
  12504. * @param {string} [name] name of the node
  12505. */
  12506. constructor(name = undefined)
  12507. {
  12508. super(name, MAX_TEXTURES + 1, [
  12509. InputPort().expects(SpeedyPipelineMessageType.Image),
  12510. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12511. ]);
  12512. }
  12513. /**
  12514. * Run the specific task of this node
  12515. * @param {SpeedyGPU} gpu
  12516. * @returns {void|SpeedyPromise<void>}
  12517. */
  12518. _run(gpu)
  12519. {
  12520. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12521. const outputTexture = this._tex[0];
  12522. const pyramids = gpu.programs.pyramids;
  12523. let width = image.width, height = image.height;
  12524. // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  12525. const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
  12526. // get work textures
  12527. const mip = new Array(MAX_TEXTURES + 1);
  12528. for(let i = MAX_TEXTURES; i >= 1; i--)
  12529. mip[i-1] = this._tex[i];
  12530. // get a copy of the input image
  12531. mip[0].resize(width, height);
  12532. image.copyTo(mip[0]);
  12533. // generate gaussian pyramid
  12534. const numLevels = Math.min(mipLevels, MAX_LEVELS);
  12535. for(let level = 1; level < numLevels; level++) {
12536. // use max(1, floor(size / 2^lod)), in accordance with
  12537. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  12538. const halfWidth = Math.max(1, width >>> 1);
  12539. const halfHeight = Math.max(1, height >>> 1);
  12540. // reduce operation
  12541. const tmp = (level - 1) + MAX_LEVELS;
  12542. (pyramids.smoothX.outputs(width, height, mip[tmp]))(mip[level-1]);
  12543. (pyramids.smoothY.outputs(width, height, mip[level-1]))(mip[tmp]);
  12544. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[level-1]);
  12545. /*
  12546. (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
  12547. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
  12548. */
  12549. // flush
  12550. gpu.gl.flush();
  12551. // next level
  12552. width = halfWidth;
  12553. height = halfHeight;
  12554. /*
  12555. // debug: view pyramid
  12556. const view = mip[level-1];
  12557. const canvas = gpu.renderToCanvas(view);
  12558. if(!window._ww) document.body.appendChild(canvas);
  12559. window._ww = 1;
  12560. */
  12561. }
  12562. // copy to output & set mipmap
  12563. outputTexture.resize(image.width, image.height);
  12564. outputTexture.clear();
  12565. image.copyTo(outputTexture);
  12566. outputTexture.generateMipmaps(mip.slice(0, numLevels));
  12567. // done!
  12568. this.output().swrite(outputTexture, format);
  12569. }
  12570. }
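/*
 * Worked example (based on the code above): for a 640x480 input, the OpenGL ES 3.0
 * rule gives mipLevels = 1 + floor(log2(max(640, 480))) = 1 + 9 = 10, so the node
 * generates min(10, PYRAMID_MAX_LEVELS) levels: 640x480, 320x240, 160x120, ...
 * Each level is smoothed (smoothX/smoothY) and downsampled by 2, and the resulting
 * chain is attached to the output texture as its custom mipmaps via generateMipmaps().
 */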
  12571. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
  12572. /*
  12573. * speedy-vision.js
  12574. * GPU-accelerated Computer Vision for JavaScript
  12575. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12576. *
  12577. * Licensed under the Apache License, Version 2.0 (the "License");
  12578. * you may not use this file except in compliance with the License.
  12579. * You may obtain a copy of the License at
  12580. *
  12581. * http://www.apache.org/licenses/LICENSE-2.0
  12582. *
  12583. * Unless required by applicable law or agreed to in writing, software
  12584. * distributed under the License is distributed on an "AS IS" BASIS,
  12585. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12586. * See the License for the specific language governing permissions and
  12587. * limitations under the License.
  12588. *
  12589. * mixer.js
  12590. * Image Mixer
  12591. */
  12592. /**
  12593. * Image Mixer
  12594. */
  12595. class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode
  12596. {
  12597. /**
  12598. * Constructor
  12599. * @param {string} [name] name of the node
  12600. */
  12601. constructor(name = undefined)
  12602. {
  12603. super(name, 1, [
  12604. InputPort('in0').expects(SpeedyPipelineMessageType.Image),
  12605. InputPort('in1').expects(SpeedyPipelineMessageType.Image),
  12606. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12607. ]);
  12608. /** @type {number} alpha coefficient (applied to image0) */
  12609. this._alpha = 0.5;
  12610. /** @type {number} beta coefficient (applied to image1) */
  12611. this._beta = 0.5;
  12612. /** @type {number} gamma coefficient (brightness control) */
  12613. this._gamma = 0.0;
  12614. }
  12615. /**
  12616. * Alpha coefficient (applied to image0)
  12617. * @returns {number}
  12618. */
  12619. get alpha()
  12620. {
  12621. return this._alpha;
  12622. }
  12623. /**
  12624. * Alpha coefficient (applied to image0)
  12625. * @param {number} value
  12626. */
  12627. set alpha(value)
  12628. {
  12629. this._alpha = +value;
  12630. }
  12631. /**
  12632. * Beta coefficient (applied to image1)
  12633. * @returns {number}
  12634. */
  12635. get beta()
  12636. {
  12637. return this._beta;
  12638. }
  12639. /**
  12640. * Beta coefficient (applied to image1)
  12641. * @param {number} value
  12642. */
  12643. set beta(value)
  12644. {
  12645. this._beta = +value;
  12646. }
  12647. /**
  12648. * Gamma coefficient (brightness control)
  12649. * @returns {number}
  12650. */
  12651. get gamma()
  12652. {
  12653. return this._gamma;
  12654. }
  12655. /**
  12656. * Gamma coefficient (brightness control)
  12657. * @param {number} value
  12658. */
  12659. set gamma(value)
  12660. {
  12661. this._gamma = +value;
  12662. }
  12663. /**
  12664. * Run the specific task of this node
  12665. * @param {SpeedyGPU} gpu
  12666. * @returns {void|SpeedyPromise<void>}
  12667. */
  12668. _run(gpu)
  12669. {
  12670. const in0 = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('in0').read() );
  12671. const in1 = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('in1').read() );
  12672. const image0 = in0.image, image1 = in1.image;
  12673. const format0 = in0.format, format1 = in1.format;
  12674. const width = Math.max(image0.width, image1.width);
  12675. const height = Math.max(image0.height, image1.height);
  12676. const alpha = this._alpha, beta = this._beta, gamma = this._gamma;
  12677. const outputTexture = this._tex[0];
  12678. if(format0 != format1)
  12679. throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
  12680. gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
  12681. gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
  12682. this.output().swrite(outputTexture, format0);
  12683. }
  12684. }
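/*
 * A minimal usage sketch. Judging from the parameters passed to additiveMix, the
 * output is presumably alpha * image0 + beta * image1 + gamma; both inputs must
 * share the same format. A simple cross-fade between the two inputs:
 *
 *   const mixer = new SpeedyPipelineNodeImageMixer('fade');
 *   const t = 0.25;          // fade amount in [0,1]
 *   mixer.alpha = 1.0 - t;   // weight of 'in0'
 *   mixer.beta = t;          // weight of 'in1'
 *   mixer.gamma = 0.0;       // no extra brightness
 */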
  12685. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
  12686. /*
  12687. * speedy-vision.js
  12688. * GPU-accelerated Computer Vision for JavaScript
  12689. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12690. *
  12691. * Licensed under the Apache License, Version 2.0 (the "License");
  12692. * you may not use this file except in compliance with the License.
  12693. * You may obtain a copy of the License at
  12694. *
  12695. * http://www.apache.org/licenses/LICENSE-2.0
  12696. *
  12697. * Unless required by applicable law or agreed to in writing, software
  12698. * distributed under the License is distributed on an "AS IS" BASIS,
  12699. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12700. * See the License for the specific language governing permissions and
  12701. * limitations under the License.
  12702. *
  12703. * portal.js
  12704. * Image Portals
  12705. */
  12706. /**
  12707. * A sink of an Image Portal
  12708. * This is not a pipeline sink - it doesn't export any data!
  12709. */
  12710. class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode
  12711. {
  12712. /**
  12713. * Constructor
  12714. * @param {string} [name] name of the node
  12715. */
  12716. constructor(name = undefined)
  12717. {
  12718. super(name, 1, [
  12719. InputPort().expects(SpeedyPipelineMessageType.Image),
  12720. ]);
  12721. /** @type {ImageFormat} stored image format */
  12722. this._format = types/* ImageFormat */.f5.RGBA;
  12723. /** @type {boolean} is this node initialized? */
  12724. this._initialized = false;
  12725. }
  12726. /**
  12727. * Stored image
  12728. * @returns {SpeedyTexture}
  12729. */
  12730. get image()
  12731. {
  12732. if(!this._initialized)
  12733. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  12734. return this._tex[0];
  12735. }
  12736. /**
  12737. * Stored image format
  12738. * @returns {ImageFormat}
  12739. */
  12740. get format()
  12741. {
  12742. if(!this._initialized)
  12743. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  12744. return this._format;
  12745. }
  12746. /**
  12747. * Initializes this node
  12748. * @param {SpeedyGPU} gpu
  12749. */
  12750. init(gpu)
  12751. {
  12752. super.init(gpu);
  12753. this._tex[0].resize(1, 1).clear(); // initial texture
  12754. this._format = types/* ImageFormat */.f5.RGBA;
  12755. this._initialized = true;
  12756. }
  12757. /**
  12758. * Releases this node
  12759. * @param {SpeedyGPU} gpu
  12760. */
  12761. release(gpu)
  12762. {
  12763. this._initialized = false;
  12764. super.release(gpu);
  12765. }
  12766. /**
  12767. * Run the specific task of this node
  12768. * @param {SpeedyGPU} gpu
  12769. * @returns {void|SpeedyPromise<void>}
  12770. */
  12771. _run(gpu)
  12772. {
  12773. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12774. const tex = this._tex[0];
  12775. // can't store pyramids
  12776. if(image.hasMipmaps())
  12777. throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
  12778. // copy input
  12779. this._format = format;
  12780. tex.resize(image.width, image.height);
  12781. image.copyTo(tex);
  12782. }
  12783. }
  12784. /**
  12785. * A source of an Image Portal
  12786. */
  12787. class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode
  12788. {
  12789. /**
  12790. * Constructor
  12791. * @param {string} [name] name of the node
  12792. */
  12793. constructor(name = undefined)
  12794. {
  12795. super(name, 0, [
  12796. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12797. ]);
  12798. /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
  12799. this._source = null;
  12800. }
  12801. /**
  12802. * Data source
  12803. * @returns {SpeedyPipelineNodeImagePortalSink|null}
  12804. */
  12805. get source()
  12806. {
  12807. return this._source;
  12808. }
  12809. /**
  12810. * Data source
  12811. * @param {SpeedyPipelineNodeImagePortalSink|null} node
  12812. */
  12813. set source(node)
  12814. {
  12815. if(node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink))
  12816. throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  12817. this._source = node;
  12818. }
  12819. /**
  12820. * Run the specific task of this node
  12821. * @param {SpeedyGPU} gpu
  12822. * @returns {void|SpeedyPromise<void>}
  12823. */
  12824. _run(gpu)
  12825. {
  12826. if(this._source == null)
  12827. throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  12828. this.output().swrite(this._source.image, this._source.format);
  12829. }
  12830. }
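/*
 * A minimal usage sketch: portals carry an image across pipelines (or across runs)
 * without going through a pipeline sink. The portal source simply points at a
 * portal sink:
 *
 *   const portalSink = new SpeedyPipelineNodeImagePortalSink('portal');  // in pipeline A
 *   const portalSource = new SpeedyPipelineNodeImagePortalSource();      // in pipeline B
 *   portalSource.source = portalSink;  // read whatever pipeline A stored last
 *
 *   // once pipeline A has been initialized, the portal holds a cleared 1x1
 *   // texture, so pipeline B can run even before A's first run
 */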
  12831. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
  12832. /*
  12833. * speedy-vision.js
  12834. * GPU-accelerated Computer Vision for JavaScript
  12835. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12836. *
  12837. * Licensed under the Apache License, Version 2.0 (the "License");
  12838. * you may not use this file except in compliance with the License.
  12839. * You may obtain a copy of the License at
  12840. *
  12841. * http://www.apache.org/licenses/LICENSE-2.0
  12842. *
  12843. * Unless required by applicable law or agreed to in writing, software
  12844. * distributed under the License is distributed on an "AS IS" BASIS,
  12845. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12846. * See the License for the specific language governing permissions and
  12847. * limitations under the License.
  12848. *
  12849. * image-factory.js
  12850. * Image-related nodes
  12851. */
  12852. /**
  12853. * Portal nodes
  12854. */
  12855. class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q
  12856. {
  12857. /**
  12858. * Create an image portal source
  12859. * @param {string} [name] name of the node
  12860. * @returns {SpeedyPipelineNodeImagePortalSource}
  12861. */
  12862. static Source(name = undefined)
  12863. {
  12864. return new SpeedyPipelineNodeImagePortalSource(name);
  12865. }
  12866. /**
  12867. * Create an image portal sink
  12868. * @param {string} [name] name of the node
  12869. * @returns {SpeedyPipelineNodeImagePortalSink}
  12870. */
  12871. static Sink(name = undefined)
  12872. {
  12873. return new SpeedyPipelineNodeImagePortalSink(name);
  12874. }
  12875. }
  12876. /**
  12877. * Image nodes
  12878. */
  12879. class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q
  12880. {
  12881. /**
  12882. * Create an image source
  12883. * @param {string} [name] name of the node
  12884. * @returns {SpeedyPipelineNodeImageSource}
  12885. */
  12886. static Source(name = undefined)
  12887. {
  12888. return new SpeedyPipelineNodeImageSource(name);
  12889. }
  12890. /**
  12891. * Create an image sink
  12892. * @param {string} [name] name of the node
  12893. * @returns {SpeedyPipelineNodeImageSink}
  12894. */
  12895. static Sink(name = undefined)
  12896. {
  12897. return new SpeedyPipelineNodeImageSink(name);
  12898. }
  12899. /**
  12900. * Create an image multiplexer
  12901. * @param {string} [name] name of the node
  12902. * @returns {SpeedyPipelineNodeImageMultiplexer}
  12903. */
  12904. static Multiplexer(name = undefined)
  12905. {
  12906. return new SpeedyPipelineNodeImageMultiplexer(name);
  12907. }
  12908. /**
  12909. * Create an image buffer
  12910. * @param {string} [name] name of the node
  12911. * @returns {SpeedyPipelineNodeImageBuffer}
  12912. */
  12913. static Buffer(name = undefined)
  12914. {
  12915. return new SpeedyPipelineNodeImageBuffer(name);
  12916. }
  12917. /**
  12918. * Image Pyramid
  12919. * @param {string} [name] name of the node
  12920. * @returns {SpeedyPipelineNodeImagePyramid}
  12921. */
  12922. static Pyramid(name = undefined)
  12923. {
  12924. return new SpeedyPipelineNodeImagePyramid(name);
  12925. }
  12926. /**
  12927. * Image Mixer (blending)
  12928. * @param {string} [name] name of the node
  12929. * @returns {SpeedyPipelineNodeImageMixer}
  12930. */
  12931. static Mixer(name = undefined)
  12932. {
  12933. return new SpeedyPipelineNodeImageMixer(name);
  12934. }
  12935. /**
  12936. * Image Portals
  12937. * @returns {typeof SpeedyPipelineImagePortalFactory}
  12938. */
  12939. static get Portal()
  12940. {
  12941. return SpeedyPipelineImagePortalFactory;
  12942. }
  12943. }
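/*
 * A minimal usage sketch: the factories above are thin wrappers around the node
 * constructors. In the public API they are presumably exposed under a namespace
 * such as Speedy.Image; that facade is an assumption not shown in this excerpt.
 *
 *   const source = SpeedyPipelineImageFactory.Source('in');
 *   const sink = SpeedyPipelineImageFactory.Sink('out');
 *   const buffer = SpeedyPipelineImageFactory.Buffer();
 *   const portalSink = SpeedyPipelineImageFactory.Portal.Sink('portal');
 */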
  12944. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
  12945. /*
  12946. * speedy-vision.js
  12947. * GPU-accelerated Computer Vision for JavaScript
  12948. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12949. *
  12950. * Licensed under the Apache License, Version 2.0 (the "License");
  12951. * you may not use this file except in compliance with the License.
  12952. * You may obtain a copy of the License at
  12953. *
  12954. * http://www.apache.org/licenses/LICENSE-2.0
  12955. *
  12956. * Unless required by applicable law or agreed to in writing, software
  12957. * distributed under the License is distributed on an "AS IS" BASIS,
  12958. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12959. * See the License for the specific language governing permissions and
  12960. * limitations under the License.
  12961. *
  12962. * greyscale.js
  12963. * Convert an image to greyscale
  12964. */
  12965. /**
  12966. * Convert an image to greyscale
  12967. */
  12968. class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode
  12969. {
  12970. /**
  12971. * Constructor
  12972. * @param {string} [name] name of the node
  12973. */
  12974. constructor(name = undefined)
  12975. {
  12976. super(name, 1, [
  12977. InputPort().expects(SpeedyPipelineMessageType.Image),
  12978. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12979. ]);
  12980. }
  12981. /**
  12982. * Run the specific task of this node
  12983. * @param {SpeedyGPU} gpu
  12984. * @returns {void|SpeedyPromise<void>}
  12985. */
  12986. _run(gpu)
  12987. {
  12988. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12989. const width = image.width, height = image.height;
  12990. const outputTexture = this._tex[0];
  12991. const filters = gpu.programs.filters;
  12992. filters.rgb2grey.outputs(width, height, outputTexture);
  12993. filters.rgb2grey(image);
  12994. this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
  12995. }
  12996. }
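/*
 * A minimal usage sketch: the greyscale node always outputs ImageFormat.GREY,
 * regardless of the input format. Several nodes in this file only accept GREY
 * input (e.g., the median blur defined below), so a greyscale node typically
 * sits right after the image source.
 *
 *   const greyscale = new SpeedyPipelineNodeGreyscale();
 *   // source --> greyscale --> (GREY-only nodes) --> sink
 */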
  12997. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
  12998. /*
  12999. * speedy-vision.js
  13000. * GPU-accelerated Computer Vision for JavaScript
  13001. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13002. *
  13003. * Licensed under the Apache License, Version 2.0 (the "License");
  13004. * you may not use this file except in compliance with the License.
  13005. * You may obtain a copy of the License at
  13006. *
  13007. * http://www.apache.org/licenses/LICENSE-2.0
  13008. *
  13009. * Unless required by applicable law or agreed to in writing, software
  13010. * distributed under the License is distributed on an "AS IS" BASIS,
  13011. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13012. * See the License for the specific language governing permissions and
  13013. * limitations under the License.
  13014. *
  13015. * gaussian-blur.js
  13016. * Gaussian Blur
  13017. */
  13018. /**
  13019. * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
  13020. * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
  13021. * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
  13022. * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
  13023. */
  13024. const DEFAULT_KERNEL = Object.freeze({
  13025. 3: [ 0.27901008925473514, 0.44197982149052983, 0.27901008925473514 ], // 1D convolution (sigma = 1)
  13026. 5: [ 0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021 ], // 1D convolution (separable kernel)
  13027. 7: [ 0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274 ],
  13028. 9: [ 0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988 ],
  13029. 11:[ 0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346 ],
  13030. 13:[ 0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363 ],
  13031. 15:[ 0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383 ],
  13032. //3: [ 0.25, 0.5, 0.25 ],
  13033. //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
  13034. });
  13035. /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
  13036. const DEFAULT_SIGMA = new SpeedyVector2(0,0);
  13037. /** convolution programs (x-axis) */
  13038. const CONVOLUTION_X = Object.freeze({
  13039. 3: 'convolution3x',
  13040. 5: 'convolution5x',
  13041. 7: 'convolution7x',
  13042. 9: 'convolution9x',
  13043. 11: 'convolution11x',
  13044. 13: 'convolution13x',
  13045. 15: 'convolution15x',
  13046. });
  13047. /** convolution programs (y-axis) */
  13048. const CONVOLUTION_Y = Object.freeze({
  13049. 3: 'convolution3y',
  13050. 5: 'convolution5y',
  13051. 7: 'convolution7y',
  13052. 9: 'convolution9y',
  13053. 11: 'convolution11y',
  13054. 13: 'convolution13y',
  13055. 15: 'convolution15y',
  13056. });
  13057. /**
  13058. * @typedef {object} SeparableConvolutionKernel
  13059. * @property {number[]} x
  13060. * @property {number[]} y
  13061. */
  13062. /**
  13063. * Gaussian Blur
  13064. */
  13065. class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode
  13066. {
  13067. /**
  13068. * Constructor
  13069. * @param {string} [name] name of the node
  13070. */
  13071. constructor(name = undefined)
  13072. {
  13073. super(name, 2, [
  13074. InputPort().expects(SpeedyPipelineMessageType.Image),
  13075. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13076. ]);
  13077. /** @type {SpeedySize} size of the kernel */
  13078. this._kernelSize = new SpeedySize(5,5);
  13079. /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
  13080. this._sigma = DEFAULT_SIGMA;
  13081. /** @type {SeparableConvolutionKernel} convolution kernel */
  13082. this._kernel = {
  13083. x: DEFAULT_KERNEL[this._kernelSize.width],
  13084. y: DEFAULT_KERNEL[this._kernelSize.height]
  13085. };
  13086. }
  13087. /**
  13088. * Size of the kernel
  13089. * @returns {SpeedySize}
  13090. */
  13091. get kernelSize()
  13092. {
  13093. return this._kernelSize;
  13094. }
  13095. /**
  13096. * Size of the kernel
  13097. * @param {SpeedySize} kernelSize
  13098. */
  13099. set kernelSize(kernelSize)
  13100. {
  13101. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  13102. const kw = kernelSize.width, kh = kernelSize.height;
  13103. if(kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0)
  13104. throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  13105. this._kernelSize = kernelSize;
  13106. this._updateKernel();
  13107. }
  13108. /**
  13109. * Sigma of the Gaussian kernel
  13110. * @returns {SpeedyVector2}
  13111. */
  13112. get sigma()
  13113. {
  13114. return this._sigma;
  13115. }
  13116. /**
  13117. * Sigma of the Gaussian kernel
  13118. * @param {SpeedyVector2} sigma
  13119. */
  13120. set sigma(sigma)
  13121. {
  13122. utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
  13123. utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
  13124. this._sigma = sigma;
  13125. this._updateKernel();
  13126. }
  13127. /**
  13128. * Run the specific task of this node
  13129. * @param {SpeedyGPU} gpu
  13130. * @returns {void|SpeedyPromise<void>}
  13131. */
  13132. _run(gpu)
  13133. {
  13134. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13135. const width = image.width, height = image.height;
  13136. const kernX = this._kernel.x;
  13137. const kernY = this._kernel.y;
  13138. const convX = CONVOLUTION_X[this._kernelSize.width];
  13139. const convY = CONVOLUTION_Y[this._kernelSize.height];
  13140. const tex = this._tex[0];
  13141. const outputTexture = this._tex[1];
  13142. (gpu.programs.filters[convX]
  13143. .outputs(width, height, tex)
  13144. )(image, kernX);
  13145. (gpu.programs.filters[convY]
  13146. .outputs(width, height, outputTexture)
  13147. )(tex, kernY);
  13148. this.output().swrite(outputTexture, format);
  13149. }
  13150. /**
  13151. * Update the internal kernel to match
  13152. * sigma and kernelSize
  13153. */
  13154. _updateKernel()
  13155. {
  13156. if(this._sigma.x == DEFAULT_SIGMA.x)
  13157. this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];
  13158. else
  13159. this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
  13160. if(this._sigma.y == DEFAULT_SIGMA.y)
  13161. this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];
  13162. else
  13163. this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
  13164. }
  13165. }
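/*
 * A minimal usage sketch: kernel size and sigma are set independently. A sigma of
 * zero (the default) triggers the heuristic sigma = max(1, (ksize - 1) / 4), so a
 * 7x7 kernel implies sigma = 1.5; an explicit sigma overrides the heuristic.
 *
 *   const blur = new SpeedyPipelineNodeGaussianBlur();
 *   blur.kernelSize = new SpeedySize(7, 7);       // odd sizes from 3x3 to 15x15
 *   blur.sigma = new SpeedyVector2(2.0, 2.0);     // custom sigma (may be anisotropic)
 *   // blur.kernelSize = new SpeedySize(4, 4);    // would throw: even sizes unsupported
 */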
  13166. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
  13167. /*
  13168. * speedy-vision.js
  13169. * GPU-accelerated Computer Vision for JavaScript
  13170. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13171. *
  13172. * Licensed under the Apache License, Version 2.0 (the "License");
  13173. * you may not use this file except in compliance with the License.
  13174. * You may obtain a copy of the License at
  13175. *
  13176. * http://www.apache.org/licenses/LICENSE-2.0
  13177. *
  13178. * Unless required by applicable law or agreed to in writing, software
  13179. * distributed under the License is distributed on an "AS IS" BASIS,
  13180. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13181. * See the License for the specific language governing permissions and
  13182. * limitations under the License.
  13183. *
  13184. * simple-blur.js
  13185. * Simple Blur (Box Filter)
  13186. */
  13187. /** 1D convolution filters */
  13188. const BOX_FILTER = Object.freeze({
  13189. 3: (new Array(3)).fill(1/3),
  13190. 5: (new Array(5)).fill(1/5),
  13191. 7: (new Array(7)).fill(1/7),
  13192. 9: (new Array(9)).fill(1/9),
  13193. 11: (new Array(11)).fill(1/11),
  13194. 13: (new Array(13)).fill(1/13),
  13195. 15: (new Array(15)).fill(1/15),
  13196. });
  13197. /** convolution programs (x-axis) */
  13198. const simple_blur_CONVOLUTION_X = Object.freeze({
  13199. 3: 'convolution3x',
  13200. 5: 'convolution5x',
  13201. 7: 'convolution7x',
  13202. 9: 'convolution9x',
  13203. 11: 'convolution11x',
  13204. 13: 'convolution13x',
  13205. 15: 'convolution15x',
  13206. });
  13207. /** convolution programs (y-axis) */
  13208. const simple_blur_CONVOLUTION_Y = Object.freeze({
  13209. 3: 'convolution3y',
  13210. 5: 'convolution5y',
  13211. 7: 'convolution7y',
  13212. 9: 'convolution9y',
  13213. 11: 'convolution11y',
  13214. 13: 'convolution13y',
  13215. 15: 'convolution15y',
  13216. });
  13217. /**
  13218. * @typedef {object} SeparableConvolutionKernel
  13219. * @property {number[]} x
  13220. * @property {number[]} y
  13221. */
  13222. /**
  13223. * Simple Blur (Box Filter)
  13224. */
  13225. class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode
  13226. {
  13227. /**
  13228. * Constructor
  13229. * @param {string} [name] name of the node
  13230. */
  13231. constructor(name = undefined)
  13232. {
  13233. super(name, 2, [
  13234. InputPort().expects(SpeedyPipelineMessageType.Image),
  13235. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13236. ]);
  13237. /** @type {SpeedySize} size of the kernel */
  13238. this._kernelSize = new SpeedySize(5,5);
  13239. /** @type {SeparableConvolutionKernel} convolution kernel */
  13240. this._kernel = {
  13241. x: BOX_FILTER[this._kernelSize.width],
  13242. y: BOX_FILTER[this._kernelSize.height]
  13243. };
  13244. }
  13245. /**
  13246. * Size of the kernel
  13247. * @returns {SpeedySize}
  13248. */
  13249. get kernelSize()
  13250. {
  13251. return this._kernelSize;
  13252. }
  13253. /**
  13254. * Size of the kernel
  13255. * @param {SpeedySize} kernelSize
  13256. */
  13257. set kernelSize(kernelSize)
  13258. {
  13259. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  13260. const kw = kernelSize.width, kh = kernelSize.height;
  13261. if(kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0)
  13262. throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  13263. this._kernelSize = kernelSize;
  13264. this._kernel.x = BOX_FILTER[this._kernelSize.width];
  13265. this._kernel.y = BOX_FILTER[this._kernelSize.height];
  13266. }
  13267. /**
  13268. * Run the specific task of this node
  13269. * @param {SpeedyGPU} gpu
  13270. * @returns {void|SpeedyPromise<void>}
  13271. */
  13272. _run(gpu)
  13273. {
  13274. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13275. const width = image.width, height = image.height;
  13276. const kernX = this._kernel.x;
  13277. const kernY = this._kernel.y;
  13278. const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
  13279. const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
  13280. const tex = this._tex[0];
  13281. const outputTexture = this._tex[1];
  13282. (gpu.programs.filters[convX]
  13283. .outputs(width, height, tex)
  13284. )(image, kernX);
  13285. (gpu.programs.filters[convY]
  13286. .outputs(width, height, outputTexture)
  13287. )(tex, kernY);
  13288. this.output().swrite(outputTexture, format);
  13289. }
  13290. }
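/*
 * Illustrative sketch (not part of the library): the box filter above is separable.
 * Convolving rows and then columns with a 1D kernel of k taps, each equal to 1/k,
 * is equivalent to a k-by-k mean filter, which is exactly what BOX_FILTER plus the
 * convolutionNx / convolutionNy programs implement. The helpers below are hypothetical
 * CPU analogues; clamp-to-edge sampling is assumed.
 */
function boxKernel1D(k)
{
    return new Array(k).fill(1 / k); // same construction as BOX_FILTER above
}
function convolve1D(row, kernel)
{
    const r = kernel.length >> 1, n = row.length, out = new Array(n).fill(0);
    for(let i = 0; i < n; i++) {
        for(let j = -r; j <= r; j++) {
            const p = Math.min(n - 1, Math.max(0, i + j)); // clamp to edge
            out[i] += row[p] * kernel[j + r];
        }
    }
    return out;
}
// e.g., a single row blurred with a 5-tap box kernel:
// convolve1D([0, 0, 255, 0, 0], boxKernel1D(5)) -> every sample becomes 51 (= 255/5)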
  13291. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
  13292. /*
  13293. * speedy-vision.js
  13294. * GPU-accelerated Computer Vision for JavaScript
  13295. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13296. *
  13297. * Licensed under the Apache License, Version 2.0 (the "License");
  13298. * you may not use this file except in compliance with the License.
  13299. * You may obtain a copy of the License at
  13300. *
  13301. * http://www.apache.org/licenses/LICENSE-2.0
  13302. *
  13303. * Unless required by applicable law or agreed to in writing, software
  13304. * distributed under the License is distributed on an "AS IS" BASIS,
  13305. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13306. * See the License for the specific language governing permissions and
  13307. * limitations under the License.
  13308. *
  13309. * median-blur.js
  13310. * Median Blur
  13311. */
  13312. // Median programs
  13313. const MEDIAN = {
  13314. 3: 'median3',
  13315. 5: 'median5',
  13316. 7: 'median7',
  13317. };
  13318. /**
  13319. * Median Blur
  13320. */
  13321. class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode
  13322. {
  13323. /**
  13324. * Constructor
  13325. * @param {string} [name] name of the node
  13326. */
  13327. constructor(name = undefined)
  13328. {
  13329. super(name, 1, [
  13330. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  13331. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  13332. msg.format === types/* ImageFormat */.f5.GREY
  13333. ),
  13334. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13335. ]);
  13336. /** @type {SpeedySize} size of the kernel (assumed to be square) */
  13337. this._kernelSize = new SpeedySize(5,5);
  13338. }
  13339. /**
  13340. * Size of the kernel
  13341. * @returns {SpeedySize}
  13342. */
  13343. get kernelSize()
  13344. {
  13345. return this._kernelSize;
  13346. }
  13347. /**
  13348. * Size of the kernel
  13349. * @param {SpeedySize} kernelSize
  13350. */
  13351. set kernelSize(kernelSize)
  13352. {
  13353. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  13354. const ksize = kernelSize.width;
  13355. if(!(ksize == 3 || ksize == 5 || ksize == 7))
  13356. throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);
  13357. else if(kernelSize.width != kernelSize.height)
  13358. throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
  13359. this._kernelSize = kernelSize;
  13360. }
  13361. /**
  13362. * Run the specific task of this node
  13363. * @param {SpeedyGPU} gpu
  13364. * @returns {void|SpeedyPromise<void>}
  13365. */
  13366. _run(gpu)
  13367. {
  13368. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13369. const width = image.width, height = image.height;
  13370. const ksize = this._kernelSize.width;
  13371. const med = MEDIAN[ksize];
  13372. const outputTexture = this._tex[0];
  13373. (gpu.programs.filters[med]
  13374. .outputs(width, height, outputTexture)
  13375. )(image);
  13376. this.output().swrite(outputTexture, format);
  13377. }
  13378. }
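/*
 * Illustrative sketch (not part of the library): SpeedyPipelineNodeMedianBlur accepts
 * only square 3x3, 5x5 or 7x7 kernels on GREY images. For reference, this hypothetical
 * helper shows what a median filter computes at a single pixel on the CPU; it only
 * illustrates the definition, not how the median3/median5/median7 GPU programs are
 * implemented.
 */
function medianAt(grey, width, height, x, y, ksize)
{
    const r = ksize >> 1, window = [];
    for(let dy = -r; dy <= r; dy++) {
        for(let dx = -r; dx <= r; dx++) {
            const px = Math.min(width - 1, Math.max(0, x + dx)); // clamp to edge
            const py = Math.min(height - 1, Math.max(0, y + dy));
            window.push(grey[py * width + px]);
        }
    }
    window.sort((a, b) => a - b);
    return window[window.length >> 1]; // middle element of the sorted neighborhood
}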
  13379. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
  13380. /*
  13381. * speedy-vision.js
  13382. * GPU-accelerated Computer Vision for JavaScript
  13383. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13384. *
  13385. * Licensed under the Apache License, Version 2.0 (the "License");
  13386. * you may not use this file except in compliance with the License.
  13387. * You may obtain a copy of the License at
  13388. *
  13389. * http://www.apache.org/licenses/LICENSE-2.0
  13390. *
  13391. * Unless required by applicable law or agreed to in writing, software
  13392. * distributed under the License is distributed on an "AS IS" BASIS,
  13393. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13394. * See the License for the specific language governing permissions and
  13395. * limitations under the License.
  13396. *
  13397. * convolution.js
  13398. * Image convolution
  13399. */
  13400. // 2D convolution programs
  13401. const CONVOLUTION = {
  13402. 3: 'convolution3',
  13403. 5: 'convolution5',
  13404. 7: 'convolution7',
  13405. };
  13406. /**
  13407. * Image convolution
  13408. */
  13409. class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode
  13410. {
  13411. /**
  13412. * Constructor
  13413. * @param {string} [name] name of the node
  13414. */
  13415. constructor(name = undefined)
  13416. {
  13417. super(name, 1, [
  13418. InputPort().expects(SpeedyPipelineMessageType.Image),
  13419. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13420. ]);
  13421. /** @type {SpeedyMatrix} convolution kernel (square matrix) */
  13422. this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
  13423. }
  13424. /**
  13425. * Convolution kernel
  13426. * @returns {SpeedyMatrix}
  13427. */
  13428. get kernel()
  13429. {
  13430. return this._kernel;
  13431. }
  13432. /**
  13433. * Convolution kernel
  13434. * @param {SpeedyMatrix} kernel
  13435. */
  13436. set kernel(kernel)
  13437. {
  13438. if(kernel.rows != kernel.columns)
  13439. throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
  13440. else if(!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7))
  13441. throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
  13442. this._kernel = kernel;
  13443. }
  13444. /**
  13445. * Run the specific task of this node
  13446. * @param {SpeedyGPU} gpu
  13447. * @returns {void|SpeedyPromise<void>}
  13448. */
  13449. _run(gpu)
  13450. {
  13451. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13452. const width = image.width, height = image.height;
  13453. const outputTexture = this._tex[0];
  13454. const ksize = this._kernel.rows;
  13455. const conv = CONVOLUTION[ksize];
  13456. const kernel = this._kernel.read();
  13457. (gpu.programs.filters[conv]
  13458. .outputs(width, height, outputTexture)
  13459. )(image, kernel);
  13460. this.output().swrite(outputTexture, format);
  13461. }
  13462. }
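/*
 * Illustrative sketch (not part of the library): configuring the node above with a
 * 3x3 sharpen kernel. The kernel matrix must be square, of size 3x3, 5x5 or 7x7;
 * since this particular kernel is symmetric, its row-major and column-major layouts
 * coincide. The sketch assumes the same module scope as the code above
 * (speedy_matrix is a bundle-internal binding).
 */
const sharpen = new SpeedyPipelineNodeConvolution('sharpen');
sharpen.kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [
     0, -1,  0,
    -1,  5, -1,
     0, -1,  0
]);
// An invalid kernel (e.g., 4x4 or a non-square matrix) makes the setter throw NotSupportedError.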
  13463. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
  13464. /*
  13465. * speedy-vision.js
  13466. * GPU-accelerated Computer Vision for JavaScript
  13467. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13468. *
  13469. * Licensed under the Apache License, Version 2.0 (the "License");
  13470. * you may not use this file except in compliance with the License.
  13471. * You may obtain a copy of the License at
  13472. *
  13473. * http://www.apache.org/licenses/LICENSE-2.0
  13474. *
  13475. * Unless required by applicable law or agreed to in writing, software
  13476. * distributed under the License is distributed on an "AS IS" BASIS,
  13477. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13478. * See the License for the specific language governing permissions and
  13479. * limitations under the License.
  13480. *
  13481. * nightvision.js
  13482. * Nightvision filter
  13483. */
  13484. /**
  13485. * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
  13486. */
  13487. /**
  13488. * Nightvision filter: "see in the dark"
  13489. */
  13490. class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode
  13491. {
  13492. /**
  13493. * Constructor
  13494. * @param {string} [name] name of the node
  13495. */
  13496. constructor(name = undefined)
  13497. {
  13498. super(name, 3, [
  13499. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  13500. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  13501. msg.format === types/* ImageFormat */.f5.RGBA ||
  13502. msg.format === types/* ImageFormat */.f5.GREY
  13503. ),
  13504. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13505. ]);
  13506. /** @type {number} a value typically in [0,1]: larger number => higher contrast */
  13507. this._gain = 0.5;
  13508. /** @type {number} a value typically in [0,1]: controls brightness */
  13509. this._offset = 0.5;
  13510. /** @type {number} gain decay, a value in [0,1] */
  13511. this._decay = 0.0;
  13512. /** @type {NightvisionQualityLevel} quality level */
  13513. this._quality = 'medium';
  13514. }
  13515. /**
  13516. * Gain, a value typically in [0,1]: larger number => higher contrast
  13517. * @returns {number}
  13518. */
  13519. get gain()
  13520. {
  13521. return this._gain;
  13522. }
  13523. /**
  13524. * Gain, a value typically in [0,1]: larger number => higher contrast
  13525. * @param {number} gain
  13526. */
  13527. set gain(gain)
  13528. {
  13529. this._gain = +gain;
  13530. }
  13531. /**
  13532. * Offset, a value typically in [0,1] that controls the brightness
  13533. * @returns {number}
  13534. */
  13535. get offset()
  13536. {
  13537. return this._offset;
  13538. }
  13539. /**
  13540. * Offset, a value typically in [0,1] that controls the brightness
  13541. * @param {number} offset
  13542. */
  13543. set offset(offset)
  13544. {
  13545. this._offset = +offset;
  13546. }
  13547. /**
  13548. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  13549. * @returns {number}
  13550. */
  13551. get decay()
  13552. {
  13553. return this._decay;
  13554. }
  13555. /**
  13556. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  13557. * @param {number} decay
  13558. */
  13559. set decay(decay)
  13560. {
  13561. this._decay = Math.max(0.0, Math.min(+decay, 1.0));
  13562. }
  13563. /**
  13564. * Quality level of the filter
  13565. * @returns {NightvisionQualityLevel}
  13566. */
  13567. get quality()
  13568. {
  13569. return this._quality;
  13570. }
  13571. /**
  13572. * Quality level of the filter
  13573. * @param {NightvisionQualityLevel} quality
  13574. */
  13575. set quality(quality)
  13576. {
  13577. if(quality === 'high' || quality === 'medium' || quality === 'low')
  13578. this._quality = quality;
  13579. else
  13580. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
  13581. }
  13582. /**
  13583. * Run the specific task of this node
  13584. * @param {SpeedyGPU} gpu
  13585. * @returns {void|SpeedyPromise<void>}
  13586. */
  13587. _run(gpu)
  13588. {
  13589. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13590. const width = image.width, height = image.height;
  13591. const gain = this._gain;
  13592. const offset = this._offset;
  13593. const decay = this._decay;
  13594. const quality = this._quality;
  13595. const filters = gpu.programs.filters;
  13596. const tmp = this._tex[0];
  13597. const illuminationMap = this._tex[1];
  13598. const outputTexture = this._tex[2];
  13599. // compute illumination map
  13600. if(quality == 'medium') {
  13601. filters.illuminationMapX.outputs(width, height, tmp);
  13602. filters.illuminationMapY.outputs(width, height, illuminationMap);
  13603. filters.illuminationMapX(image);
  13604. filters.illuminationMapY(tmp);
  13605. }
  13606. else if(quality == 'high') {
  13607. filters.illuminationMapHiX.outputs(width, height, tmp);
  13608. filters.illuminationMapHiY.outputs(width, height, illuminationMap);
  13609. filters.illuminationMapHiX(image);
  13610. filters.illuminationMapHiY(tmp);
  13611. }
  13612. else if(quality == 'low') {
  13613. filters.illuminationMapLoX.outputs(width, height, tmp);
  13614. filters.illuminationMapLoY.outputs(width, height, illuminationMap);
  13615. filters.illuminationMapLoX(image);
  13616. filters.illuminationMapLoY(tmp);
  13617. }
  13618. // run nightvision
  13619. if(format === types/* ImageFormat */.f5.GREY) {
  13620. filters.nightvisionGreyscale.outputs(width, height, outputTexture);
  13621. filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
  13622. }
  13623. else if(format === types/* ImageFormat */.f5.RGBA) {
  13624. filters.nightvision.outputs(width, height, outputTexture);
  13625. filters.nightvision(image, illuminationMap, gain, offset, decay);
  13626. }
  13627. // done!
  13628. this.output().swrite(outputTexture, format);
  13629. }
  13630. }
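/*
 * Illustrative sketch (not part of the library): tuning the Nightvision node above.
 * gain and offset are typically in [0,1]; decay is clamped to [0,1] by its setter;
 * quality must be 'low', 'medium' or 'high', otherwise IllegalArgumentError is thrown.
 */
const nightvision = new SpeedyPipelineNodeNightvision('nightvision');
nightvision.gain = 0.7;       // more contrast
nightvision.offset = 0.4;     // slightly darker output
nightvision.decay = 1.5;      // stored as 1.0 (clamped by the setter)
nightvision.quality = 'high'; // selects the illuminationMapHiX / illuminationMapHiY programs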
  13631. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
  13632. /*
  13633. * speedy-vision.js
  13634. * GPU-accelerated Computer Vision for JavaScript
  13635. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13636. *
  13637. * Licensed under the Apache License, Version 2.0 (the "License");
  13638. * you may not use this file except in compliance with the License.
  13639. * You may obtain a copy of the License at
  13640. *
  13641. * http://www.apache.org/licenses/LICENSE-2.0
  13642. *
  13643. * Unless required by applicable law or agreed to in writing, software
  13644. * distributed under the License is distributed on an "AS IS" BASIS,
  13645. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13646. * See the License for the specific language governing permissions and
  13647. * limitations under the License.
  13648. *
  13649. * normalize.js
  13650. * Normalize image to a range
  13651. */
  13652. /**
  13653. * Normalize image to a range
  13654. */
  13655. class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode
  13656. {
  13657. /**
  13658. * Constructor
  13659. * @param {string} [name] name of the node
  13660. */
  13661. constructor(name = undefined)
  13662. {
  13663. super(name, 4, [
  13664. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  13665. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  13666. msg.format === types/* ImageFormat */.f5.GREY
  13667. ),
  13668. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13669. ]);
  13670. /** @type {number} a value in [0,255] */
  13671. this._minValue = 0;
  13672. /** @type {number} a value in [0,255] */
  13673. this._maxValue = 255;
  13674. }
  13675. /**
  13676. * Minimum intensity in the output image, a value in [0,255]
  13677. * @returns {number}
  13678. */
  13679. get minValue()
  13680. {
  13681. return this._minValue;
  13682. }
  13683. /**
  13684. * Minimum intensity in the output image, a value in [0,255]
  13685. * @param {number} minValue
  13686. */
  13687. set minValue(minValue)
  13688. {
  13689. this._minValue = Math.max(0, Math.min(+minValue, 255));
  13690. }
  13691. /**
  13692. * Maximum intensity in the output image, a value in [0,255]
  13693. * @returns {number}
  13694. */
  13695. get maxValue()
  13696. {
  13697. return this._maxValue;
  13698. }
  13699. /**
  13700. * Maximum intensity in the output image, a value in [0,255]
  13701. * @param {number} maxValue
  13702. */
  13703. set maxValue(maxValue)
  13704. {
  13705. this._maxValue = Math.max(0, Math.min(+maxValue, 255));
  13706. }
  13707. /**
  13708. * Run the specific task of this node
  13709. * @param {SpeedyGPU} gpu
  13710. * @returns {void|SpeedyPromise<void>}
  13711. */
  13712. _run(gpu)
  13713. {
  13714. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13715. const width = image.width, height = image.height;
  13716. const outputTexture = this._tex[3];
  13717. let minValue = this._minValue;
  13718. let maxValue = this._maxValue;
  13719. if(minValue > maxValue)
  13720. minValue = maxValue = (minValue + maxValue) / 2;
  13721. const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
  13722. gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
  13723. gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
  13724. this.output().swrite(outputTexture, format);
  13725. }
  13726. /**
  13727. * Scan a single component in all pixels of the image and find the min & max intensities
  13728. * @param {SpeedyGPU} gpu
  13729. * @param {SpeedyTexture} image input image
  13730. * @param {PixelComponent} pixelComponent a single PixelComponent flag
  13731. * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
  13732. */
  13733. _scanMinMax(gpu, image, pixelComponent)
  13734. {
  13735. const tex = this._tex;
  13736. const program = gpu.programs.utils;
  13737. const width = image.width, height = image.height;
  13738. const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
  13739. utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
  13740. program.copyComponents.outputs(width, height, tex[2]);
  13741. program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
  13742. let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
  13743. for(let i = 0; i < numIterations; i++)
  13744. texture = program.scanMinMax2D(texture, i);
  13745. return texture;
  13746. }
  13747. }
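/*
 * Illustrative sketch (not part of the library): the setters above clamp
 * minValue/maxValue to [0,255], and _run() collapses an inverted range to its
 * midpoint before invoking the normalizeGreyscale program. The hypothetical helper
 * below reproduces that range-sanitizing rule on the CPU.
 */
function sanitizeRange(minValue, maxValue)
{
    minValue = Math.max(0, Math.min(+minValue, 255));
    maxValue = Math.max(0, Math.min(+maxValue, 255));
    if(minValue > maxValue)
        minValue = maxValue = (minValue + maxValue) / 2;
    return [ minValue, maxValue ];
}
// sanitizeRange(300, -10) -> [ 127.5, 127.5 ]  (both clamped, then collapsed)
// sanitizeRange(32, 224)  -> [ 32, 224 ]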
  13748. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
  13749. /*
  13750. * speedy-vision.js
  13751. * GPU-accelerated Computer Vision for JavaScript
  13752. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13753. *
  13754. * Licensed under the Apache License, Version 2.0 (the "License");
  13755. * you may not use this file except in compliance with the License.
  13756. * You may obtain a copy of the License at
  13757. *
  13758. * http://www.apache.org/licenses/LICENSE-2.0
  13759. *
  13760. * Unless required by applicable law or agreed to in writing, software
  13761. * distributed under the License is distributed on an "AS IS" BASIS,
  13762. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13763. * See the License for the specific language governing permissions and
  13764. * limitations under the License.
  13765. *
  13766. * filter-factory.js
  13767. * Image filters
  13768. */
  13769. /**
  13770. * Image filters
  13771. */
  13772. class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q
  13773. {
  13774. /**
  13775. * Convert image to greyscale
  13776. * @param {string} [name]
  13777. * @returns {SpeedyPipelineNodeGreyscale}
  13778. */
  13779. static Greyscale(name = undefined)
  13780. {
  13781. return new SpeedyPipelineNodeGreyscale(name);
  13782. }
  13783. /**
  13784. * Gaussian Blur
  13785. * @param {string} [name]
  13786. * @returns {SpeedyPipelineNodeGaussianBlur}
  13787. */
  13788. static GaussianBlur(name = undefined)
  13789. {
  13790. return new SpeedyPipelineNodeGaussianBlur(name);
  13791. }
  13792. /**
  13793. * Simple Blur (Box Filter)
  13794. * @param {string} [name]
  13795. * @returns {SpeedyPipelineNodeSimpleBlur}
  13796. */
  13797. static SimpleBlur(name = undefined)
  13798. {
  13799. return new SpeedyPipelineNodeSimpleBlur(name);
  13800. }
  13801. /**
  13802. * Median Blur
  13803. * @param {string} [name]
  13804. * @returns {SpeedyPipelineNodeMedianBlur}
  13805. */
  13806. static MedianBlur(name = undefined)
  13807. {
  13808. return new SpeedyPipelineNodeMedianBlur(name);
  13809. }
  13810. /**
  13811. * Image Convolution
  13812. * @param {string} [name]
  13813. * @returns {SpeedyPipelineNodeConvolution}
  13814. */
  13815. static Convolution(name = undefined)
  13816. {
  13817. return new SpeedyPipelineNodeConvolution(name);
  13818. }
  13819. /**
  13820. * Nightvision
  13821. * @param {string} [name]
  13822. * @returns {SpeedyPipelineNodeNightvision}
  13823. */
  13824. static Nightvision(name = undefined)
  13825. {
  13826. return new SpeedyPipelineNodeNightvision(name);
  13827. }
  13828. /**
  13829. * Normalize image
  13830. * @param {string} [name]
  13831. * @returns {SpeedyPipelineNodeNormalize}
  13832. */
  13833. static Normalize(name = undefined)
  13834. {
  13835. return new SpeedyPipelineNodeNormalize(name);
  13836. }
  13837. }
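/*
 * Illustrative sketch (not part of the library): the factory above is a thin
 * namespace in which each static method instantiates one of the node classes defined
 * earlier. For example (same module scope assumed):
 */
const blur = SpeedyPipelineFilterFactory.SimpleBlur('blur');
blur.kernelSize = new SpeedySize(9, 9);     // odd sizes from 3x3 up to 15x15
const median = SpeedyPipelineFilterFactory.MedianBlur('median');
median.kernelSize = new SpeedySize(7, 7);   // 3x3, 5x5 or 7x7, square only
// The nodes are then wired into a pipeline together with image source/sink nodes;
// the wiring API is outside this excerpt.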
  13838. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
  13839. /*
  13840. * speedy-vision.js
  13841. * GPU-accelerated Computer Vision for JavaScript
  13842. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13843. *
  13844. * Licensed under the Apache License, Version 2.0 (the "License");
  13845. * you may not use this file except in compliance with the License.
  13846. * You may obtain a copy of the License at
  13847. *
  13848. * http://www.apache.org/licenses/LICENSE-2.0
  13849. *
  13850. * Unless required by applicable law or agreed to in writing, software
  13851. * distributed under the License is distributed on an "AS IS" BASIS,
  13852. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13853. * See the License for the specific language governing permissions and
  13854. * limitations under the License.
  13855. *
  13856. * perspective-warp.js
  13857. * Warp an image using a perspective transformation
  13858. */
  13859. // Used when an invalid matrix is provided
  13860. const SINGULAR_MATRIX = [0,0,0,0,0,0,0,0,1];
  13861. /**
  13862. * Warp an image using a perspective transformation
  13863. */
  13864. class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode
  13865. {
  13866. /**
  13867. * Constructor
  13868. * @param {string} [name] name of the node
  13869. */
  13870. constructor(name = undefined)
  13871. {
  13872. super(name, 1, [
  13873. InputPort().expects(SpeedyPipelineMessageType.Image),
  13874. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13875. ]);
  13876. /** @type {SpeedyMatrix} perspective transformation */
  13877. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  13878. }
  13879. /**
  13880. * Perspective transform, a 3x3 homography matrix
  13881. * @returns {SpeedyMatrix}
  13882. */
  13883. get transform()
  13884. {
  13885. return this._transform;
  13886. }
  13887. /**
  13888. * Perspective transform, a 3x3 homography matrix
  13889. * @param {SpeedyMatrix} transform
  13890. */
  13891. set transform(transform)
  13892. {
  13893. if(!(transform.rows == 3 && transform.columns == 3))
  13894. throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  13895. this._transform = transform;
  13896. }
  13897. /**
  13898. * Run the specific task of this node
  13899. * @param {SpeedyGPU} gpu
  13900. * @returns {void|SpeedyPromise<void>}
  13901. */
  13902. _run(gpu)
  13903. {
  13904. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13905. const width = image.width, height = image.height;
  13906. const outputTexture = this._tex[0];
  13907. const homography = this._transform.read();
  13908. const inverseHomography = this._inverse3(homography);
  13909. const isValidHomography = !Number.isNaN(inverseHomography[0]);
  13910. gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
  13911. gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
  13912. this.output().swrite(outputTexture, format);
  13913. }
  13914. /**
  13915. * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
  13916. * @param {number[]} mat 3x3 matrix in column-major format
  13917. * @param {number} [eps] epsilon
  13918. * @returns {number[]} 3x3 inverse matrix in column-major format
  13919. */
  13920. _inverse3(mat, eps = 1e-6)
  13921. {
  13922. // read the entries of the matrix
  13923. const a11 = mat[0];
  13924. const a21 = mat[1];
  13925. const a31 = mat[2];
  13926. const a12 = mat[3];
  13927. const a22 = mat[4];
  13928. const a32 = mat[5];
  13929. const a13 = mat[6];
  13930. const a23 = mat[7];
  13931. const a33 = mat[8];
  13932. // compute cofactors
  13933. const b1 = a33 * a22 - a32 * a23; // b11
  13934. const b2 = a33 * a12 - a32 * a13; // b21
  13935. const b3 = a23 * a12 - a22 * a13; // b31
  13936. // compute the determinant
  13937. const det = a11 * b1 - a21 * b2 + a31 * b3;
  13938. // set up the inverse
  13939. if(!(Math.abs(det) < eps)) {
  13940. const d = 1.0 / det;
  13941. mat[0] = b1 * d;
  13942. mat[1] = -(a33 * a21 - a31 * a23) * d;
  13943. mat[2] = (a32 * a21 - a31 * a22) * d;
  13944. mat[3] = -b2 * d;
  13945. mat[4] = (a33 * a11 - a31 * a13) * d;
  13946. mat[5] = -(a32 * a11 - a31 * a12) * d;
  13947. mat[6] = b3 * d;
  13948. mat[7] = -(a23 * a11 - a21 * a13) * d;
  13949. mat[8] = (a22 * a11 - a21 * a12) * d;
  13950. }
  13951. else
  13952. mat.fill(Number.NaN, 0, 9);
  13953. // done!
  13954. return mat;
  13955. }
  13956. }
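/*
 * Illustrative sketch (not part of the library): _inverse3() above works on 3x3
 * matrices stored as 9-element arrays in column-major order, overwrites its argument,
 * and fills the array with NaN for a (near-)singular input, which _run() detects and
 * replaces with SINGULAR_MATRIX. mul3 is a hypothetical helper using the same
 * column-major convention; the private method is called here only as a sanity check.
 */
function mul3(A, B)
{
    const C = new Array(9).fill(0);
    for(let c = 0; c < 3; c++)
        for(let r = 0; r < 3; r++)
            for(let k = 0; k < 3; k++)
                C[c*3 + r] += A[k*3 + r] * B[c*3 + k];
    return C;
}
const warpNode = new SpeedyPipelineNodePerspectiveWarp();
const H = [1, 0, 0,  0, 1, 0,  10, 20, 1];       // translation by (10, 20), column-major
const Hinv = warpNode._inverse3(H.slice());      // -> translation by (-10, -20)
mul3(H, Hinv);                                   // ~identity: [1,0,0, 0,1,0, 0,0,1]
warpNode._inverse3([0,0,0, 0,0,0, 0,0,0]);       // singular -> array filled with NaN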
  13957. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
  13958. /*
  13959. * speedy-vision.js
  13960. * GPU-accelerated Computer Vision for JavaScript
  13961. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13962. *
  13963. * Licensed under the Apache License, Version 2.0 (the "License");
  13964. * you may not use this file except in compliance with the License.
  13965. * You may obtain a copy of the License at
  13966. *
  13967. * http://www.apache.org/licenses/LICENSE-2.0
  13968. *
  13969. * Unless required by applicable law or agreed to in writing, software
  13970. * distributed under the License is distributed on an "AS IS" BASIS,
  13971. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13972. * See the License for the specific language governing permissions and
  13973. * limitations under the License.
  13974. *
  13975. * resize.js
  13976. * Resize image
  13977. */
  13978. /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
  13979. /**
  13980. * Resize image
  13981. */
  13982. class SpeedyPipelineNodeResize extends SpeedyPipelineNode
  13983. {
  13984. /**
  13985. * Constructor
  13986. * @param {string} [name] name of the node
  13987. */
  13988. constructor(name = undefined)
  13989. {
  13990. super(name, 1, [
  13991. InputPort().expects(SpeedyPipelineMessageType.Image),
  13992. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13993. ]);
  13994. /** @type {SpeedySize} size of the output image, in pixels */
  13995. this._size = new SpeedySize(0, 0);
  13996. /** @type {SpeedyVector2} size of the output relative to the size of the input */
  13997. this._scale = new SpeedyVector2(1, 1);
  13998. /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
  13999. this._method = 'bilinear';
  14000. }
  14001. /**
  14002. * Size of the output image, in pixels (use 0 to use scale)
  14003. * @returns {SpeedySize}
  14004. */
  14005. get size()
  14006. {
  14007. return this._size;
  14008. }
  14009. /**
  14010. * Size of the output image, in pixels (use 0 to use scale)
  14011. * @param {SpeedySize} size
  14012. */
  14013. set size(size)
  14014. {
  14015. this._size = size;
  14016. }
  14017. /**
  14018. * Size of the output image relative to the size of the input image
  14019. * @returns {SpeedyVector2}
  14020. */
  14021. get scale()
  14022. {
  14023. return this._scale;
  14024. }
  14025. /**
  14026. * Size of the output image relative to the size of the input image
  14027. * @param {SpeedyVector2} scale
  14028. */
  14029. set scale(scale)
  14030. {
  14031. this._scale = scale;
  14032. }
  14033. /**
  14034. * Interpolation method
  14035. * @returns {SpeedyPipelineNodeResizeMethod}
  14036. */
  14037. get method()
  14038. {
  14039. return this._method;
  14040. }
  14041. /**
  14042. * Interpolation method
  14043. * @param {SpeedyPipelineNodeResizeMethod} method
  14044. */
  14045. set method(method)
  14046. {
  14047. if(method !== 'nearest' && method !== 'bilinear')
14048. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid interpolation method: "${method}"`);
  14049. this._method = method;
  14050. }
  14051. /**
  14052. * Run the specific task of this node
  14053. * @param {SpeedyGPU} gpu
  14054. * @returns {void|SpeedyPromise<void>}
  14055. */
  14056. _run(gpu)
  14057. {
  14058. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  14059. const width = image.width, height = image.height;
  14060. const outputTexture = this._tex[0];
  14061. const method = this._method;
  14062. const newWidth = this._size.width || Math.max(1, this._scale.x * width);
  14063. const newHeight = this._size.height || Math.max(1, this._scale.y * height);
  14064. if(method == 'bilinear') {
  14065. (gpu.programs.transforms.resizeBilinear
  14066. .outputs(newWidth, newHeight, outputTexture)
  14067. )(image);
  14068. }
  14069. else if(method == 'nearest') {
  14070. (gpu.programs.transforms.resizeNearest
  14071. .outputs(newWidth, newHeight, outputTexture)
  14072. )(image);
  14073. }
  14074. this.output().swrite(outputTexture, format);
  14075. }
  14076. }
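/*
 * Illustrative sketch (not part of the library): the output dimensions of the Resize
 * node, as computed in _run() above. An explicit nonzero size wins; otherwise the
 * scale vector is applied to the input size, with a floor of 1 pixel per dimension.
 * The hypothetical helper takes plain objects mirroring SpeedySize / SpeedyVector2.
 */
function resizedDimensions(inputWidth, inputHeight, size, scale)
{
    const newWidth = size.width || Math.max(1, scale.x * inputWidth);
    const newHeight = size.height || Math.max(1, scale.y * inputHeight);
    return [ newWidth, newHeight ];
}
// resizedDimensions(640, 480, { width: 0, height: 0 }, { x: 0.5, y: 0.5 }) -> [ 320, 240 ]
// resizedDimensions(640, 480, { width: 100, height: 0 }, { x: 1, y: 0.25 }) -> [ 100, 120 ]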
  14077. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
  14078. /*
  14079. * speedy-vision.js
  14080. * GPU-accelerated Computer Vision for JavaScript
  14081. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14082. *
  14083. * Licensed under the Apache License, Version 2.0 (the "License");
  14084. * you may not use this file except in compliance with the License.
  14085. * You may obtain a copy of the License at
  14086. *
  14087. * http://www.apache.org/licenses/LICENSE-2.0
  14088. *
  14089. * Unless required by applicable law or agreed to in writing, software
  14090. * distributed under the License is distributed on an "AS IS" BASIS,
  14091. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14092. * See the License for the specific language governing permissions and
  14093. * limitations under the License.
  14094. *
  14095. * transform-factory.js
  14096. * Image transforms
  14097. */
  14098. /**
  14099. * Image transforms
  14100. */
  14101. class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q
  14102. {
  14103. /**
  14104. * Resize image
  14105. * @param {string} [name]
  14106. * @returns {SpeedyPipelineNodeResize}
  14107. */
  14108. static Resize(name = undefined)
  14109. {
  14110. return new SpeedyPipelineNodeResize(name);
  14111. }
  14112. /**
  14113. * Warp an image using a perspective transformation
  14114. * @param {string} [name]
  14115. * @returns {SpeedyPipelineNodePerspectiveWarp}
  14116. */
  14117. static PerspectiveWarp(name = undefined)
  14118. {
  14119. return new SpeedyPipelineNodePerspectiveWarp(name);
  14120. }
  14121. }
  14122. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
  14123. /*
  14124. * speedy-vision.js
  14125. * GPU-accelerated Computer Vision for JavaScript
  14126. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14127. *
  14128. * Licensed under the Apache License, Version 2.0 (the "License");
  14129. * you may not use this file except in compliance with the License.
  14130. * You may obtain a copy of the License at
  14131. *
  14132. * http://www.apache.org/licenses/LICENSE-2.0
  14133. *
  14134. * Unless required by applicable law or agreed to in writing, software
  14135. * distributed under the License is distributed on an "AS IS" BASIS,
  14136. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14137. * See the License for the specific language governing permissions and
  14138. * limitations under the License.
  14139. *
  14140. * detector.js
  14141. * Abstract keypoint detectors
  14142. */
  14143. // Constants
  14144. const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
  14145. const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
  14146. const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
  14147. const NUMBER_OF_RGBA16_TEXTURES = 2;
  14148. // legacy constants
  14149. const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
  14150. const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
  14151. const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
  14152. /**
  14153. * Abstract keypoint detector
  14154. * @abstract
  14155. */
  14156. class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode
  14157. {
  14158. /**
  14159. * Constructor
  14160. * @param {string} [name] name of the node
  14161. * @param {number} [texCount] number of work textures
  14162. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14163. */
  14164. constructor(name = undefined, texCount = 0, portBuilders = undefined)
  14165. {
  14166. super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
  14167. /** @type {number} encoder capacity */
  14168. this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
  14169. /** @type {GLint} auxiliary storage */
  14170. this._oldWrapS = 0;
  14171. /** @type {SpeedyDrawableTexture[]} textures with 8-bytes per pixel */
  14172. this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
  14173. }
  14174. /**
  14175. * Initialize this node
  14176. * @param {SpeedyGPU} gpu
  14177. */
  14178. init(gpu)
  14179. {
  14180. // initialize
  14181. super.init(gpu);
  14182. // encodeKeypointSkipOffsets() relies on this
  14183. this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
  14184. // allocate RGBA16 textures
  14185. this._allocateTex16(gpu);
  14186. gpu.subscribe(this._allocateTex16, this, gpu);
  14187. }
  14188. /**
  14189. * Release this node
  14190. * @param {SpeedyGPU} gpu
  14191. */
  14192. release(gpu)
  14193. {
  14194. // deallocate RGBA16 textures
  14195. gpu.unsubscribe(this._allocateTex16, this);
  14196. this._deallocateTex16(gpu);
  14197. // we need to restore the texture parameter because textures come from a pool!
  14198. this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
  14199. // release
  14200. super.release(gpu);
  14201. }
  14202. /**
  14203. * Set a parameter of the special texture
  14204. * @param {GLenum} pname
  14205. * @param {GLint} param new value
14206. * @returns {GLint|undefined} old value of param, or undefined when no special texture is in use
  14207. */
  14208. _setupSpecialTexture(pname, param)
  14209. {
  14210. if(NUMBER_OF_INTERNAL_TEXTURES == 0)
  14211. return;
  14212. // legacy code
  14213. const texture = this._tex[this._tex.length - 1];
  14214. const gl = texture.gl;
  14215. gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
  14216. const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
  14217. gl.texParameteri(gl.TEXTURE_2D, pname, param);
  14218. gl.bindTexture(gl.TEXTURE_2D, null);
  14219. return oldval;
  14220. }
  14221. /**
  14222. * We can encode up to this many keypoints. If you find a
  14223. * tight bound for this, download times will be faster.
  14224. * @returns {number}
  14225. */
  14226. get capacity()
  14227. {
  14228. return this._capacity;
  14229. }
  14230. /**
  14231. * We can encode up to this many keypoints. If you find a
  14232. * tight bound for this, download times will be faster.
  14233. * @param {number} capacity
  14234. */
  14235. set capacity(capacity)
  14236. {
  14237. this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
  14238. }
  14239. /**
  14240. * Create a tiny texture with encoded keypoints out of
  14241. * an encoded corners texture
  14242. * @param {SpeedyGPU} gpu
  14243. * @param {SpeedyTexture} corners input
  14244. * @param {SpeedyDrawableTexture} encodedKeypoints output
  14245. * @param {number} [descriptorSize] in bytes
  14246. * @param {number} [extraSize] in bytes
  14247. * @returns {SpeedyDrawableTexture} encodedKeypoints
  14248. */
  14249. _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  14250. {
  14251. const encoderCapacity = this._capacity;
  14252. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
  14253. const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
  14254. const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
  14255. //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
  14256. const maxSize = Math.max(width, height);
  14257. const keypoints = gpu.programs.keypoints;
  14258. // prepare programs
  14259. keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
  14260. keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
  14261. keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  14262. // compute lookup table
  14263. let lookupTable = keypoints.initLookupTable(corners);
  14264. for(let b = 1; b < maxSize; b *= 2)
  14265. lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
  14266. /*
  14267. // debug: view texture
  14268. const lookupView = (keypoints.viewLookupTable.outputs(
  14269. width, height, this._tex[0]
  14270. ))(lookupTable);
  14271. const canvas = gpu.renderToCanvas(lookupView);
  14272. if(!this._ww) document.body.appendChild(canvas);
  14273. this._ww = 1;
  14274. */
  14275. // encode keypoints
  14276. return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
  14277. }
  14278. _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  14279. {
  14280. const capacity = this._capacity;
  14281. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  14282. const width = corners.width, height = corners.height;
  14283. const imageSize = [ width, height ];
  14284. const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
  14285. const keypoints = gpu.programs.keypoints;
  14286. const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
  14287. // prepare programs
  14288. keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
  14289. keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
  14290. keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
  14291. keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
  14292. // copy the input corners to a special texture
  14293. // that is needed by encodeKeypointSkipOffsets()
  14294. corners = (gpu.programs.utils.copy
  14295. .outputs(width, height, specialTexture)
  14296. )(corners);
  14297. // encode skip offsets
  14298. let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
  14299. for(let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) { // to boost performance
  14300. // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
  14301. // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
  14302. offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
  14303. }
  14304. /*
  14305. // debug: view corners
  14306. let cornerview = offsets;
  14307. const canvas = gpu.renderToCanvas(cornerview);
  14308. if(!window._ww) document.body.appendChild(canvas);
  14309. window._ww = 1;
  14310. */
  14311. // encode keypoint positions
  14312. let encodedKps = tex[3].clear();
  14313. for(let j = 0; j < ENCODER_PASSES; j++)
  14314. encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
  14315. // encode keypoint properties
  14316. return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
  14317. }
  14318. /**
  14319. * Create a tiny texture with zero encoded keypoints
  14320. * @param {SpeedyGPU} gpu
  14321. * @param {SpeedyDrawableTexture} encodedKeypoints output texture
  14322. * @param {number} [descriptorSize] in bytes
  14323. * @param {number} [extraSize] in bytes
  14324. * @returns {SpeedyDrawableTexture} encodedKeypoints
  14325. */
  14326. _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  14327. {
  14328. const capacity = 0;
  14329. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  14330. const keypoints = gpu.programs.keypoints;
  14331. keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  14332. return keypoints.encodeNullKeypoints();
  14333. }
  14334. /**
  14335. * Allocate RGBA16 textures
  14336. * @param {SpeedyGPU} gpu
  14337. */
  14338. _allocateTex16(gpu)
  14339. {
  14340. const gl = gpu.gl;
  14341. // RGBA16UI is color renderable according to the OpenGL ES 3 spec
  14342. for(let i = 0; i < this._tex16.length; i++)
  14343. this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
  14344. }
  14345. /**
  14346. * Deallocate RGBA16 textures
  14347. * @param {SpeedyGPU} gpu
  14348. */
  14349. _deallocateTex16(gpu)
  14350. {
  14351. for(let i = 0; i < this._tex16.length; i++)
  14352. this._tex16[i] = this._tex16[i].release();
  14353. }
  14354. /**
  14355. * Compute the length of the keypoint encoder, given its capacity
  14356. * @param {number} encoderCapacity how many keypoints can we fit?
  14357. * @param {number} descriptorSize in bytes
  14358. * @param {number} extraSize in bytes
  14359. */
  14360. static encoderLength(encoderCapacity, descriptorSize, extraSize)
  14361. {
  14362. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  14363. const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
  14364. return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
  14365. }
  14366. /**
  14367. * The maximum number of keypoints we can store using
  14368. * a particular configuration of a keypoint encoder
  14369. * @param {number} descriptorSize in bytes
  14370. * @param {number} extraSize in bytes
  14371. * @param {number} encoderLength
  14372. */
  14373. static encoderCapacity(descriptorSize, extraSize, encoderLength)
  14374. {
  14375. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  14376. const numberOfPixels = encoderLength * encoderLength;
  14377. return Math.floor(numberOfPixels / pixelsPerKeypoint);
  14378. }
  14379. }
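/*
 * Illustrative worked example (not part of the library) of encoderLength() and
 * encoderCapacity() above. The exact values of globals.MIN_KEYPOINT_SIZE and
 * globals.MIN_ENCODER_LENGTH are defined elsewhere in this bundle; the figures below
 * assume MIN_KEYPOINT_SIZE = 8 bytes and MIN_ENCODER_LENGTH = 1 purely for illustration.
 *
 *   descriptorSize = 32, extraSize = 0, capacity = 1024 keypoints
 *   pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10 RGBA pixels per keypoint
 *   numberOfPixels    = 1024 * 10 = 10240
 *   encoderLength     = ceil(sqrt(10240)) = 102   (a 102x102 texture)
 *
 * Going the other way, encoderCapacity(32, 0, 102) = floor(102*102 / 10) = 1040,
 * i.e., a texture of that side holds slightly more keypoints than requested.
 */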
  14380. /**
  14381. * Abstract scale-space keypoint detector
  14382. * @abstract
  14383. */
  14384. class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector
  14385. {
  14386. /**
  14387. * Constructor
  14388. * @param {string} [name] name of the node
  14389. * @param {number} [texCount] number of work textures
  14390. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14391. */
  14392. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  14393. {
  14394. super(name, texCount, portBuilders);
  14395. /** @type {number} number of pyramid levels */
  14396. this._levels = 1;
  14397. /** @type {number} scale factor between two pyramid levels */
  14398. this._scaleFactor = DEFAULT_SCALE_FACTOR;
  14399. }
  14400. /**
  14401. * Number of pyramid levels
  14402. * @returns {number}
  14403. */
  14404. get levels()
  14405. {
  14406. return this._levels;
  14407. }
  14408. /**
  14409. * Number of pyramid levels
  14410. * @param {number} levels
  14411. */
  14412. set levels(levels)
  14413. {
  14414. this._levels = Math.max(1, levels | 0);
  14415. }
  14416. /**
  14417. * Scale factor between two pyramid levels
  14418. * @returns {number}
  14419. */
  14420. get scaleFactor()
  14421. {
  14422. return this._scaleFactor;
  14423. }
  14424. /**
  14425. * Scale factor between two pyramid levels
  14426. * @param {number} scaleFactor should be greater than 1
  14427. */
  14428. set scaleFactor(scaleFactor)
  14429. {
  14430. this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
  14431. }
  14432. }
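/*
 * Illustrative note (not part of the library): the scaleFactor setter above clamps
 * its argument to [1, 2]. With the default factor of sqrt(2) and, in the usual
 * pyramid convention, each level being scaleFactor times smaller than the previous
 * one, the relative linear size of level i is:
 *
 *   Math.pow(1 / DEFAULT_SCALE_FACTOR, i)   // 1, 0.707, 0.5, 0.354, ... for i = 0, 1, 2, 3
 */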
  14433. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
  14434. /*
  14435. * speedy-vision.js
  14436. * GPU-accelerated Computer Vision for JavaScript
  14437. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14438. *
  14439. * Licensed under the Apache License, Version 2.0 (the "License");
  14440. * you may not use this file except in compliance with the License.
  14441. * You may obtain a copy of the License at
  14442. *
  14443. * http://www.apache.org/licenses/LICENSE-2.0
  14444. *
  14445. * Unless required by applicable law or agreed to in writing, software
  14446. * distributed under the License is distributed on an "AS IS" BASIS,
  14447. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14448. * See the License for the specific language governing permissions and
  14449. * limitations under the License.
  14450. *
  14451. * source.js
  14452. * Gets keypoints into the pipeline
  14453. */
  14454. // Constants
  14455. const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
  14456. const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
  14457. const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
  14458. /**
  14459. * Gets keypoints into the pipeline
  14460. */
  14461. class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode
  14462. {
  14463. /**
  14464. * Constructor
  14465. * @param {string} [name] name of the node
  14466. */
  14467. constructor(name = undefined)
  14468. {
  14469. super(name, 2, [
  14470. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14471. ]);
  14472. /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
  14473. this._keypoints = [];
  14474. /** @type {Float32Array} upload buffer (UBO) */
  14475. this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
  14476. /** @type {number} maximum number of keypoints */
  14477. this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
  14478. }
  14479. /**
  14480. * Keypoints to be uploaded
  14481. * @returns {SpeedyKeypoint[]}
  14482. */
  14483. get keypoints()
  14484. {
  14485. return this._keypoints;
  14486. }
  14487. /**
  14488. * Keypoints to be uploaded
  14489. * @param {SpeedyKeypoint[]} keypoints
  14490. */
  14491. set keypoints(keypoints)
  14492. {
  14493. if(!Array.isArray(keypoints))
  14494. throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
  14495. this._keypoints = keypoints;
  14496. }
  14497. /**
  14498. * The maximum number of keypoints we'll accept.
  14499. * This should be a tight bound for better performance.
  14500. * @returns {number}
  14501. */
  14502. get capacity()
  14503. {
  14504. return this._capacity;
  14505. }
  14506. /**
  14507. * The maximum number of keypoints we'll accept.
  14508. * This should be a tight bound for better performance.
  14509. * @param {number} capacity
  14510. */
  14511. set capacity(capacity)
  14512. {
  14513. this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
  14514. }
  14515. /**
  14516. * Run the specific task of this node
  14517. * @param {SpeedyGPU} gpu
  14518. * @returns {void|SpeedyPromise<void>}
  14519. */
  14520. _run(gpu)
  14521. {
  14522. // Orientation, descriptors and extra bytes will be lost
  14523. const descriptorSize = 0, extraSize = 0;
  14524. const keypoints = this._keypoints;
  14525. const maxKeypoints = this._capacity;
  14526. const numKeypoints = Math.min(keypoints.length, maxKeypoints);
  14527. const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
  14528. const buffer = this._buffer;
  14529. const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
  14530. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
  14531. uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
  14532. let startIndex = 0, encodedKeypoints = uploadKeypoints.clear();
  14533. for(let i = 0; i < numPasses; i++) {
  14534. const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
  14535. const endIndex = startIndex + n;
  14536. uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
  14537. encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
  14538. startIndex = endIndex;
  14539. }
  14540. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14541. }
  14542. /**
  14543. * Create an upload buffer
  14544. * @param {number} bufferSize number of keypoints
  14545. * @returns {Float32Array}
  14546. */
  14547. static _createUploadBuffer(bufferSize)
  14548. {
  14549. const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
  14550. utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
  14551. return new Float32Array(internalBuffer);
  14552. }
  14553. /**
  14554. * Fill upload buffer with keypoint data
  14555. * @param {Float32Array} buffer
  14556. * @param {SpeedyKeypoint[]} keypoints
  14557. * @param {number} start index, inclusive
  14558. * @param {number} end index, exclusive
  14559. * @returns {Float32Array} buffer
  14560. */
  14561. static _fillUploadBuffer(buffer, keypoints, start, end)
  14562. {
  14563. const n = end - start;
  14564. for(let i = 0; i < n; i++) {
  14565. const keypoint = keypoints[start + i];
  14566. const hasPos = keypoint.position !== undefined;
  14567. const j = i * 4;
  14568. // Format data as follows:
  14569. // vec4(xpos, ypos, lod, score)
  14570. buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
  14571. buffer[j+1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
  14572. buffer[j+2] = +(keypoint.lod) || 0;
  14573. buffer[j+3] = +(keypoint.score) || 0;
  14574. }
  14575. // done!
  14576. return buffer;
  14577. }
  14578. }
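/*
 * Illustrative sketch (not part of the library): the upload buffer above packs one
 * vec4(x, y, lod, score) per keypoint, so 1024 keypoints * 16 bytes = 16384 bytes,
 * exactly the minimum UBO size guaranteed by OpenGL ES 3. The private static helpers
 * defined above are called here only for illustration, with plain keypoint-like objects.
 */
const uploadBuffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
SpeedyPipelineNodeKeypointSource._fillUploadBuffer(uploadBuffer, [
    { x: 10, y: 20, lod: 0, score: 1.0 },
    { x: 32, y: 64, lod: 1, score: 0.5 },
], 0, 2);
// uploadBuffer.slice(0, 8) -> Float32Array [ 10, 20, 0, 1, 32, 64, 1, 0.5 ]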
  14579. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
  14580. /*
  14581. * speedy-vision.js
  14582. * GPU-accelerated Computer Vision for JavaScript
  14583. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14584. *
  14585. * Licensed under the Apache License, Version 2.0 (the "License");
  14586. * you may not use this file except in compliance with the License.
  14587. * You may obtain a copy of the License at
  14588. *
  14589. * http://www.apache.org/licenses/LICENSE-2.0
  14590. *
  14591. * Unless required by applicable law or agreed to in writing, software
  14592. * distributed under the License is distributed on an "AS IS" BASIS,
  14593. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14594. * See the License for the specific language governing permissions and
  14595. * limitations under the License.
  14596. *
  14597. * speedy-keypoint-descriptor.js
  14598. * Keypoint descriptor
  14599. */
  14600. /**
  14601. * Represents a keypoint descriptor
  14602. */
  14603. class SpeedyKeypointDescriptor
  14604. {
  14605. /**
  14606. * Constructor
  14607. * @param {Uint8Array} data descriptor bytes
  14608. */
  14609. constructor(data)
  14610. {
  14611. this._data = data;
  14612. return Object.freeze(this);
  14613. }
  14614. /**
  14615. * Descriptor data
  14616. * @returns {Uint8Array}
  14617. */
  14618. get data()
  14619. {
  14620. return this._data;
  14621. }
  14622. /**
  14623. * The size of the descriptor, in bytes
  14624. * @returns {number}
  14625. */
  14626. get size()
  14627. {
  14628. return this._data.byteLength;
  14629. }
  14630. /**
  14631. * A string representation of the keypoint descriptor
  14632. * @returns {string}
  14633. */
  14634. toString()
  14635. {
  14636. return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
  14637. }
  14638. }
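/*
 * Illustrative sketch (not part of the library): a descriptor is just a frozen
 * wrapper around its bytes.
 */
const descriptor = new SpeedyKeypointDescriptor(new Uint8Array([1, 2, 3, 4]));
descriptor.size;        // 4 (bytes)
descriptor.toString();  // "SpeedyKeypointDescriptor(1,2,3,4)"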
  14639. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
  14640. /*
  14641. * speedy-vision.js
  14642. * GPU-accelerated Computer Vision for JavaScript
  14643. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14644. *
  14645. * Licensed under the Apache License, Version 2.0 (the "License");
  14646. * you may not use this file except in compliance with the License.
  14647. * You may obtain a copy of the License at
  14648. *
  14649. * http://www.apache.org/licenses/LICENSE-2.0
  14650. *
  14651. * Unless required by applicable law or agreed to in writing, software
  14652. * distributed under the License is distributed on an "AS IS" BASIS,
  14653. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14654. * See the License for the specific language governing permissions and
  14655. * limitations under the License.
  14656. *
  14657. * sink.js
  14658. * Gets keypoints out of the pipeline
  14659. */
  14660. /** next power of 2 */
  14661. const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  14662. /** empty array of bytes */
  14663. const ZERO_BYTES = new Uint8Array([]);
  14664. /**
  14665. * Gets keypoints out of the pipeline
  14666. * @template {SpeedyKeypoint} T
  14667. * @abstract
  14668. */
  14669. class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode
  14670. {
  14671. /**
  14672. * Constructor
  14673. * @param {string} [name] name of the node
  14674. * @param {number} [texCount]
  14675. * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
  14676. */
  14677. constructor(name = 'keypoints', texCount = 0, portBuilders = [])
  14678. {
  14679. super(name, texCount + 2, portBuilders);
  14680. /** @type {Array<T|null>} keypoints (output) */
  14681. this._keypoints = [];
  14682. /** @type {SpeedyTextureReader} texture reader */
  14683. this._textureReader = new SpeedyTextureReader();
  14684. /** @type {number} page flipping index */
  14685. this._page = 0;
  14686. /** @type {boolean} accelerate GPU-CPU transfers */
  14687. this._turbo = false;
  14688. /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
  14689. this._includeDiscarded = false;
  14690. }
  14691. /**
  14692. * Accelerate GPU-CPU transfers
  14693. * @returns {boolean}
  14694. */
  14695. get turbo()
  14696. {
  14697. return this._turbo;
  14698. }
  14699. /**
  14700. * Accelerate GPU-CPU transfers
  14701. * @param {boolean} value
  14702. */
  14703. set turbo(value)
  14704. {
  14705. this._turbo = Boolean(value);
  14706. }
  14707. /**
  14708. * Should discarded keypoints be exported as null or dropped altogether?
  14709. * @returns {boolean}
  14710. */
  14711. get includeDiscarded()
  14712. {
  14713. return this._includeDiscarded;
  14714. }
  14715. /**
  14716. * Should discarded keypoints be exported as null or dropped altogether?
  14717. * @param {boolean} value
  14718. */
  14719. set includeDiscarded(value)
  14720. {
  14721. this._includeDiscarded = Boolean(value);
  14722. }
  14723. /**
  14724. * Initializes this node
  14725. * @param {SpeedyGPU} gpu
  14726. */
  14727. init(gpu)
  14728. {
  14729. super.init(gpu);
  14730. this._textureReader.init(gpu);
  14731. }
  14732. /**
  14733. * Releases this node
  14734. * @param {SpeedyGPU} gpu
  14735. */
  14736. release(gpu)
  14737. {
  14738. this._textureReader.release(gpu);
  14739. super.release(gpu);
  14740. }
  14741. /**
  14742. * Export data from this node to the user
  14743. * @returns {SpeedyPromise<Array<T|null>>}
  14744. */
  14745. export()
  14746. {
  14747. return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
  14748. }
  14749. /**
  14750. * Run the specific task of this node
  14751. * @param {SpeedyGPU} gpu
  14752. * @returns {void|SpeedyPromise<void>}
  14753. */
  14754. _run(gpu)
  14755. {
  14756. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14757. return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14758. }
  14759. /**
  14760. * Download and decode keypoints from the GPU
  14761. * @param {SpeedyGPU} gpu
  14762. * @param {SpeedyDrawableTexture} encodedKeypoints
  14763. * @param {number} descriptorSize
  14764. * @param {number} extraSize
  14765. * @param {number} encoderLength
  14766. * @returns {SpeedyPromise<void>}
  14767. */
  14768. _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength)
  14769. {
  14770. const useBufferedDownloads = this._turbo;
  14771. /*
  14772. I have found experimentally that, in Firefox, readPixelsAsync()
  14773. performs MUCH better if the width of the target texture is a power
  14774. of two. I have no idea why this is the case, nor if it's related to
  14775. some interaction with the GL drivers, somehow. This seems to make no
  14776. difference on Chrome, however. In any case, let's convert the input
  14777. texture to POT.
  14778. */
  14779. const encoderWidth = sink_nextPot(encoderLength);
  14780. //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  14781. const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
  14782. //const encoderWidth=encoderLength,encoderHeight=encoderLength;
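/*
 * Worked example of the POT conversion above (numbers are illustrative):
 * if encoderLength = 300, then
 *   encoderWidth  = sink_nextPot(300) = 512
 *   encoderHeight = Math.ceil(300 * 300 / 512) = Math.ceil(175.78...) = 176
 * and 512 * 176 = 90112 >= 300 * 300 = 90000, so no encoded pixels are lost
 * while the texture width becomes a power of two.
 */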
  14783. // copy the set of keypoints to an internal texture
  14784. const copiedTexture = this._tex[(this._tex.length - 1) - this._page];
  14785. (gpu.programs.utils.copyKeypoints
  14786. .outputs(encoderWidth, encoderHeight, copiedTexture)
  14787. )(encodedKeypoints);
  14788. // flip page
  14789. this._page = 1 - this._page;
  14790. // download the internal texture
  14791. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  14792. // decode the keypoints and store them in this._keypoints
  14793. this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
  14794. });
  14795. }
  14796. /**
  14797. * Decode a sequence of keypoints, given a flattened image of encoded pixels
  14798. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  14799. * @param {number} descriptorSize in bytes
  14800. * @param {number} extraSize in bytes
  14801. * @param {number} encoderWidth
  14802. * @param {number} encoderHeight
  14803. * @returns {Array<T|null>} keypoints
  14804. */
  14805. _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight)
  14806. {
  14807. const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
  14808. const m = globals.LOG2_PYRAMID_MAX_SCALE, h = globals.PYRAMID_MAX_LEVELS;
  14809. const piOver255 = Math.PI / 255.0;
  14810. const keypoints = /** @type {Array<T|null>} */ ( [] );
  14811. const includeDiscarded = this._includeDiscarded;
  14812. let descriptorBytes = ZERO_BYTES, extraBytes = ZERO_BYTES;
  14813. let x, y, z, w, lod, rotation, score;
  14814. let keypoint;
  14815. // validate
  14816. if(descriptorSize % 4 != 0 || extraSize % 4 != 0)
  14817. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
  14818. // how many bytes should we read?
  14819. const e2 = encoderWidth * encoderHeight * 4;
  14820. const size = pixels.byteLength;
  14821. if(size != e2)
  14822. utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
  14823. // copy the data (we use shared buffers when receiving pixels[])
  14824. if(descriptorSize + extraSize > 0)
  14825. pixels = new Uint8Array(pixels);
  14826. // for each encoded keypoint
  14827. for(let i = 0; i < size; i += bytesPerKeypoint) {
  14828. // extract encoded header
  14829. x = (pixels[i+1] << 8) | pixels[i];
  14830. y = (pixels[i+3] << 8) | pixels[i+2];
  14831. z = (pixels[i+5] << 8) | pixels[i+4];
  14832. w = (pixels[i+7] << 8) | pixels[i+6];
  14833. // the keypoint is "null": we have reached the end of the list
  14834. if(x == 0xFFFF && y == 0xFFFF)
  14835. break;
  14836. // the header is zero: discard the keypoint
  14837. if(x + y + z + w == 0) {
  14838. if(includeDiscarded)
  14839. keypoints.push(null);
  14840. continue;
  14841. }
  14842. // extract extra & descriptor bytes
  14843. if(extraSize > 0) {
  14844. extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
  14845. if(extraBytes.byteLength < extraSize) {
  14846. utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i/bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
  14847. continue; // something is off here; discard
  14848. }
  14849. }
  14850. if(descriptorSize > 0) {
  14851. descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
  14852. if(descriptorBytes.byteLength < descriptorSize) {
  14853. utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i/bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
  14854. continue; // something is off here; discard
  14855. }
  14856. }
  14857. // decode position: convert from fixed-point
  14858. x /= globals.FIX_RESOLUTION;
  14859. y /= globals.FIX_RESOLUTION;
  14860. // decode level-of-detail
  14861. lod = (pixels[i+4] < 255) ? -m + ((m + h) * pixels[i+4]) / 255.0 : 0.0;
  14862. // decode orientation
  14863. rotation = (2 * pixels[i+5] - 255) * piOver255;
  14864. // decode score
  14865. score = utils/* Utils */.A.decodeFloat16(w);
  14866. // create keypoint
  14867. keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
  14868. // register keypoint
  14869. keypoints.push(keypoint);
  14870. }
  14871. // done!
  14872. return keypoints;
  14873. }
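/*
 * Layout of a single encoded keypoint, as implied by the decoding logic above
 * (little-endian 16-bit words; offsets in bytes):
 *
 *   bytes 0-1: x (fixed-point)        bytes 2-3: y (fixed-point)
 *   byte  4  : encoded LOD            byte  5  : encoded orientation
 *   bytes 6-7: score (float16)
 *   bytes 8 .. 8+extraSize-1                          : extra data
 *   bytes 8+extraSize .. 8+extraSize+descriptorSize-1 : descriptor data
 *
 * A header with x == 0xFFFF and y == 0xFFFF marks the end of the keypoint
 * list; an all-zero header marks a discarded keypoint.
 */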
  14874. /**
  14875. * Instantiate a new keypoint
  14876. * @param {number} x
  14877. * @param {number} y
  14878. * @param {number} lod
  14879. * @param {number} rotation
  14880. * @param {number} score
  14881. * @param {Uint8Array} descriptorBytes
  14882. * @param {Uint8Array} extraBytes
  14883. * @returns {T}
  14884. */
  14885. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14886. {
  14887. throw new utils_errors/* AbstractMethodError */.aQ();
  14888. }
  14889. /**
  14890. * Allocate extra space
  14891. * @param {SpeedyGPU} gpu
  14892. * @param {SpeedyDrawableTexture} output output texture
  14893. * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
  14894. * @param {number} inputDescriptorSize in bytes, must be positive
  14895. * @param {number} inputExtraSize must be 0
  14896. * @param {number} outputDescriptorSize must be inputDescriptorSize
  14897. * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
  14898. * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
  14899. */
  14900. _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize)
  14901. {
  14902. utils/* Utils */.A.assert(inputExtraSize === 0);
  14903. utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
  14904. const inputEncoderLength = inputEncodedKeypoints.width;
  14905. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  14906. const outputEncoderCapacity = inputEncoderCapacity;
  14907. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  14908. return (gpu.programs.keypoints.allocateExtra
  14909. .outputs(outputEncoderLength, outputEncoderLength, output)
  14910. )(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  14911. }
  14912. }
  14913. /**
  14914. * Gets standard keypoints out of the pipeline
  14915. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
  14916. */
  14917. class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  14918. {
  14919. /**
  14920. * Constructor
  14921. * @param {string} [name] name of the node
  14922. */
  14923. constructor(name = 'keypoints')
  14924. {
  14925. super(name, 0, [
  14926. InputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14927. ]);
  14928. }
  14929. /**
  14930. * Instantiate a new keypoint
  14931. * @param {number} x
  14932. * @param {number} y
  14933. * @param {number} lod
  14934. * @param {number} rotation
  14935. * @param {number} score
  14936. * @param {Uint8Array} descriptorBytes
  14937. * @param {Uint8Array} extraBytes
  14938. * @returns {SpeedyKeypoint}
  14939. */
  14940. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14941. {
  14942. const descriptorSize = descriptorBytes.byteLength;
  14943. // read descriptor, if any
  14944. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  14945. // create keypoint
  14946. return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
  14947. }
  14948. }
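/*
 * Illustrative usage sketch (not part of the library source). In application
 * code, sink nodes are usually obtained through the library's public
 * Speedy.Keypoint factories rather than instantiated directly; the property
 * names below come from the classes above, and the rest of the pipeline
 * (source, detector, connections) is assumed and not shown in this excerpt.
 *
 *   const sink = new SpeedyPipelineNodeKeypointSink('keypoints');
 *   sink.turbo = true;             // accelerate GPU-CPU transfers (buffered downloads)
 *   sink.includeDiscarded = false; // drop discarded keypoints instead of exporting null
 *   // ... connect a detector to sink.input() and run the pipeline ...
 *   // the pipeline result is then expected to expose an array of SpeedyKeypoint
 *   // objects under the sink's name ('keypoints').
 */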
  14949. /**
  14950. * Gets tracked keypoints out of the pipeline
  14951. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
  14952. */
  14953. class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  14954. {
  14955. /**
  14956. * Constructor
  14957. * @param {string} [name] name of the node
  14958. */
  14959. constructor(name = 'keypoints')
  14960. {
  14961. super(name, 2, [
  14962. InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  14963. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  14964. msg.extraSize == 0
  14965. ),
  14966. InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)
  14967. ]);
  14968. }
  14969. /**
  14970. * Run the specific task of this node
  14971. * @param {SpeedyGPU} gpu
  14972. * @returns {void|SpeedyPromise<void>}
  14973. */
  14974. _run(gpu)
  14975. {
  14976. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14977. const { vectors } = /** @type {SpeedyPipelineMessageWith2DVectors} */ ( this.input('flow').read() );
  14978. // allocate extra space
  14979. const newDescriptorSize = descriptorSize;
  14980. const newExtraSize = 4; // 1 pixel per flow vector per keypoint
  14981. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  14982. // attach flow vectors
  14983. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  14984. const newEncodedKeypoints = (gpu.programs.keypoints.transferToExtra
  14985. .outputs(newEncoderLength, newEncoderLength, this._tex[1])
  14986. )(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  14987. // done!
  14988. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  14989. }
  14990. /**
  14991. * Instantiate a new keypoint
  14992. * @param {number} x
  14993. * @param {number} y
  14994. * @param {number} lod
  14995. * @param {number} rotation
  14996. * @param {number} score
  14997. * @param {Uint8Array} descriptorBytes
  14998. * @param {Uint8Array} extraBytes
  14999. * @returns {SpeedyTrackedKeypoint}
  15000. */
  15001. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  15002. {
  15003. const descriptorSize = descriptorBytes.byteLength;
  15004. const extraSize = extraBytes.byteLength;
  15005. // read descriptor, if any
  15006. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  15007. // read flow vector
  15008. const fx = utils/* Utils */.A.decodeFloat16((extraBytes[1] << 8) | extraBytes[0]);
  15009. const fy = utils/* Utils */.A.decodeFloat16((extraBytes[3] << 8) | extraBytes[2]);
  15010. const flow = new SpeedyVector2(fx, fy);
  15011. // create keypoint
  15012. return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
  15013. }
  15014. }
  15015. /**
  15016. * Gets matched keypoints out of the pipeline
  15017. * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
  15018. */
  15019. class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  15020. {
  15021. /**
  15022. * Constructor
  15023. * @param {string} [name] name of the node
  15024. */
  15025. constructor(name = 'keypoints')
  15026. {
  15027. super(name, 2, [
  15028. InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  15029. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  15030. msg.extraSize == 0
  15031. ),
  15032. InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)
  15033. ]);
  15034. }
  15035. /**
  15036. * Run the specific task of this node
  15037. * @param {SpeedyGPU} gpu
  15038. * @returns {void|SpeedyPromise<void>}
  15039. */
  15040. _run(gpu)
  15041. {
  15042. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15043. const { encodedMatches, matchesPerKeypoint } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */ ( this.input('matches').read() );
  15044. // allocate space for the matches
  15045. const newDescriptorSize = descriptorSize;
  15046. const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
  15047. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  15048. // transfer matches to a new texture
  15049. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  15050. const newEncodedKeypoints = (gpu.programs.keypoints.transferToExtra
  15051. .outputs(newEncoderLength, newEncoderLength, this._tex[1])
  15052. )(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  15053. // done!
  15054. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  15055. }
  15056. /**
  15057. * Instantiate a new keypoint
  15058. * @param {number} x
  15059. * @param {number} y
  15060. * @param {number} lod
  15061. * @param {number} rotation
  15062. * @param {number} score
  15063. * @param {Uint8Array} descriptorBytes
  15064. * @param {Uint8Array} extraBytes
  15065. * @returns {SpeedyMatchedKeypoint}
  15066. */
  15067. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  15068. {
  15069. const descriptorSize = descriptorBytes.byteLength;
  15070. const extraSize = extraBytes.byteLength;
  15071. // read descriptor, if any
  15072. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  15073. // decode matches
  15074. const matchesPerKeypoint = extraSize / 4;
  15075. const matches = /** @type {SpeedyKeypointMatch[]} */ ( new Array(matchesPerKeypoint) );
  15076. for(let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
  15077. const base = matchIndex * 4;
  15078. const u32 = extraBytes[base] | (extraBytes[base+1] << 8) | (extraBytes[base+2] << 16) | (extraBytes[base+3] << 24);
  15079. const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
  15080. matches[matchIndex] = match;
  15081. }
  15082. // done!
  15083. return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
  15084. }
  15085. }
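/*
 * Sketch of how one match record is unpacked above (values illustrative):
 * each match occupies 4 extra bytes, read as a little-endian 32-bit word.
 * The low bits (selected by MATCH_INDEX_MASK) hold the first argument of
 * SpeedyKeypointMatch, the index of the matched keypoint; the remaining high
 * bits hold the second argument, the match distance.
 *
 *   const u32 = b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
 *   const index = u32 & globals.MATCH_INDEX_MASK;
 *   const distance = u32 >>> globals.MATCH_INDEX_BITS;
 */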
  15086. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
  15087. /*
  15088. * speedy-vision.js
  15089. * GPU-accelerated Computer Vision for JavaScript
  15090. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15091. *
  15092. * Licensed under the Apache License, Version 2.0 (the "License");
  15093. * you may not use this file except in compliance with the License.
  15094. * You may obtain a copy of the License at
  15095. *
  15096. * http://www.apache.org/licenses/LICENSE-2.0
  15097. *
  15098. * Unless required by applicable law or agreed to in writing, software
  15099. * distributed under the License is distributed on an "AS IS" BASIS,
  15100. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15101. * See the License for the specific language governing permissions and
  15102. * limitations under the License.
  15103. *
  15104. * clipper.js
  15105. * Keypoint clipper
  15106. */
  15107. // Constants
  15108. const LOG2_STRIDE = 5;
  15109. const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
  15110. /**
15111. * Keypoint clipper: keeps only the best (highest-scoring) keypoints of a stream
  15112. */
  15113. class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode
  15114. {
  15115. /**
  15116. * Constructor
  15117. * @param {string} [name] name of the node
  15118. */
  15119. constructor(name = undefined)
  15120. {
  15121. super(name, 4, [
  15122. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15123. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15124. ]);
  15125. /** @type {number} the maximum number of keypoints in the output */
  15126. this._size = MAX_SIZE;
  15127. }
  15128. /**
  15129. * The maximum number of keypoints in the output
  15130. * @returns {number}
  15131. */
  15132. get size()
  15133. {
  15134. return this._size;
  15135. }
  15136. /**
  15137. * The maximum number of keypoints in the output
  15138. * @param {number} size
  15139. */
  15140. set size(size)
  15141. {
  15142. this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
  15143. }
  15144. /**
  15145. * Run the specific task of this node
  15146. * @param {SpeedyGPU} gpu
  15147. * @returns {void|SpeedyPromise<void>}
  15148. */
  15149. _run(gpu)
  15150. {
  15151. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15152. const keypoints = gpu.programs.keypoints;
  15153. const clipValue = this._size;
  15154. const tex = this._tex;
  15155. const outputTexture = this._tex[3];
  15156. // find the minimum power of 2 pot such that pot >= capacity
  15157. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15158. //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
  15159. // find the dimensions of the sorting shaders
  15160. const stride = 1 << LOG2_STRIDE; // must be a power of 2
  15161. //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
  15162. const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
  15163. const numberOfPixels = stride * height;
  15164. // find the dimensions of the output texture
  15165. const newCapacity = Math.min(capacity, clipValue);
  15166. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
  15167. // generate permutation of keypoints
  15168. keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
  15169. let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15170. // sort permutation
  15171. const numPasses = Math.ceil(Math.log2(numberOfPixels));
  15172. keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
  15173. for(let i = 1; i <= numPasses; i++) {
  15174. const blockSize = 1 << i; // 2, 4, 8...
  15175. const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
  15176. permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
  15177. }
  15178. // apply permutation
  15179. keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
  15180. keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
  15181. /*
  15182. // debug (read the contents of the permutation)
  15183. const pixels = permutation.inspect(gpu), debug = [];
  15184. for(let i = 0; i < pixels.length; i += 4) {
  15185. let id = pixels[i] | (pixels[i+1] << 8);
  15186. let score = pixels[i+2] / 255.0;
  15187. let valid = pixels[i+3] / 255.0;
  15188. debug.push([ id, valid, score, ].join(', '));
  15189. }
  15190. console.log(debug);
  15191. */
  15192. // done!
  15193. this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
  15194. }
  15195. }
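/*
 * Illustrative sketch (not part of the library source): the clipper keeps at
 * most `size` keypoints, so a typical configuration is just:
 *
 *   const clipper = new SpeedyPipelineNodeKeypointClipper('clipper');
 *   clipper.size = 800; // keep up to 800 of the best keypoints
 *
 * Values outside [0, MAX_ENCODER_CAPACITY] are clamped by the setter above.
 */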
  15196. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
  15197. /*
  15198. * speedy-vision.js
  15199. * GPU-accelerated Computer Vision for JavaScript
  15200. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15201. *
  15202. * Licensed under the Apache License, Version 2.0 (the "License");
  15203. * you may not use this file except in compliance with the License.
  15204. * You may obtain a copy of the License at
  15205. *
  15206. * http://www.apache.org/licenses/LICENSE-2.0
  15207. *
  15208. * Unless required by applicable law or agreed to in writing, software
  15209. * distributed under the License is distributed on an "AS IS" BASIS,
  15210. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15211. * See the License for the specific language governing permissions and
  15212. * limitations under the License.
  15213. *
  15214. * border-clipper.js
  15215. * Keypoint Border Clipper
  15216. */
  15217. /**
15218. * The Border Clipper removes all keypoints that lie within a given border along the edges of an image
  15219. */
  15220. class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode
  15221. {
  15222. /**
  15223. * Constructor
  15224. * @param {string} [name] name of the node
  15225. */
  15226. constructor(name = undefined)
  15227. {
  15228. super(name, 5, [
  15229. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15230. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15231. ]);
  15232. /** @type {SpeedySize} image size, in pixels */
  15233. this._imageSize = new SpeedySize(0,0);
  15234. /** @type {SpeedyVector2} border size, in pixels */
  15235. this._borderSize = new SpeedyVector2(0,0);
  15236. }
  15237. /**
  15238. * Image size, in pixels
  15239. * @returns {SpeedySize}
  15240. */
  15241. get imageSize()
  15242. {
  15243. return this._imageSize;
  15244. }
  15245. /**
  15246. * Image size, in pixels
  15247. * @param {SpeedySize} imageSize
  15248. */
  15249. set imageSize(imageSize)
  15250. {
  15251. this._imageSize = imageSize;
  15252. }
  15253. /**
  15254. * Border size, in pixels
  15255. * @returns {SpeedyVector2}
  15256. */
  15257. get borderSize()
  15258. {
  15259. return this._borderSize;
  15260. }
  15261. /**
  15262. * Border size, in pixels
  15263. * @param {SpeedyVector2} borderSize
  15264. */
  15265. set borderSize(borderSize)
  15266. {
  15267. this._borderSize = borderSize;
  15268. }
  15269. /**
  15270. * Run the specific task of this node
  15271. * @param {SpeedyGPU} gpu
  15272. * @returns {void|SpeedyPromise<void>}
  15273. */
  15274. _run(gpu)
  15275. {
  15276. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15277. const keypoints = gpu.programs.keypoints;
  15278. const imageSize = this._imageSize;
  15279. const borderSize = this._borderSize;
  15280. const imageWidth = imageSize.width, imageHeight = imageSize.height;
  15281. const borderLeft = borderSize.x, borderRight = borderSize.x;
  15282. const borderTop = borderSize.y, borderBottom = borderSize.y;
  15283. const tex = this._tex;
  15284. // validate
  15285. if(imageWidth == 0 || imageHeight == 0)
  15286. throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
  15287. // find the capacity of the keypoint stream
  15288. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15289. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15290. // prepare programs
  15291. keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
  15292. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15293. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  15294. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  15295. // clip keypoints
  15296. let clippedKeypoints = keypoints.clipBorder(
  15297. imageWidth, imageHeight,
  15298. borderTop, borderRight, borderBottom, borderLeft,
  15299. encodedKeypoints, descriptorSize, extraSize, encoderLength
  15300. );
  15301. // sort keypoints
  15302. let sortedKeypoints = keypoints.mixKeypointsInit(
  15303. clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  15304. );
  15305. for(let b = 1; b < capacity; b *= 2)
  15306. sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  15307. clippedKeypoints = keypoints.mixKeypointsApply(
  15308. sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength
  15309. );
  15310. /*
  15311. // debug: view keypoints
  15312. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15313. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  15314. */
  15315. // done!
  15316. this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
  15317. }
  15318. }
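/*
 * Illustrative sketch (not part of the library source), using only the
 * properties defined above:
 *
 *   const borderClipper = new SpeedyPipelineNodeKeypointBorderClipper('borderClipper');
 *   borderClipper.imageSize = new SpeedySize(640, 480);   // must be set before running
 *   borderClipper.borderSize = new SpeedyVector2(16, 24); // 16px left/right, 24px top/bottom
 *
 * Keypoints within borderSize.x pixels of the left/right edges or within
 * borderSize.y pixels of the top/bottom edges are removed from the stream.
 */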
  15319. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
  15320. /*
  15321. * speedy-vision.js
  15322. * GPU-accelerated Computer Vision for JavaScript
  15323. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15324. *
  15325. * Licensed under the Apache License, Version 2.0 (the "License");
  15326. * you may not use this file except in compliance with the License.
  15327. * You may obtain a copy of the License at
  15328. *
  15329. * http://www.apache.org/licenses/LICENSE-2.0
  15330. *
  15331. * Unless required by applicable law or agreed to in writing, software
  15332. * distributed under the License is distributed on an "AS IS" BASIS,
  15333. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15334. * See the License for the specific language governing permissions and
  15335. * limitations under the License.
  15336. *
  15337. * buffer.js
  15338. * Keypoint Buffer
  15339. */
  15340. /**
  15341. * Keypoint Buffer: a node with memory.
  15342. * At time t, it outputs the keypoints received at time t-1
  15343. */
  15344. class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode
  15345. {
  15346. /**
  15347. * Constructor
  15348. * @param {string} [name] name of the node
  15349. */
  15350. constructor(name = undefined)
  15351. {
  15352. super(name, 2, [
  15353. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15354. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15355. ]);
  15356. /** @type {number} current page: 0 or 1 */
  15357. this._pageIndex = 0;
  15358. /** @type {boolean} first run? */
  15359. this._initialized = false;
  15360. /** @type {number} previous descriptor size, in bytes */
  15361. this._previousDescriptorSize = 0;
  15362. /** @type {number} previous extra size, in bytes */
  15363. this._previousExtraSize = 0;
  15364. /** @type {number} previous encoder length */
  15365. this._previousEncoderLength = 0;
  15366. /** @type {boolean} frozen buffer? */
  15367. this._frozen = false;
  15368. }
  15369. /**
  15370. * A frozen buffer discards the input, effectively increasing the buffering time
  15371. * @returns {boolean}
  15372. */
  15373. get frozen()
  15374. {
  15375. return this._frozen;
  15376. }
  15377. /**
  15378. * A frozen buffer discards the input, effectively increasing the buffering time
  15379. * @param {boolean} value
  15380. */
  15381. set frozen(value)
  15382. {
  15383. this._frozen = Boolean(value);
  15384. }
  15385. /**
  15386. * Releases this node
  15387. * @param {SpeedyGPU} gpu
  15388. */
  15389. release(gpu)
  15390. {
  15391. this._initialized = false;
  15392. super.release(gpu);
  15393. }
  15394. /**
  15395. * Run the specific task of this node
  15396. * @param {SpeedyGPU} gpu
  15397. * @returns {void|SpeedyPromise<void>}
  15398. */
  15399. _run(gpu)
  15400. {
  15401. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15402. const previousDescriptorSize = this._previousDescriptorSize;
  15403. const previousExtraSize = this._previousExtraSize;
  15404. const previousEncoderLength = this._previousEncoderLength;
  15405. const page = this._tex;
  15406. const previousInputTexture = page[1 - this._pageIndex];
  15407. const outputTexture = page[this._pageIndex];
  15408. // bufferize
  15409. if(!this._frozen || !this._initialized) {
  15410. // store input
  15411. this._previousDescriptorSize = descriptorSize;
  15412. this._previousExtraSize = extraSize;
  15413. this._previousEncoderLength = encoderLength;
  15414. previousInputTexture.resize(encoderLength, encoderLength);
  15415. encodedKeypoints.copyTo(previousInputTexture);
  15416. // page flipping
  15417. this._pageIndex = 1 - this._pageIndex;
  15418. }
  15419. // first run?
  15420. if(!this._initialized) {
  15421. this._initialized = true;
  15422. this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
  15423. return;
  15424. }
  15425. // done!
  15426. this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
  15427. }
  15428. }
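/*
 * Timing sketch of the buffer above (illustrative): let K(t) be the keypoints
 * received at time t. On the first run the buffer simply echoes its input;
 * afterwards it outputs the previous input, unless it is frozen:
 *
 *   t = 0: input K(0), output K(0)   (first run)
 *   t = 1: input K(1), output K(0)
 *   t = 2: input K(2), output K(1)
 *   t = 3: buffer.frozen = true; input K(3), output K(2); K(3) is discarded
 */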
  15429. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
  15430. /*
  15431. * speedy-vision.js
  15432. * GPU-accelerated Computer Vision for JavaScript
  15433. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15434. *
  15435. * Licensed under the Apache License, Version 2.0 (the "License");
  15436. * you may not use this file except in compliance with the License.
  15437. * You may obtain a copy of the License at
  15438. *
  15439. * http://www.apache.org/licenses/LICENSE-2.0
  15440. *
  15441. * Unless required by applicable law or agreed to in writing, software
  15442. * distributed under the License is distributed on an "AS IS" BASIS,
  15443. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15444. * See the License for the specific language governing permissions and
  15445. * limitations under the License.
  15446. *
  15447. * mixer.js
  15448. * Keypoint Mixer
  15449. */
  15450. /**
  15451. * Keypoint Mixer: merges two sets of keypoints
  15452. */
  15453. class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode
  15454. {
  15455. /**
  15456. * Constructor
  15457. * @param {string} [name] name of the node
  15458. */
  15459. constructor(name = undefined)
  15460. {
  15461. super(name, 5, [
  15462. InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints),
  15463. InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints),
  15464. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15465. ]);
  15466. }
  15467. /**
  15468. * Run the specific task of this node
  15469. * @param {SpeedyGPU} gpu
  15470. * @returns {void|SpeedyPromise<void>}
  15471. */
  15472. _run(gpu)
  15473. {
  15474. const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in0').read() );
  15475. const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in1').read() );
  15476. const descriptorSize = kps0.descriptorSize;
  15477. const extraSize = kps0.extraSize;
  15478. const keypoints = gpu.programs.keypoints;
  15479. const tex = this._tex;
  15480. // ensure that the format of kps0 equals the format of kps1
14981. if(!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize))
  15482. throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
  15483. // find the capacity of kps0 + kps1
  15484. const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
  15485. const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
  15486. const capacity = cap0 + cap1;
  15487. // find the dimensions of the output texture
  15488. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  15489. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15490. // prepare programs
  15491. keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
  15492. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15493. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  15494. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  15495. // mix keypoints
  15496. let mixedKeypoints = keypoints.mixKeypointsPreInit(
  15497. kps0.encodedKeypoints, kps1.encodedKeypoints,
  15498. kps0.encoderLength, kps1.encoderLength,
  15499. cap0, cap1,
  15500. descriptorSize,
  15501. extraSize,
  15502. encoderLength
  15503. );
  15504. let sortedKeypoints = keypoints.mixKeypointsInit(
  15505. mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  15506. );
  15507. for(let b = 1; b < capacity; b *= 2)
  15508. sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  15509. mixedKeypoints = keypoints.mixKeypointsApply(
  15510. sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength
  15511. );
  15512. /*
  15513. // debug: view keypoints
  15514. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15515. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  15516. */
  15517. this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
  15518. }
  15519. }
  15520. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
  15521. /*
  15522. * speedy-vision.js
  15523. * GPU-accelerated Computer Vision for JavaScript
  15524. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15525. *
  15526. * Licensed under the Apache License, Version 2.0 (the "License");
  15527. * you may not use this file except in compliance with the License.
  15528. * You may obtain a copy of the License at
  15529. *
  15530. * http://www.apache.org/licenses/LICENSE-2.0
  15531. *
  15532. * Unless required by applicable law or agreed to in writing, software
  15533. * distributed under the License is distributed on an "AS IS" BASIS,
  15534. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15535. * See the License for the specific language governing permissions and
  15536. * limitations under the License.
  15537. *
  15538. * shuffler.js
  15539. * Keypoint Shuffler
  15540. */
  15541. /**
  15542. * The Keypoint Shuffler shuffles a list of keypoints
  15543. */
  15544. class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode
  15545. {
  15546. /**
  15547. * Constructor
  15548. * @param {string} [name] name of the node
  15549. */
  15550. constructor(name = undefined)
  15551. {
  15552. super(name, 6, [
  15553. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15554. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15555. ]);
  15556. /** @type {number} maximum number of keypoints */
  15557. this._maxKeypoints = Number.NaN;
  15558. }
  15559. /**
  15560. * Maximum number of keypoints (optional)
  15561. * @returns {number}
  15562. */
  15563. get maxKeypoints()
  15564. {
  15565. return this._maxKeypoints;
  15566. }
  15567. /**
  15568. * Maximum number of keypoints (optional)
  15569. * @param {number} value
  15570. */
  15571. set maxKeypoints(value)
  15572. {
  15573. if(!Number.isNaN(value))
  15574. this._maxKeypoints = Math.max(0, value | 0);
  15575. else
  15576. this._maxKeypoints = Number.NaN;
  15577. }
  15578. /**
  15579. * Run the specific task of this node
  15580. * @param {SpeedyGPU} gpu
  15581. * @returns {void|SpeedyPromise<void>}
  15582. */
  15583. _run(gpu)
  15584. {
  15585. let { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15586. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15587. const maxKeypoints = this._maxKeypoints;
  15588. // shuffle the keypoints (including nulls)
  15589. const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
  15590. const permutationLength = Math.min(permutationMaxLength, capacity);
  15591. const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
  15592. encodedKeypoints = (gpu.programs.keypoints.shuffle
  15593. .setUBO('Permutation', permutation)
  15594. .outputs(encoderLength, encoderLength, this._tex[0])
  15595. )(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15596. // sort the keypoints
  15597. gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
  15598. gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
  15599. gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
  15600. let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(
  15601. encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  15602. );
  15603. for(let b = 1; b < capacity; b *= 2)
  15604. sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
  15605. encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(
  15606. sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength
  15607. );
  15608. // clip the output?
  15609. if(!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
  15610. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
  15611. encodedKeypoints = (gpu.programs.keypoints.clip
  15612. .outputs(newEncoderLength, newEncoderLength, this._tex[5])
  15613. )(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
  15614. encoderLength = newEncoderLength;
  15615. }
  15616. // done!
  15617. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15618. }
  15619. /**
  15620. * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
  15621. * @param {number} n positive integer
  15622. * @param {number} [bufsize] size of the output array
  15623. * @returns {Int32Array} permutation
  15624. */
  15625. _generatePermutation(n, bufsize = n)
  15626. {
  15627. const array = new Int32Array(bufsize);
  15628. const p = array.subarray(0, n).fill(-1);
  15629. const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
  15630. for(let i = 0, j = 0; i < n; i++) {
  15631. if(p[i] < 0) {
  15632. do { p[i] = q[j++]; } while(p[i] < i);
  15633. p[p[i]] = i;
  15634. }
  15635. }
  15636. return array; // padded with zeros
  15637. }
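/*
 * Example (illustrative): for n = 6, _generatePermutation could return
 * something like [3, 4, 5, 0, 1, 2], padded with zeros up to bufsize.
 * Since p[p[x]] == x for all x, applying the permutation twice restores
 * the original order.
 */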
  15638. }
  15639. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
  15640. /*
  15641. * speedy-vision.js
  15642. * GPU-accelerated Computer Vision for JavaScript
  15643. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15644. *
  15645. * Licensed under the Apache License, Version 2.0 (the "License");
  15646. * you may not use this file except in compliance with the License.
  15647. * You may obtain a copy of the License at
  15648. *
  15649. * http://www.apache.org/licenses/LICENSE-2.0
  15650. *
  15651. * Unless required by applicable law or agreed to in writing, software
  15652. * distributed under the License is distributed on an "AS IS" BASIS,
  15653. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15654. * See the License for the specific language governing permissions and
  15655. * limitations under the License.
  15656. *
  15657. * multiplexer.js
  15658. * Keypoint multiplexer
  15659. */
  15660. /** @type {string[]} the names of the input ports indexed by their number */
  15661. const multiplexer_INPUT_PORT = [ 'in0', 'in1' ];
  15662. /**
  15663. * Keypoint multiplexer
  15664. */
  15665. class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode
  15666. {
  15667. /**
  15668. * Constructor
  15669. * @param {string} [name] name of the node
  15670. */
  15671. constructor(name = undefined)
  15672. {
  15673. super(name, 0, [
  15674. ...(multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints))),
  15675. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15676. ]);
  15677. /** @type {number} which port should be linked to the output? */
  15678. this._port = 0;
  15679. }
  15680. /**
  15681. * The number of the port that should be linked to the output
  15682. * @returns {number}
  15683. */
  15684. get port()
  15685. {
  15686. return this._port;
  15687. }
  15688. /**
  15689. * The number of the port that should be linked to the output
  15690. * @param {number} port
  15691. */
  15692. set port(port)
  15693. {
  15694. if(port < 0 || port >= multiplexer_INPUT_PORT.length)
  15695. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  15696. this._port = port | 0;
  15697. }
  15698. /**
  15699. * Run the specific task of this node
  15700. * @param {SpeedyGPU} gpu
  15701. * @returns {void|SpeedyPromise<void>}
  15702. */
  15703. _run(gpu)
  15704. {
  15705. const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
  15706. this.output().write(message);
  15707. }
  15708. }
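/*
 * Illustrative sketch (not part of the library source): the multiplexer simply
 * forwards one of its two keypoint inputs ('in0' or 'in1') to the output.
 *
 *   const mux = new SpeedyPipelineNodeKeypointMultiplexer('mux');
 *   mux.port = 1; // forward the stream connected to 'in1'
 */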
  15709. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
  15710. /*
  15711. * speedy-vision.js
  15712. * GPU-accelerated Computer Vision for JavaScript
  15713. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15714. *
  15715. * Licensed under the Apache License, Version 2.0 (the "License");
  15716. * you may not use this file except in compliance with the License.
  15717. * You may obtain a copy of the License at
  15718. *
  15719. * http://www.apache.org/licenses/LICENSE-2.0
  15720. *
  15721. * Unless required by applicable law or agreed to in writing, software
  15722. * distributed under the License is distributed on an "AS IS" BASIS,
  15723. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15724. * See the License for the specific language governing permissions and
  15725. * limitations under the License.
  15726. *
  15727. * transformer.js
  15728. * Apply a transformation matrix to a set of keypoints
  15729. */
  15730. /**
  15731. * Apply a transformation matrix to a set of keypoints
  15732. */
  15733. class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode
  15734. {
  15735. /**
  15736. * Constructor
  15737. * @param {string} [name] name of the node
  15738. */
  15739. constructor(name = undefined)
  15740. {
  15741. super(name, 1, [
  15742. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15743. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15744. ]);
  15745. /** @type {SpeedyMatrix} transformation matrix */
  15746. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  15747. }
  15748. /**
  15749. * Transformation matrix
  15750. * @returns {SpeedyMatrix}
  15751. */
  15752. get transform()
  15753. {
  15754. return this._transform;
  15755. }
  15756. /**
  15757. * Transformation matrix. Must be 3x3
  15758. * @param {SpeedyMatrix} transform
  15759. */
  15760. set transform(transform)
  15761. {
  15762. if(!(transform.rows == 3 && transform.columns == 3))
  15763. throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  15764. this._transform = transform;
  15765. }
  15766. /**
  15767. * Run the specific task of this node
  15768. * @param {SpeedyGPU} gpu
  15769. * @returns {void|SpeedyPromise<void>}
  15770. */
  15771. _run(gpu)
  15772. {
  15773. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15774. const outputTexture = this._tex[0];
  15775. const homography = this._transform.read();
  15776. // apply homography
  15777. (gpu.programs.keypoints.applyHomography
  15778. .outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)
  15779. )(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15780. // done!
  15781. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15782. }
  15783. }
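/*
 * Illustrative sketch (not part of the library source). The transform must be
 * a 3x3 SpeedyMatrix; a uniform scaling matrix is shown because its entries
 * are identical in row-major and column-major order:
 *
 *   const transformer = new SpeedyPipelineNodeKeypointTransformer('transformer');
 *   transformer.transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [
 *       2, 0, 0,
 *       0, 2, 0,
 *       0, 0, 1
 *   ]); // doubles the position of every keypoint
 */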
  15784. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
  15785. /*
  15786. * speedy-vision.js
  15787. * GPU-accelerated Computer Vision for JavaScript
  15788. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15789. *
  15790. * Licensed under the Apache License, Version 2.0 (the "License");
  15791. * you may not use this file except in compliance with the License.
  15792. * You may obtain a copy of the License at
  15793. *
  15794. * http://www.apache.org/licenses/LICENSE-2.0
  15795. *
  15796. * Unless required by applicable law or agreed to in writing, software
  15797. * distributed under the License is distributed on an "AS IS" BASIS,
  15798. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15799. * See the License for the specific language governing permissions and
  15800. * limitations under the License.
  15801. *
  15802. * subpixel.js
  15803. * Subpixel refinement of keypoint location
  15804. */
  15805. /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
  15806. /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
  15807. const METHOD2PROGRAM = Object.freeze({
  15808. 'quadratic1d': 'subpixelQuadratic1d',
  15809. 'taylor2d': 'subpixelTaylor2d',
  15810. 'bicubic-upsample': 'subpixelBicubic',
  15811. 'bilinear-upsample': 'subpixelBilinear',
  15812. });
  15813. /**
  15814. * Subpixel refinement of keypoint location
  15815. */
  15816. class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode
  15817. {
  15818. /**
  15819. * Constructor
  15820. * @param {string} [name] name of the node
  15821. */
  15822. constructor(name = undefined)
  15823. {
  15824. super(name, 2, [
  15825. InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(
  15826. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15827. msg.format === types/* ImageFormat */.f5.GREY
  15828. ),
  15829. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints),
  15830. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15831. OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2),
  15832. ]);
  15833. /** @type {SubpixelRefinementMethod} subpixel refinement method */
  15834. this._method = 'quadratic1d';
  15835. /** @type {number} max iterations for the upsampling methods */
  15836. this._maxIterations = 6;
  15837. /** @type {number} convergence threshold for the upsampling methods */
  15838. this._epsilon = 0.1;
  15839. }
  15840. /**
  15841. * Subpixel refinement method
  15842. * @returns {SubpixelRefinementMethod}
  15843. */
  15844. get method()
  15845. {
  15846. return this._method;
  15847. }
  15848. /**
  15849. * Subpixel refinement method
  15850. * @param {SubpixelRefinementMethod} name
  15851. */
  15852. set method(name)
  15853. {
  15854. if(!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name))
  15855. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
  15856. this._method = name;
  15857. }
  15858. /**
  15859. * Max. iterations for the upsampling methods
  15860. * @returns {number}
  15861. */
  15862. get maxIterations()
  15863. {
  15864. return this._maxIterations;
  15865. }
  15866. /**
  15867. * Max. iterations for the upsampling methods
  15868. * @param {number} value
  15869. */
  15870. set maxIterations(value)
  15871. {
  15872. this._maxIterations = Math.max(0, +value);
  15873. }
  15874. /**
  15875. * Convergence threshold for the upsampling methods
  15876. * @returns {number}
  15877. */
  15878. get epsilon()
  15879. {
  15880. return this._epsilon;
  15881. }
  15882. /**
  15883. * Convergence threshold for the upsampling methods
  15884. * @param {number} value
  15885. */
  15886. set epsilon(value)
  15887. {
  15888. this._epsilon = Math.max(0, +value);
  15889. }
  15890. /**
  15891. * Run the specific task of this node
  15892. * @param {SpeedyGPU} gpu
  15893. * @returns {void|SpeedyPromise<void>}
  15894. */
  15895. _run(gpu)
  15896. {
  15897. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  15898. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('image').read() );
  15899. const tex = this._tex;
  15900. const program = METHOD2PROGRAM[this._method];
  15901. const maxIterations = this._maxIterations;
  15902. const epsilon = this._epsilon;
  15903. // note: if you detected the keypoints using a pyramid,
  15904. // you need to pass that pyramid as input!
  15905. // we'll compute the offsets for each keypoint
  15906. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15907. const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
  15908. const offsets = (gpu.programs.keypoints[program]
  15909. .outputs(offsetEncoderLength, offsetEncoderLength, tex[0])
  15910. )(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
  15911. // apply the offsets to the keypoints
  15912. const refinedKeypoints = (gpu.programs.keypoints.transferFlow
  15913. .outputs(encoderLength, encoderLength, tex[1])
  15914. )(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15915. // done!
  15916. this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
  15917. this.output('displacements').swrite(offsets);
  15918. }
  15919. }
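/*
 * Illustrative sketch (not part of the library source), using the properties
 * defined above:
 *
 *   const refiner = new SpeedyPipelineNodeKeypointSubpixelRefiner('subpixel');
 *   refiner.method = 'taylor2d';  // or 'quadratic1d', 'bicubic-upsample', 'bilinear-upsample'
 *   refiner.maxIterations = 6;    // only used by the *-upsample methods
 *   refiner.epsilon = 0.1;        // convergence threshold for the *-upsample methods
 *
 * The 'image' input must be a greyscale image; if the keypoints were detected
 * on a pyramid, the same pyramid should be fed to this node (see the note in
 * _run() above).
 */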
  15920. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
  15921. /*
  15922. * speedy-vision.js
  15923. * GPU-accelerated Computer Vision for JavaScript
  15924. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15925. *
  15926. * Licensed under the Apache License, Version 2.0 (the "License");
  15927. * you may not use this file except in compliance with the License.
  15928. * You may obtain a copy of the License at
  15929. *
  15930. * http://www.apache.org/licenses/LICENSE-2.0
  15931. *
  15932. * Unless required by applicable law or agreed to in writing, software
  15933. * distributed under the License is distributed on an "AS IS" BASIS,
  15934. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15935. * See the License for the specific language governing permissions and
  15936. * limitations under the License.
  15937. *
  15938. * fast.js
  15939. * FAST corner detector
  15940. */
  15941. // Constants
  15942. const DEFAULT_THRESHOLD = 20;
  15943. /**
  15944. * FAST corner detector
  15945. */
  15946. class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector
  15947. {
  15948. /**
  15949. * Constructor
  15950. * @param {string} [name] name of the node
  15951. */
  15952. constructor(name = undefined)
  15953. {
  15954. super(name, 5, [
  15955. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  15956. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15957. msg.format === types/* ImageFormat */.f5.GREY
  15958. ),
  15959. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15960. ]);
  15961. /** @type {number} FAST threshold in [0,255] */
  15962. this._threshold = DEFAULT_THRESHOLD;
  15963. }
  15964. /**
  15965. * FAST threshold in [0,255]
  15966. * @returns {number}
  15967. */
  15968. get threshold()
  15969. {
  15970. return this._threshold;
  15971. }
  15972. /**
  15973. * FAST threshold in [0,255]
  15974. * @param {number} threshold
  15975. */
  15976. set threshold(threshold)
  15977. {
  15978. this._threshold = Math.max(0, Math.min(threshold | 0, 255));
  15979. }
  15980. /**
  15981. * Run the specific task of this node
  15982. * @param {SpeedyGPU} gpu
  15983. * @returns {void|SpeedyPromise<void>}
  15984. */
  15985. _run(gpu)
  15986. {
  15987. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  15988. const width = image.width, height = image.height;
  15989. const tex = this._tex;
  15990. const capacity = this._capacity;
  15991. const threshold = this._threshold;
  15992. const lodStep = Math.log2(this.scaleFactor);
  15993. const levels = this.levels;
  15994. // validate pyramid
  15995. if(!(levels == 1 || image.hasMipmaps()))
  15996. throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  15997. // skip if the capacity is zero
  15998. if(capacity == 0) {
  15999. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
  16000. const encoderLength = encodedKeypoints.width;
  16001. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  16002. return;
  16003. }
  16004. // FAST
  16005. gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
  16006. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
  16007. let corners = tex[1].clear();
  16008. let numPasses = Math.max(1, Math.min(levels, (globals.PYRAMID_MAX_LEVELS / lodStep) | 0));
  16009. for(let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  16010. corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
  16011. //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  16012. }
  16013. // Same-scale non-maximum suppression
  16014. // *nicer results inside the loop; faster outside
  16015. // Hard to notice a difference when using FAST
  16016. corners = gpu.programs.keypoints.nonmaxSpace(corners);
  16017. // Multi-scale non-maximum suppression
  16018. // (doesn't seem to remove many keypoints)
  16019. if(levels > 1) {
  16020. corners = (gpu.programs.keypoints.nonmaxScaleSimple
  16021. .outputs(width, height, tex[1])
  16022. )(corners, image, lodStep);
  16023. }
  16024. // encode keypoints
  16025. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
  16026. const encoderLength = encodedKeypoints.width;
  16027. // scale refinement
  16028. if(levels > 1) {
  16029. encodedKeypoints = (gpu.programs.keypoints.refineScaleFAST916
  16030. .outputs(encoderLength, encoderLength, tex[4])
  16031. )(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
  16032. }
  16033. // done!
  16034. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  16035. }
  16036. }
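// --------------------------------------------------------------------------
// Editor's usage sketch (illustration only; not part of the bundle).
// A minimal pipeline that runs this FAST detector on a SpeedyMedia. The
// factory names (Speedy.Pipeline, Speedy.Image.Source, Speedy.Filter.Greyscale,
// Speedy.Keypoint.Detector.FAST, Speedy.Keypoint.Sink) and a reachable `Speedy`
// namespace are assumptions; the `threshold` property and the GREY input
// requirement come from the class above.
async function exampleFASTKeypoints(media)
{
    const pipeline = Speedy.Pipeline();
    const source = Speedy.Image.Source();
    const greyscale = Speedy.Filter.Greyscale(); // the input port expects a GREY image
    const fast = Speedy.Keypoint.Detector.FAST();
    const sink = Speedy.Keypoint.Sink('keypoints');

    source.media = media;   // a SpeedyMedia (image, video, canvas...)
    fast.threshold = 20;    // FAST threshold in [0,255] (DEFAULT_THRESHOLD above)

    source.output().connectTo(greyscale.input());
    greyscale.output().connectTo(fast.input());
    fast.output().connectTo(sink.input());
    pipeline.init(source, greyscale, fast, sink);

    const { keypoints } = await pipeline.run(); // SpeedyKeypoint[]
    pipeline.release();
    return keypoints;
}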
  16037. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
  16038. /*
  16039. * speedy-vision.js
  16040. * GPU-accelerated Computer Vision for JavaScript
  16041. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16042. *
  16043. * Licensed under the Apache License, Version 2.0 (the "License");
  16044. * you may not use this file except in compliance with the License.
  16045. * You may obtain a copy of the License at
  16046. *
  16047. * http://www.apache.org/licenses/LICENSE-2.0
  16048. *
  16049. * Unless required by applicable law or agreed to in writing, software
  16050. * distributed under the License is distributed on an "AS IS" BASIS,
  16051. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16052. * See the License for the specific language governing permissions and
  16053. * limitations under the License.
  16054. *
  16055. * harris.js
  16056. * Harris corner detector
  16057. */
  16058. /** Window size helper */
  16059. const HARRIS = Object.freeze({
  16060. 1: 'harris1',
  16061. 3: 'harris3',
  16062. 5: 'harris5',
  16063. 7: 'harris7',
  16064. });
  16065. /**
  16066. * Harris corner detector
  16067. */
  16068. class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector
  16069. {
  16070. /**
  16071. * Constructor
  16072. * @param {string} [name] name of the node
  16073. */
  16074. constructor(name = undefined)
  16075. {
  16076. super(name, 6, [
  16077. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  16078. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16079. msg.format === types/* ImageFormat */.f5.GREY
  16080. ),
  16081. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16082. ]);
  16083. /** @type {SpeedySize} neighborhood size */
  16084. this._windowSize = new SpeedySize(3, 3);
  16085. /** @type {number} min corner quality in [0,1] */
  16086. this._quality = 0.1;
  16087. }
  16088. /**
  16089. * Minimum corner quality in [0,1] - this is a fraction of
  16090. * the largest min. eigenvalue of the autocorrelation matrix
  16091. * over the entire image
  16092. * @returns {number}
  16093. */
  16094. get quality()
  16095. {
  16096. return this._quality;
  16097. }
  16098. /**
  16099. * Minimum corner quality in [0,1]
  16100. * @param {number} quality
  16101. */
  16102. set quality(quality)
  16103. {
  16104. this._quality = Math.max(0.0, Math.min(+quality, 1.0));
  16105. }
  16106. /**
  16107. * Neighborhood size
  16108. * @returns {SpeedySize}
  16109. */
  16110. get windowSize()
  16111. {
  16112. return this._windowSize;
  16113. }
  16114. /**
  16115. * Neighborhood size
  16116. * @param {SpeedySize} windowSize
  16117. */
  16118. set windowSize(windowSize)
  16119. {
  16120. const d = windowSize.width;
  16121. if(!((d == windowSize.height) && (d == 1 || d == 3 || d == 5 || d == 7)))
  16122. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
  16123. this._windowSize = windowSize;
  16124. }
  16125. /**
  16126. * Run the specific task of this node
  16127. * @param {SpeedyGPU} gpu
  16128. * @returns {void|SpeedyPromise<void>}
  16129. */
  16130. _run(gpu)
  16131. {
  16132. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  16133. const width = image.width, height = image.height;
  16134. const capacity = this._capacity;
  16135. const quality = this._quality;
  16136. const windowSize = this._windowSize.width;
  16137. const levels = this.levels;
  16138. const lodStep = Math.log2(this.scaleFactor);
  16139. const intFactor = levels > 1 ? this.scaleFactor : 1;
  16140. const harris = gpu.programs.keypoints[HARRIS[windowSize]];
  16141. const tex = this._tex;
  16142. // validate pyramid
  16143. if(!(levels == 1 || image.hasMipmaps()))
  16144. throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  16145. // skip if the capacity is zero
  16146. if(capacity == 0) {
  16147. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
  16148. const encoderLength = encodedKeypoints.width;
  16149. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  16150. return;
  16151. }
  16152. // compute corner response map
  16153. harris.outputs(width, height, tex[0], tex[1]);
  16154. gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
  16155. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
  16156. let corners = tex[1].clear();
  16157. let numPasses = Math.max(1, Math.min(levels, (globals.PYRAMID_MAX_LEVELS / lodStep) | 0));
  16158. for(let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  16159. const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
  16160. const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
  16161. corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
  16162. corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  16163. }
  16164. // Same-scale non-maximum suppression
  16165. // *performs better inside the loop
  16166. //corners = gpu.programs.keypoints.nonmaxSpace(corners);
  16167. // Multi-scale non-maximum suppression
  16168. // (doesn't seem to remove many keypoints)
  16169. if(levels > 1) {
  16170. const laplacian = (gpu.programs.keypoints.laplacian
  16171. .outputs(width, height, tex[0])
  16172. )(corners, image, lodStep, 0);
  16173. corners = (gpu.programs.keypoints.nonmaxScale
  16174. .outputs(width, height, tex[2])
  16175. )(corners, image, laplacian, lodStep);
  16176. }
  16177. // find the maximum corner response over the entire image
  16178. gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
  16179. numPasses = Math.ceil(Math.log2(Math.max(width, height)));
  16180. let maxScore = corners;
  16181. for(let j = 0; j < numPasses; j++)
  16182. maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
  16183. // discard corners below a quality level
  16184. corners = (gpu.programs.keypoints.harrisScoreCutoff
  16185. .outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])
  16186. )(corners, maxScore, quality);
  16187. // encode keypoints
  16188. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
  16189. const encoderLength = encodedKeypoints.width;
  16190. // scale refinement
  16191. if(levels > 1) {
  16192. encodedKeypoints = (gpu.programs.keypoints.refineScaleLoG
  16193. .outputs(encoderLength, encoderLength, tex[5])
  16194. )(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
  16195. }
  16196. // done!
  16197. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  16198. }
  16199. }
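// Editor's usage sketch (illustration only; not part of the bundle). Same
// wiring as the FAST sketch above, but configuring the Harris-specific
// properties exposed by this class (quality, windowSize). Factory names and
// the `Speedy` namespace are assumptions; feeding an image pyramid is only
// required when more than one level is used.
async function exampleHarrisKeypoints(media)
{
    const pipeline = Speedy.Pipeline();
    const source = Speedy.Image.Source();
    const greyscale = Speedy.Filter.Greyscale();
    const harris = Speedy.Keypoint.Detector.Harris();
    const sink = Speedy.Keypoint.Sink('keypoints');

    source.media = media;
    harris.quality = 0.10;                  // min. corner quality in [0,1]
    harris.windowSize = Speedy.Size(3, 3);  // acceptable sizes: 1x1, 3x3, 5x5, 7x7

    source.output().connectTo(greyscale.input());
    greyscale.output().connectTo(harris.input());
    harris.output().connectTo(sink.input());
    pipeline.init(source, greyscale, harris, sink);

    const { keypoints } = await pipeline.run();
    pipeline.release();
    return keypoints;
}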
  16200. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
  16201. /*
  16202. * speedy-vision.js
  16203. * GPU-accelerated Computer Vision for JavaScript
  16204. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16205. *
  16206. * Licensed under the Apache License, Version 2.0 (the "License");
  16207. * you may not use this file except in compliance with the License.
  16208. * You may obtain a copy of the License at
  16209. *
  16210. * http://www.apache.org/licenses/LICENSE-2.0
  16211. *
  16212. * Unless required by applicable law or agreed to in writing, software
  16213. * distributed under the License is distributed on an "AS IS" BASIS,
  16214. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16215. * See the License for the specific language governing permissions and
  16216. * limitations under the License.
  16217. *
  16218. * descriptor.js
  16219. * Abstract keypoint descriptor
  16220. */
/**
 * Abstract keypoint descriptor
 * @abstract
 */
class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode
{
    /**
     * Constructor
     * @param {string} [name] name of the node
     * @param {number} [texCount] number of work textures
     * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
     */
    constructor(name = undefined, texCount = 0, portBuilders = undefined)
    {
        super(name, texCount + 1, portBuilders);
    }
    /**
     * Allocate space for keypoint descriptors
     * @param {SpeedyGPU} gpu
     * @param {number} inputDescriptorSize should be 0
     * @param {number} inputExtraSize must be non-negative
     * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
     * @param {number} outputExtraSize must be inputExtraSize
     * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
     * @returns {SpeedyDrawableTexture} encodedKeypoints
     */
    _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints)
    {
        utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
        utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
        const inputEncoderLength = inputEncodedKeypoints.width;
        const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
        const outputEncoderCapacity = inputEncoderCapacity;
        const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
        const tex = this._tex[this._tex.length - 1];
        return (gpu.programs.keypoints.allocateDescriptors
            .outputs(outputEncoderLength, outputEncoderLength, tex)
        )(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
    }
}
  16262. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
  16263. /*
  16264. * speedy-vision.js
  16265. * GPU-accelerated Computer Vision for JavaScript
  16266. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16267. *
  16268. * Licensed under the Apache License, Version 2.0 (the "License");
  16269. * you may not use this file except in compliance with the License.
  16270. * You may obtain a copy of the License at
  16271. *
  16272. * http://www.apache.org/licenses/LICENSE-2.0
  16273. *
  16274. * Unless required by applicable law or agreed to in writing, software
  16275. * distributed under the License is distributed on an "AS IS" BASIS,
  16276. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16277. * See the License for the specific language governing permissions and
  16278. * limitations under the License.
  16279. *
  16280. * orb.js
  16281. * ORB descriptors
  16282. */
  16283. // Constants
  16284. const DESCRIPTOR_SIZE = 32; // 256 bits
  16285. /**
  16286. * ORB descriptors
  16287. */
  16288. class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor
  16289. {
  16290. /**
  16291. * Constructor
  16292. * @param {string} [name] name of the node
  16293. */
  16294. constructor(name = undefined)
  16295. {
  16296. super(name, 3, [
  16297. InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(
  16298. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16299. msg.format === types/* ImageFormat */.f5.GREY
  16300. ),
  16301. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints),
  16302. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16303. ]);
  16304. }
  16305. /**
  16306. * Run the specific task of this node
  16307. * @param {SpeedyGPU} gpu
  16308. * @returns {void|SpeedyPromise<void>}
  16309. */
  16310. _run(gpu)
  16311. {
  16312. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  16313. const image = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('image').read() ) ).image;
  16314. const tex = this._tex;
  16315. const outputTexture = this._tex[2];
  16316. // compute orientation
  16317. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16318. const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
  16319. const encodedOrientations = (gpu.programs.keypoints.orbOrientation
  16320. .outputs(orientationEncoderLength, orientationEncoderLength, tex[0])
  16321. )(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  16322. const orientedKeypoints = (gpu.programs.keypoints.transferOrientation
  16323. .outputs(encoderLength, encoderLength, tex[1])
  16324. )(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  16325. // allocate space
  16326. const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
  16327. const newEncoderLength = encodedKps.width;
  16328. // compute descriptors (it's a good idea to blur the image)
  16329. const describedKeypoints = (gpu.programs.keypoints.orbDescriptor
  16330. .outputs(newEncoderLength, newEncoderLength, outputTexture)
  16331. )(image, encodedKps, extraSize, newEncoderLength);
  16332. // done!
  16333. this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
  16334. }
  16335. }
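// Editor's usage sketch (illustration only; not part of the bundle). Wires a
// detector into this ORB descriptor node using the 'image' and 'keypoints'
// input ports declared above; the blur follows the "it's a good idea to blur
// the image" note in _run(). Factory names (including Speedy.Filter.GaussianBlur)
// and the `Speedy` namespace are assumptions.
async function exampleORBDescriptors(media)
{
    const pipeline = Speedy.Pipeline();
    const source = Speedy.Image.Source();
    const greyscale = Speedy.Filter.Greyscale();
    const blur = Speedy.Filter.GaussianBlur();
    const detector = Speedy.Keypoint.Detector.Harris();
    const orb = Speedy.Keypoint.Descriptor.ORB();
    const sink = Speedy.Keypoint.Sink('keypoints');

    source.media = media;

    source.output().connectTo(greyscale.input());
    greyscale.output().connectTo(detector.input());      // detect on the sharp image
    greyscale.output().connectTo(blur.input());
    blur.output().connectTo(orb.input('image'));         // describe on the blurred image
    detector.output().connectTo(orb.input('keypoints'));
    orb.output().connectTo(sink.input());
    pipeline.init(source, greyscale, blur, detector, orb, sink);

    const { keypoints } = await pipeline.run();           // each keypoint carries a 32-byte (256-bit) descriptor
    pipeline.release();
    return keypoints;
}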
  16336. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
  16337. /*
  16338. * speedy-vision.js
  16339. * GPU-accelerated Computer Vision for JavaScript
  16340. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16341. *
  16342. * Licensed under the Apache License, Version 2.0 (the "License");
  16343. * you may not use this file except in compliance with the License.
  16344. * You may obtain a copy of the License at
  16345. *
  16346. * http://www.apache.org/licenses/LICENSE-2.0
  16347. *
  16348. * Unless required by applicable law or agreed to in writing, software
  16349. * distributed under the License is distributed on an "AS IS" BASIS,
  16350. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16351. * See the License for the specific language governing permissions and
  16352. * limitations under the License.
  16353. *
  16354. * lk.js
  16355. * LK optical-flow
  16356. */
  16357. // Constants
  16358. const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
  16359. const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
  16360. const DEFAULT_NUMBER_OF_ITERATIONS = 30;
  16361. const DEFAULT_DISCARD_THRESHOLD = 0.0001;
  16362. const DEFAULT_EPSILON = 0.01;
  16363. const LK_PROGRAM = {
  16364. 3: 'lk3',
  16365. 5: 'lk5',
  16366. 7: 'lk7',
  16367. 9: 'lk9',
  16368. 11: 'lk11',
  16369. 13: 'lk13',
  16370. 15: 'lk15',
  16371. 17: 'lk17',
  16372. 19: 'lk19',
  16373. 21: 'lk21',
  16374. };
  16375. /**
  16376. * LK optical-flow
  16377. */
  16378. class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode
  16379. {
  16380. /**
  16381. * Constructor
  16382. * @param {string} [name] name of the node
  16383. */
  16384. constructor(name = undefined)
  16385. {
  16386. super(name, 3, [
  16387. InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(
  16388. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16389. msg.format === types/* ImageFormat */.f5.GREY
  16390. ),
  16391. InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(
  16392. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16393. msg.format === types/* ImageFormat */.f5.GREY
  16394. ),
  16395. InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints),
  16396. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16397. OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2),
  16398. ]);
  16399. /** @type {SpeedySize} window size */
  16400. this._windowSize = DEFAULT_WINDOW_SIZE;
  16401. /** @type {number} number of pyramid levels to use */
  16402. this._levels = DEFAULT_DEPTH;
  16403. /** @type {number} minimum acceptable corner response */
  16404. this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
  16405. /** @type {number} number of iterations per pyramid level (termination criteria) */
  16406. this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
  16407. /** @type {number} minimum increment per iteration (termination criteria) */
  16408. this._epsilon = DEFAULT_EPSILON;
  16409. }
  16410. /**
  16411. * Window size (use odd numbers)
  16412. * @returns {SpeedySize}
  16413. */
  16414. get windowSize()
  16415. {
  16416. return this._windowSize;
  16417. }
  16418. /**
  16419. * Window size (use odd numbers)
  16420. * @param {SpeedySize} windowSize must be a square window
  16421. */
  16422. set windowSize(windowSize)
  16423. {
  16424. if(windowSize.width != windowSize.height) {
throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
  16426. }
  16427. else if(!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
  16428. const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a,b) => a-b).map(k => k+'x'+k).join(', ');
throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
  16430. }
  16431. this._windowSize = windowSize;
  16432. }
  16433. /**
  16434. * Number of pyramid levels to use
  16435. * @returns {number}
  16436. */
  16437. get levels()
  16438. {
  16439. return this._levels;
  16440. }
  16441. /**
  16442. * Number of pyramid levels to use
  16443. * @param {number} levels
  16444. */
  16445. set levels(levels)
  16446. {
  16447. utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
  16448. this._levels = levels | 0;
  16449. }
  16450. /**
  16451. * Get the discard threshold, used to discard "bad" keypoints
  16452. * @returns {number}
  16453. */
  16454. get discardThreshold()
  16455. {
  16456. return this._discardThreshold;
  16457. }
  16458. /**
  16459. * Set the discard threshold, used to discard "bad" keypoints
  16460. * @param {number} value typically 10^(-4) - increase to discard more
  16461. */
  16462. set discardThreshold(value)
  16463. {
  16464. utils/* Utils */.A.assert(value >= 0);
  16465. this._discardThreshold = +value;
  16466. }
  16467. /**
  16468. * Get the maximum number of iterations of the pyramidal LK algorithm
  16469. * @returns {number}
  16470. */
  16471. get numberOfIterations()
  16472. {
  16473. return this._numberOfIterations;
  16474. }
  16475. /**
  16476. * Set the maximum number of iterations of the pyramidal LK algorithm
  16477. * @param {number} value
  16478. */
  16479. set numberOfIterations(value)
  16480. {
  16481. utils/* Utils */.A.assert(value >= 1);
  16482. this._numberOfIterations = value | 0;
  16483. }
  16484. /**
  16485. * Get the accuracy threshold, used to stop LK iterations
  16486. * @returns {number}
  16487. */
  16488. get epsilon()
  16489. {
  16490. return this._epsilon;
  16491. }
  16492. /**
* Set the accuracy threshold, used to stop LK iterations
  16494. * @param {number} value typically 0.01
  16495. */
  16496. set epsilon(value)
  16497. {
  16498. utils/* Utils */.A.assert(value >= 0);
  16499. this._epsilon = +value;
  16500. }
  16501. /**
  16502. * Run the specific task of this node
  16503. * @param {SpeedyGPU} gpu
  16504. * @returns {void|SpeedyPromise<void>}
  16505. */
  16506. _run(gpu)
  16507. {
  16508. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('previousKeypoints').read() );
  16509. const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('previousImage').read() )).image;
  16510. const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('nextImage').read() )).image;
  16511. const previousKeypoints = encodedKeypoints;
  16512. const levels = this._levels;
  16513. const windowSize = this._windowSize;
  16514. const wsize = windowSize.width; // square window
  16515. const numberOfIterations = this._numberOfIterations;
  16516. const discardThreshold = this._discardThreshold;
  16517. const epsilon = this._epsilon;
  16518. const keypoints = gpu.programs.keypoints;
  16519. const tex = this._tex;
  16520. // do we need a pyramid?
  16521. if(!(levels == 1 || (previousImage.hasMipmaps() && nextImage.hasMipmaps())))
  16522. throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);
  16523. else if(previousImage.width !== nextImage.width || previousImage.height !== nextImage.height)
  16524. throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
  16525. // select the appropriate program
  16526. const lk = keypoints[LK_PROGRAM[wsize]];
  16527. // find the dimensions of the flow texture (1 pixel per flow vector)
  16528. const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16529. const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
  16530. lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
  16531. // compute optical-flow
  16532. let flow = lk.clear();
  16533. for(let lod = levels - 1; lod >= 0; lod--)
  16534. flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
  16535. // transfer optical-flow to nextKeypoints
  16536. keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
  16537. const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
  16538. // done!
  16539. this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
  16540. this.output('flow').swrite(flow);
  16541. }
  16542. }
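// Editor's usage sketch (illustration only; not part of the bundle). Tracks a
// set of keypoints from a previous frame to the next one. Port names
// ('previousImage', 'nextImage', 'previousKeypoints', 'flow') and the
// properties come from the class above; the factory names (including
// Speedy.Image.Pyramid and Speedy.Keypoint.Source) and the `Speedy` namespace
// are assumptions. Pyramids are fed because levels > 1 (see the check in _run()).
async function exampleLKTracking(prevMedia, nextMedia, prevKeypoints)
{
    const pipeline = Speedy.Pipeline();
    const prevSource = Speedy.Image.Source('prev');
    const nextSource = Speedy.Image.Source('next');
    const prevGrey = Speedy.Filter.Greyscale('prevGrey');
    const nextGrey = Speedy.Filter.Greyscale('nextGrey');
    const prevPyramid = Speedy.Image.Pyramid('prevPyramid');
    const nextPyramid = Speedy.Image.Pyramid('nextPyramid');
    const keypointSource = Speedy.Keypoint.Source();
    const lk = Speedy.Keypoint.Tracker.LK();
    const sink = Speedy.Keypoint.Sink('keypoints');

    prevSource.media = prevMedia;
    nextSource.media = nextMedia;
    keypointSource.keypoints = prevKeypoints;   // SpeedyKeypoint[] detected in the previous frame
    lk.windowSize = Speedy.Size(11, 11);        // square windows from 3x3 to 21x21 (see LK_PROGRAM)
    lk.levels = 3;                              // number of pyramid levels

    prevSource.output().connectTo(prevGrey.input());
    nextSource.output().connectTo(nextGrey.input());
    prevGrey.output().connectTo(prevPyramid.input());
    nextGrey.output().connectTo(nextPyramid.input());
    prevPyramid.output().connectTo(lk.input('previousImage'));
    nextPyramid.output().connectTo(lk.input('nextImage'));
    keypointSource.output().connectTo(lk.input('previousKeypoints'));
    lk.output().connectTo(sink.input());        // the extra 'flow' output port carries the flow vectors
    pipeline.init(prevSource, nextSource, prevGrey, nextGrey, prevPyramid, nextPyramid, keypointSource, lk, sink);

    const { keypoints } = await pipeline.run(); // tracked keypoints
    pipeline.release();
    return keypoints;
}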
  16543. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
  16544. /*
  16545. * speedy-vision.js
  16546. * GPU-accelerated Computer Vision for JavaScript
  16547. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16548. *
  16549. * Licensed under the Apache License, Version 2.0 (the "License");
  16550. * you may not use this file except in compliance with the License.
  16551. * You may obtain a copy of the License at
  16552. *
  16553. * http://www.apache.org/licenses/LICENSE-2.0
  16554. *
  16555. * Unless required by applicable law or agreed to in writing, software
  16556. * distributed under the License is distributed on an "AS IS" BASIS,
  16557. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16558. * See the License for the specific language governing permissions and
  16559. * limitations under the License.
  16560. *
  16561. * lsh-static-tables.js
  16562. * Static LSH tables
  16563. */
  16564. /**
  16565. * Static LSH tables
  16566. */
  16567. class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode
  16568. {
  16569. /**
  16570. * Constructor
  16571. * @param {string} [name] name of the node
  16572. */
  16573. constructor(name = undefined)
  16574. {
  16575. super(name, 2, [
  16576. OutputPort().expects(SpeedyPipelineMessageType.LSHTables)
  16577. ]);
  16578. /** @type {SpeedyKeypoint[]} "training" keypoints */
  16579. this._keypoints = [];
  16580. /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
  16581. this._keypointsCopy = [];
  16582. /** @type {number} number of tables in the LSH data structure */
  16583. this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
  16584. /** @type {number} number of bits of a hash */
  16585. this._hashSize = LSH_DEFAULT_HASH_SIZE;
  16586. /** @type {SpeedyLSH|null} LSH data structure */
  16587. this._lsh = null;
  16588. }
  16589. /**
  16590. * "Training" keypoints
  16591. * @returns {SpeedyKeypoint[]}
  16592. */
  16593. get keypoints()
  16594. {
  16595. return this._keypoints;
  16596. }
  16597. /**
  16598. * "Training" keypoints
  16599. * @param {SpeedyKeypoint[]} keypoints
  16600. */
  16601. set keypoints(keypoints)
  16602. {
  16603. if(!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint)))
  16604. throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
  16605. if(this._keypoints !== keypoints) {
  16606. this._keypoints = keypoints; // update internal pointer
  16607. this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
  16608. this._lsh = null; // (re)train the model
  16609. }
  16610. }
  16611. /**
  16612. * Number of tables in the LSH data structure
  16613. * @returns {number}
  16614. */
  16615. get numberOfTables()
  16616. {
  16617. return this._numberOfTables;
  16618. }
  16619. /**
  16620. * Number of tables in the LSH data structure
  16621. * @param {number} n
  16622. */
  16623. set numberOfTables(n)
  16624. {
  16625. if(!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n))
  16626. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
  16627. if(n !== this._numberOfTables) {
  16628. this._numberOfTables = n | 0;
  16629. this._lsh = null; // need to retrain the model
  16630. }
  16631. }
  16632. /**
  16633. * Number of bits of a hash
  16634. * @returns {number}
  16635. */
  16636. get hashSize()
  16637. {
  16638. return this._hashSize;
  16639. }
  16640. /**
  16641. * Number of bits of a hash
  16642. * @param {number} h
  16643. */
  16644. set hashSize(h)
  16645. {
  16646. if(!LSH_ACCEPTABLE_HASH_SIZES.includes(h))
  16647. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
  16648. if(h !== this._hashSize) {
  16649. this._hashSize = h | 0;
  16650. this._lsh = null; // need to retrain the model
  16651. }
  16652. }
  16653. /**
  16654. * Run the specific task of this node
  16655. * @param {SpeedyGPU} gpu
  16656. * @returns {void|SpeedyPromise<void>}
  16657. */
  16658. _run(gpu)
  16659. {
  16660. // Need to train the model?
  16661. if(this._lsh == null) {
  16662. // internal work textures are only available after initialization,
  16663. // i.e., after calling this._init()
  16664. this._lsh = this._train();
  16665. }
  16666. // Pass it forward
  16667. this.output().swrite(this._lsh);
  16668. }
  16669. /**
  16670. * Train the model
  16671. * @returns {SpeedyLSH}
  16672. */
  16673. _train()
  16674. {
  16675. const keypoints = this._keypointsCopy;
  16676. const numberOfTables = this._numberOfTables;
  16677. const hashSize = this._hashSize;
  16678. if(keypoints.find(keypoint => keypoint.descriptor == null))
  16679. throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
  16680. const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
  16681. const lshTables = this._tex[0];
  16682. const descriptorDB = this._tex[1];
  16683. return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
  16684. }
  16685. }
  16686. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
  16687. /*
  16688. * speedy-vision.js
  16689. * GPU-accelerated Computer Vision for JavaScript
  16690. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16691. *
  16692. * Licensed under the Apache License, Version 2.0 (the "License");
  16693. * you may not use this file except in compliance with the License.
  16694. * You may obtain a copy of the License at
  16695. *
  16696. * http://www.apache.org/licenses/LICENSE-2.0
  16697. *
  16698. * Unless required by applicable law or agreed to in writing, software
  16699. * distributed under the License is distributed on an "AS IS" BASIS,
  16700. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16701. * See the License for the specific language governing permissions and
  16702. * limitations under the License.
  16703. *
  16704. * lsh-knn.js
  16705. * K approximate nearest neighbors matcher
  16706. */
  16707. /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
  16708. /** @type {number} how many neighbors to search for, by default */
  16709. const DEFAULT_K = 1;
  16710. /** @type {LSHKNNQualityLevel} default quality level */
  16711. const DEFAULT_QUALITY = 'default';
  16712. /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
  16713. const NUMBER_OF_BIT_SWAPS = {
  16714. 'fastest': 0,
  16715. 'default': 1,
  16716. 'demanding': 2,
  16717. };
  16718. /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
  16719. const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o,d) => ((o[d] = fd(d)), o), {}))(
  16720. d => ((fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o,h) => ((o[h] = fh(h)), o), {}))(
  16721. h => ((fl => [0,1,2].reduce((o,l) => ((o[l] = fl(l)), o), {}))(
  16722. l => `lshKnn${d}h${h}lv${l}`
  16723. ))
  16724. ))
  16725. );
  16726. /**
  16727. * K approximate nearest neighbors matcher
  16728. */
  16729. class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode
  16730. {
  16731. /**
  16732. * Constructor
  16733. * @param {string} [name] name of the node
  16734. */
  16735. constructor(name = undefined)
  16736. {
  16737. super(name, 6, [
  16738. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16739. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16740. msg.descriptorSize > 0
  16741. ),
  16742. InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables),
  16743. OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches),
  16744. ]);
  16745. /** @type {number} how many neighbors do you want? */
  16746. this._k = DEFAULT_K;
  16747. /** @type {LSHKNNQualityLevel} quality of the matching */
  16748. this._quality = DEFAULT_QUALITY;
  16749. }
  16750. /**
  16751. * How many neighbors do you want?
  16752. * @returns {number}
  16753. */
  16754. get k()
  16755. {
  16756. return this._k;
  16757. }
  16758. /**
  16759. * How many neighbors do you want?
  16760. * @param {number} k number of neighbors
  16761. */
  16762. set k(k)
  16763. {
  16764. this._k = Math.max(1, k | 0);
  16765. }
  16766. /**
  16767. * Quality of the matching
  16768. * @returns {LSHKNNQualityLevel}
  16769. */
  16770. get quality()
  16771. {
  16772. return this._quality;
  16773. }
  16774. /**
  16775. * Quality of the matching
  16776. * @param {LSHKNNQualityLevel} quality
  16777. */
  16778. set quality(quality)
  16779. {
  16780. if(!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality))
  16781. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
  16782. this._quality = quality;
  16783. }
  16784. /**
  16785. * Run the specific task of this node
  16786. * @param {SpeedyGPU} gpu
  16787. * @returns {void|SpeedyPromise<void>}
  16788. */
  16789. _run(gpu)
  16790. {
  16791. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  16792. /** @type {SpeedyLSH} */ const lsh = this.input('lsh').read().lsh;
  16793. const keypoints = gpu.programs.keypoints;
  16794. const tables = lsh.tables;
  16795. const descriptorDB = lsh.descriptorDB;
  16796. const tablesStride = tables.width;
  16797. const descriptorDBStride = descriptorDB.width;
  16798. const tableCount = lsh.tableCount;
  16799. const hashSize = lsh.hashSize;
  16800. const bucketCapacity = lsh.bucketCapacity;
  16801. const bucketsPerTable = lsh.bucketsPerTable;
  16802. const sequences = lsh.sequences;
  16803. const candidatesA = this._tex[0];
  16804. const candidatesB = this._tex[1];
  16805. const candidatesC = this._tex[2];
  16806. const filters = this._tex[3];
  16807. const transferA = this._tex[4];
  16808. const transferB = this._tex[5];
  16809. const level = NUMBER_OF_BIT_SWAPS[this._quality];
  16810. const matchesPerKeypoint = this._k;
  16811. // validate parameters
  16812. if(descriptorSize !== lsh.descriptorSize)
  16813. throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
  16814. utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
  16815. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
  16816. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
  16817. // configure the output texture
  16818. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16819. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  16820. let encodedMatches = transferB;
  16821. keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
  16822. // prepare the LSH matching
  16823. const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  16824. keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
  16825. keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
  16826. const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
  16827. lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
  16828. lshKnn.setUBO('LSHSequences', sequences);
  16829. // match keypoints
  16830. encodedMatches.clear();
  16831. keypoints.lshKnnInitFilters();
  16832. for(let i = 0; i < matchesPerKeypoint; i++) {
  16833. // find the (i+1)-th best match
  16834. let candidates = keypoints.lshKnnInitCandidates();
  16835. for(let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
  16836. candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  16837. gpu.gl.flush();
  16838. }
  16839. candidates.copyTo(filters);
  16840. // transfer matches to an encoded matches texture
  16841. encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
  16842. }
  16843. // done
  16844. this.output().swrite(encodedMatches, matchesPerKeypoint);
  16845. /*
  16846. // debug
  16847. let data = filters.inspect32(gpu), debug = [];
  16848. for(let i = 0; i < data.length; i++) {
  16849. const bits = MATCH_INDEX_BITS;
  16850. const mask = (1 << bits) - 1;
  16851. const u32 = data[i];
  16852. const index = u32 & mask, distance = u32 >>> bits;
  16853. //debug.push('|'+[ u32 ].toString());
  16854. debug.push('|'+[ index, distance ].toString());
  16855. }
  16856. console.log(debug.join(','));
  16857. */
  16858. }
  16859. }
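// Editor's usage sketch (illustration only; not part of the bundle). Matches
// descriptors of the current frame against a static database using the LSH
// tables node above. Port names ('keypoints', 'lsh') and the k / quality
// properties come from the classes above; the factory names (including
// Speedy.Keypoint.Matcher.StaticLSHTables / LSHKNN and the matched-keypoints
// sink with its 'matches' port) and the `Speedy` namespace are assumptions.
async function exampleLSHMatching(media, trainedKeypoints)
{
    const pipeline = Speedy.Pipeline();
    const source = Speedy.Image.Source();
    const greyscale = Speedy.Filter.Greyscale();
    const detector = Speedy.Keypoint.Detector.Harris();
    const orb = Speedy.Keypoint.Descriptor.ORB();
    const lshTables = Speedy.Keypoint.Matcher.StaticLSHTables();
    const knn = Speedy.Keypoint.Matcher.LSHKNN();
    const sink = Speedy.Keypoint.SinkOfMatchedKeypoints('keypoints');

    source.media = media;
    lshTables.keypoints = trainedKeypoints; // "training" keypoints WITH descriptors (see _train above)
    knn.k = 2;                              // 2 nearest neighbors per keypoint (e.g., for a ratio test)
    knn.quality = 'default';                // 'fastest' | 'default' | 'demanding'

    source.output().connectTo(greyscale.input());
    greyscale.output().connectTo(detector.input());
    greyscale.output().connectTo(orb.input('image'));
    detector.output().connectTo(orb.input('keypoints'));
    orb.output().connectTo(knn.input('keypoints'));
    lshTables.output().connectTo(knn.input('lsh'));
    orb.output().connectTo(sink.input());
    knn.output().connectTo(sink.input('matches'));
    pipeline.init(source, greyscale, detector, orb, lshTables, knn, sink);

    const { keypoints } = await pipeline.run(); // keypoints annotated with their matches
    pipeline.release();
    return keypoints;
}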
  16860. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
  16861. /*
  16862. * speedy-vision.js
  16863. * GPU-accelerated Computer Vision for JavaScript
  16864. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16865. *
  16866. * Licensed under the Apache License, Version 2.0 (the "License");
  16867. * you may not use this file except in compliance with the License.
  16868. * You may obtain a copy of the License at
  16869. *
  16870. * http://www.apache.org/licenses/LICENSE-2.0
  16871. *
  16872. * Unless required by applicable law or agreed to in writing, software
  16873. * distributed under the License is distributed on an "AS IS" BASIS,
  16874. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16875. * See the License for the specific language governing permissions and
  16876. * limitations under the License.
  16877. *
  16878. * bf-knn.js
  16879. * Brute Force KNN Keypoint Matcher
  16880. */
  16881. /** @type {Object<number,string>} program name indexed by descriptor size */
  16882. const PROGRAM_NAME = {
  16883. 32: 'bfMatcher32',
  16884. 64: 'bfMatcher64',
  16885. };
  16886. /**
  16887. * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
  16888. * invoking this (use a database of 50 keypoints or so - your mileage may vary)
  16889. */
  16890. class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode
  16891. {
  16892. /**
  16893. * Constructor
  16894. * @param {string} [name] name of the node
  16895. */
  16896. constructor(name = undefined)
  16897. {
  16898. super(name, 6, [
  16899. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16900. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16901. msg.descriptorSize > 0
  16902. ),
  16903. InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16904. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16905. msg.descriptorSize > 0
  16906. ),
  16907. OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches),
  16908. ]);
  16909. /** @type {number} number of matches per keypoint (the "k" of knn) */
  16910. this._matchesPerKeypoint = 1;
  16911. }
  16912. /**
  16913. * Number of matches per keypoint
  16914. * @returns {number}
  16915. */
  16916. get k()
  16917. {
  16918. return this._matchesPerKeypoint;
  16919. }
  16920. /**
  16921. * Number of matches per keypoint
  16922. * @param {number} value
  16923. */
  16924. set k(value)
  16925. {
  16926. this._matchesPerKeypoint = Math.max(1, value | 0);
  16927. }
  16928. /**
  16929. * Run the specific task of this node
  16930. * @param {SpeedyGPU} gpu
  16931. * @returns {void|SpeedyPromise<void>}
  16932. */
  16933. _run(gpu)
  16934. {
  16935. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  16936. const database = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('database').read() );
  16937. const candidatesA = this._tex[0];
  16938. const candidatesB = this._tex[1];
  16939. const candidatesC = this._tex[2];
  16940. const encodedFiltersA = this._tex[3];
  16941. const encodedMatchesA = this._tex[4];
  16942. const encodedMatchesB = this._tex[5];
  16943. const matchesPerKeypoint = this._matchesPerKeypoint;
  16944. const keypoints = gpu.programs.keypoints;
  16945. // validate parameters
  16946. if(descriptorSize !== database.descriptorSize)
  16947. throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);
  16948. else if(!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize))
  16949. throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
  16950. // prepare the brute force matching
  16951. const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
  16952. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16953. const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
  16954. const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
  16955. const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
  16956. const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  16957. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  16958. keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
  16959. keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
  16960. keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
  16961. bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
  16962. // match keypoints
  16963. let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
  16964. let encodedFilters = keypoints.bfMatcherInitFilters();
  16965. for(let k = 0; k < matchesPerKeypoint; k++) {
  16966. let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
  16967. // find the (k+1)-th best match
  16968. for(let passId = 0; passId < numberOfPasses; passId++) {
  16969. encodedPartialMatches = bfMatcher(
  16970. encodedPartialMatches, encodedFilters, partialMatcherLength,
  16971. database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength,
  16972. encodedKeypoints, descriptorSize, extraSize, encoderLength,
  16973. passId
  16974. );
  16975. gpu.gl.flush();
  16976. }
  16977. //gpu.gl.flush();
  16978. // copy the (k+1)-th best match to the filter
  16979. if(matchesPerKeypoint > 1)
  16980. encodedPartialMatches.copyTo(encodedFilters);
  16981. // aggregate matches
  16982. encodedMatches = keypoints.bfMatcherTransfer(
  16983. encodedMatches, encodedPartialMatches, matchesPerKeypoint, k
  16984. );
  16985. }
  16986. // done!
  16987. this.output().swrite(encodedMatches, matchesPerKeypoint);
  16988. }
  16989. }
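// Editor's usage sketch (illustration only; not part of the bundle). Brute-force
// matching against a small database of described keypoints, as advised in the
// class comment above (keep the database around ~50 keypoints). Port names
// ('keypoints', 'database') and the k property come from the class above; the
// factory names (Speedy.Keypoint.Matcher.BFKNN, Speedy.Keypoint.Source, the
// matched-keypoints sink) and the `Speedy` namespace are assumptions.
async function exampleBruteForceMatching(media, databaseKeypoints)
{
    const pipeline = Speedy.Pipeline();
    const source = Speedy.Image.Source();
    const greyscale = Speedy.Filter.Greyscale();
    const detector = Speedy.Keypoint.Detector.Harris();
    const orb = Speedy.Keypoint.Descriptor.ORB();
    const database = Speedy.Keypoint.Source();
    const bfknn = Speedy.Keypoint.Matcher.BFKNN();
    const sink = Speedy.Keypoint.SinkOfMatchedKeypoints('keypoints');

    source.media = media;
    database.keypoints = databaseKeypoints; // small set of keypoints WITH descriptors
    bfknn.k = 1;                            // best match only

    source.output().connectTo(greyscale.input());
    greyscale.output().connectTo(detector.input());
    greyscale.output().connectTo(orb.input('image'));
    detector.output().connectTo(orb.input('keypoints'));
    orb.output().connectTo(bfknn.input('keypoints'));
    database.output().connectTo(bfknn.input('database'));
    orb.output().connectTo(sink.input());
    bfknn.output().connectTo(sink.input('matches'));
    pipeline.init(source, greyscale, detector, orb, database, bfknn, sink);

    const { keypoints } = await pipeline.run();
    pipeline.release();
    return keypoints;
}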
  16990. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
  16991. /*
  16992. * speedy-vision.js
  16993. * GPU-accelerated Computer Vision for JavaScript
  16994. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16995. *
  16996. * Licensed under the Apache License, Version 2.0 (the "License");
  16997. * you may not use this file except in compliance with the License.
  16998. * You may obtain a copy of the License at
  16999. *
  17000. * http://www.apache.org/licenses/LICENSE-2.0
  17001. *
  17002. * Unless required by applicable law or agreed to in writing, software
  17003. * distributed under the License is distributed on an "AS IS" BASIS,
  17004. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17005. * See the License for the specific language governing permissions and
  17006. * limitations under the License.
  17007. *
  17008. * distance-filter.js
  17009. * Given a set of pairs of keypoints, discard all pairs whose distance is
  17010. * above a user-defined threshold. Useful for bidirectional optical-flow.
  17011. */
  17012. /**
  17013. * Given a set of pairs of keypoints, discard all pairs whose distance is
  17014. * above a user-defined threshold. Useful for bidirectional optical-flow.
  17015. *
  17016. * The pairs of keypoints are provided as two separate sets, "in" and
  17017. * "reference". Keypoints that are kept will have their data extracted
  17018. * from the "in" set.
  17019. */
  17020. class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode
  17021. {
  17022. /**
  17023. * Constructor
  17024. * @param {string} [name] name of the node
  17025. */
  17026. constructor(name = undefined)
  17027. {
  17028. super(name, 1, [
  17029. InputPort('in').expects(SpeedyPipelineMessageType.Keypoints),
  17030. InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints),
  17031. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  17032. ]);
  17033. /** @type {number} maximum accepted distance */
  17034. this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
  17035. }
  17036. /**
  17037. * Maximum accepted distance
  17038. * @returns {number}
  17039. */
  17040. get threshold()
  17041. {
  17042. return this._threshold;
  17043. }
  17044. /**
  17045. * Maximum accepted distance
  17046. * @param {number} value
  17047. */
  17048. set threshold(value)
  17049. {
  17050. this._threshold = Math.max(0, +value);
  17051. }
  17052. /**
  17053. * Run the specific task of this node
  17054. * @param {SpeedyGPU} gpu
  17055. * @returns {void|SpeedyPromise<void>}
  17056. */
  17057. _run(gpu)
  17058. {
  17059. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in').read() );
  17060. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('reference').read() );
  17061. const threshold = this._threshold;
  17062. // validate shapes
  17063. if(set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize)
  17064. throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
  17065. // calculate the shape of the output
  17066. const outputTexture = this._tex[0];
  17067. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  17068. const descriptorSize = set0.descriptorSize;
  17069. const extraSize = set0.extraSize;
  17070. // apply the distance filter
  17071. (gpu.programs.keypoints.distanceFilter
  17072. .outputs(encoderLength, encoderLength, outputTexture)
  17073. )(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  17074. // done!
  17075. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  17076. }
  17077. }
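// Editor's wiring sketch (illustration only; not part of the bundle). Keeps
// only the keypoints of stream 'in' that lie within `threshold` pixels of the
// corresponding keypoint of stream 'reference' - e.g. to validate a
// bidirectional (forward-backward) optical-flow check. The port names and the
// threshold property come from the class above; the factory name
// (Speedy.Keypoint.DistanceFilter) and the `Speedy` namespace are assumptions.
function wireDistanceFilter(inStream, referenceStream, maxDistance = 3)
{
    const filter = Speedy.Keypoint.DistanceFilter();
    filter.threshold = maxDistance;                 // maximum accepted distance, in pixels

    inStream.connectTo(filter.input('in'));         // kept keypoints take their data from 'in'
    referenceStream.connectTo(filter.input('reference'));
    return filter.output();                         // connect this to the rest of the pipeline
}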
  17078. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
  17079. /*
  17080. * speedy-vision.js
  17081. * GPU-accelerated Computer Vision for JavaScript
  17082. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17083. *
  17084. * Licensed under the Apache License, Version 2.0 (the "License");
  17085. * you may not use this file except in compliance with the License.
  17086. * You may obtain a copy of the License at
  17087. *
  17088. * http://www.apache.org/licenses/LICENSE-2.0
  17089. *
  17090. * Unless required by applicable law or agreed to in writing, software
  17091. * distributed under the License is distributed on an "AS IS" BASIS,
  17092. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17093. * See the License for the specific language governing permissions and
  17094. * limitations under the License.
  17095. *
  17096. * hamming-distance-filter.js
  17097. * Given a set of pairs of keypoints, discard all pairs whose hamming
  17098. * distance (of descriptor) is above a user-defined threshold
  17099. */
  17100. /** @type {Object<number,string>} Program names */
  17101. const hamming_distance_filter_PROGRAM_NAME = {
  17102. 32: 'hammingDistanceFilter32',
  17103. 64: 'hammingDistanceFilter64',
  17104. };
  17105. /**
  17106. * Given a set of pairs of keypoints, discard all pairs whose hamming
  17107. * distance (of descriptor) is above a user-defined threshold
  17108. *
  17109. * The pairs of keypoints are provided as two separate sets, "in" and
  17110. * "reference". Keypoints that are kept will have their data extracted
  17111. * from the "in" set.
  17112. */
  17113. class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode
  17114. {
  17115. /**
  17116. * Constructor
  17117. * @param {string} [name] name of the node
  17118. */
  17119. constructor(name = undefined)
  17120. {
  17121. super(name, 1, [
  17122. InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  17123. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  17124. msg.descriptorSize > 0
  17125. ),
  17126. InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  17127. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  17128. msg.descriptorSize > 0
  17129. ),
  17130. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  17131. ]);
  17132. /** @type {number} distance threshold, an integer */
  17133. this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
  17134. }
  17135. /**
  17136. * Distance threshold, an integer
  17137. * @returns {number}
  17138. */
  17139. get threshold()
  17140. {
  17141. return this._threshold;
  17142. }
  17143. /**
  17144. * Distance threshold, an integer
  17145. * @param {number} value
  17146. */
  17147. set threshold(value)
  17148. {
  17149. this._threshold = Math.max(0, value | 0);
  17150. }
  17151. /**
  17152. * Run the specific task of this node
  17153. * @param {SpeedyGPU} gpu
  17154. * @returns {void|SpeedyPromise<void>}
  17155. */
  17156. _run(gpu)
  17157. {
  17158. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in').read() );
  17159. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('reference').read() );
  17160. const threshold = this._threshold;
  17161. // validate shapes
  17162. if(set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize)
  17163. throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
  17164. // validate descriptor size
  17165. if(!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize))
  17166. throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
  17167. // calculate the shape of the output
  17168. const outputTexture = this._tex[0];
  17169. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  17170. const descriptorSize = set0.descriptorSize;
  17171. const extraSize = set0.extraSize;
  17172. // apply the distance filter
  17173. const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
  17174. (gpu.programs.keypoints[program]
  17175. .outputs(encoderLength, encoderLength, outputTexture)
  17176. )(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  17177. // done!
  17178. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  17179. }
  17180. }
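// Editor's wiring sketch (illustration only; not part of the bundle). Same
// wiring as the distance filter above, but thresholding the Hamming distance
// of the descriptors (in bits); only 32- and 64-byte descriptors are supported,
// as validated in _run(). The factory name (Speedy.Keypoint.HammingDistanceFilter)
// and the `Speedy` namespace are assumptions.
function wireHammingDistanceFilter(inStream, referenceStream, maxBits = 64)
{
    const filter = Speedy.Keypoint.HammingDistanceFilter();
    filter.threshold = maxBits;                     // maximum accepted Hamming distance, in bits

    inStream.connectTo(filter.input('in'));
    referenceStream.connectTo(filter.input('reference'));
    return filter.output();
}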
  17181. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
  17182. /*
  17183. * speedy-vision.js
  17184. * GPU-accelerated Computer Vision for JavaScript
  17185. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17186. *
  17187. * Licensed under the Apache License, Version 2.0 (the "License");
  17188. * you may not use this file except in compliance with the License.
  17189. * You may obtain a copy of the License at
  17190. *
  17191. * http://www.apache.org/licenses/LICENSE-2.0
  17192. *
  17193. * Unless required by applicable law or agreed to in writing, software
  17194. * distributed under the License is distributed on an "AS IS" BASIS,
  17195. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17196. * See the License for the specific language governing permissions and
  17197. * limitations under the License.
  17198. *
  17199. * portal.js
  17200. * Keypoint Portals
  17201. */
  17202. /**
  17203. * A sink of a Keypoint Portal
  17204. * This is not a pipeline sink - it doesn't export any data!
  17205. */
  17206. class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode
  17207. {
  17208. /**
  17209. * Constructor
  17210. * @param {string} [name] name of the node
  17211. */
  17212. constructor(name = undefined)
  17213. {
  17214. super(name, 1, [
  17215. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  17216. ]);
  17217. /** @type {number} descriptor size, in bytes */
  17218. this._descriptorSize = 0;
  17219. /** @type {number} extra size, in bytes */
  17220. this._extraSize = 0;
/** @type {number} encoder length */
  17222. this._encoderLength = 0;
  17223. /** @type {boolean} is this node initialized? */
  17224. this._initialized = false;
  17225. }
  17226. /**
  17227. * Encoded keypoints
  17228. * @returns {SpeedyTexture}
  17229. */
  17230. get encodedKeypoints()
  17231. {
  17232. if(!this._initialized)
  17233. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  17234. return this._tex[0];
  17235. }
  17236. /**
  17237. * Descriptor size, in bytes
  17238. * @returns {number}
  17239. */
  17240. get descriptorSize()
  17241. {
  17242. if(!this._initialized)
  17243. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  17244. return this._descriptorSize;
  17245. }
  17246. /**
  17247. * Extra size, in bytes
  17248. * @returns {number}
  17249. */
  17250. get extraSize()
  17251. {
  17252. if(!this._initialized)
  17253. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  17254. return this._extraSize;
  17255. }
  17256. /**
  17257. * Encoder length
  17258. * @returns {number}
  17259. */
  17260. get encoderLength()
  17261. {
  17262. if(!this._initialized)
  17263. throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  17264. return this._encoderLength;
  17265. }
  17266. /**
  17267. * Initializes this node
  17268. * @param {SpeedyGPU} gpu
  17269. */
  17270. init(gpu)
  17271. {
  17272. super.init(gpu);
  17273. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
  17274. this._tex[0].resize(encoderLength, encoderLength).clearToColor(1,1,1,1); // initial texture
  17275. this._descriptorSize = this._extraSize = 0;
  17276. this._encoderLength = encoderLength;
  17277. this._initialized = true;
  17278. }
  17279. /**
  17280. * Releases this node
  17281. * @param {SpeedyGPU} gpu
  17282. */
  17283. release(gpu)
  17284. {
  17285. this._initialized = false;
  17286. super.release(gpu);
  17287. }
  17288. /**
  17289. * Run the specific task of this node
  17290. * @param {SpeedyGPU} gpu
  17291. * @returns {void|SpeedyPromise<void>}
  17292. */
  17293. _run(gpu)
  17294. {
  17295. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  17296. const tex = this._tex[0];
  17297. // copy input
  17298. tex.resize(encodedKeypoints.width, encodedKeypoints.height);
  17299. encodedKeypoints.copyTo(tex);
  17300. this._descriptorSize = descriptorSize;
  17301. this._extraSize = extraSize;
  17302. this._encoderLength = encoderLength;
  17303. }
  17304. }
  17305. /**
  17306. * A source of a Keypoint Portal
  17307. */
  17308. class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode
  17309. {
  17310. /**
  17311. * Constructor
  17312. * @param {string} [name] name of the node
  17313. */
  17314. constructor(name = undefined)
  17315. {
  17316. super(name, 0, [
  17317. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  17318. ]);
  17319. /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
  17320. this._source = null;
  17321. }
  17322. /**
  17323. * Data source
  17324. * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
  17325. */
  17326. get source()
  17327. {
  17328. return this._source;
  17329. }
  17330. /**
  17331. * Data source
  17332. * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
  17333. */
  17334. set source(node)
  17335. {
  17336. if(node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink))
  17337. throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  17338. this._source = node;
  17339. }
  17340. /**
  17341. * Run the specific task of this node
  17342. * @param {SpeedyGPU} gpu
  17343. * @returns {void|SpeedyPromise<void>}
  17344. */
  17345. _run(gpu)
  17346. {
  17347. if(this._source == null)
  17348. throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  17349. this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
  17350. }
  17351. }
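/*
 * Illustrative sketch (not part of the bundle): pairing a Keypoint Portal Sink
 * with a Keypoint Portal Source. The sink stores the encoded keypoints of one
 * pipeline; the source re-emits them (encodedKeypoints, descriptorSize,
 * extraSize, encoderLength) in another pipeline on a later run. The factory
 * calls (Speedy.Keypoint.Portal.*) are defined further down in this file; the
 * node wiring API (output().connectTo(input())) is assumed from the public
 * speedy-vision interface.
 *
 *   // pipeline A: detect keypoints and store them in a portal sink
 *   const portalSink = Speedy.Keypoint.Portal.Sink('portal');
 *   // ... detector.output().connectTo(portalSink.input()); ...
 *
 *   // pipeline B: read the stored keypoints back via a portal source
 *   const portalSource = Speedy.Keypoint.Portal.Source();
 *   portalSource.source = portalSink; // must be a SpeedyPipelineNodeKeypointPortalSink
 *   // ... portalSource.output().connectTo(nextNode.input()); ...
 */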
  17352. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
  17353. /*
  17354. * speedy-vision.js
  17355. * GPU-accelerated Computer Vision for JavaScript
  17356. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17357. *
  17358. * Licensed under the Apache License, Version 2.0 (the "License");
  17359. * you may not use this file except in compliance with the License.
  17360. * You may obtain a copy of the License at
  17361. *
  17362. * http://www.apache.org/licenses/LICENSE-2.0
  17363. *
  17364. * Unless required by applicable law or agreed to in writing, software
  17365. * distributed under the License is distributed on an "AS IS" BASIS,
  17366. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17367. * See the License for the specific language governing permissions and
  17368. * limitations under the License.
  17369. *
  17370. * keypoint-factory.js
  17371. * Keypoint-related nodes
  17372. */
  17373. /**
  17374. * Keypoint detectors
  17375. */
  17376. class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17377. {
  17378. /**
  17379. * FAST corner detector
  17380. * @param {string} [name]
  17381. * @returns {SpeedyPipelineNodeFASTKeypointDetector}
  17382. */
  17383. static FAST(name = undefined)
  17384. {
  17385. return new SpeedyPipelineNodeFASTKeypointDetector(name);
  17386. }
  17387. /**
  17388. * Harris corner detector
  17389. * @param {string} [name]
  17390. * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
  17391. */
  17392. static Harris(name = undefined)
  17393. {
  17394. return new SpeedyPipelineNodeHarrisKeypointDetector(name);
  17395. }
  17396. }
  17397. /**
  17398. * Keypoint descriptors
  17399. */
  17400. class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17401. {
  17402. /**
  17403. * ORB descriptors
  17404. * @param {string} [name]
  17405. * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
  17406. */
  17407. static ORB(name = undefined)
  17408. {
  17409. return new SpeedyPipelineNodeORBKeypointDescriptor(name);
  17410. }
  17411. }
  17412. /**
  17413. * Keypoint trackers
  17414. */
  17415. class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17416. {
  17417. /**
  17418. * LK optical-flow
  17419. * @param {string} [name]
  17420. * @returns {SpeedyPipelineNodeLKKeypointTracker}
  17421. */
  17422. static LK(name = undefined)
  17423. {
  17424. return new SpeedyPipelineNodeLKKeypointTracker(name);
  17425. }
  17426. }
  17427. /**
  17428. * Keypoint matchers
  17429. */
  17430. class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17431. {
  17432. /**
  17433. * Static LSH tables
  17434. * @param {string} [name]
  17435. * @returns {SpeedyPipelineNodeStaticLSHTables}
  17436. */
  17437. static StaticLSHTables(name = undefined)
  17438. {
  17439. return new SpeedyPipelineNodeStaticLSHTables(name);
  17440. }
  17441. /**
  17442. * LSH-based K-approximate nearest neighbors
  17443. * @param {string} [name]
  17444. * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
  17445. */
  17446. static LSHKNN(name = undefined)
  17447. {
  17448. return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
  17449. }
  17450. /**
  17451. * Brute-force K-nearest neighbors keypoint matcher
  17452. * @param {string} [name]
  17453. * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
  17454. */
  17455. static BFKNN(name = undefined)
  17456. {
  17457. return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
  17458. }
  17459. }
  17460. /**
  17461. * Portal nodes
  17462. */
  17463. class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17464. {
  17465. /**
17466. * Create a keypoint portal source
  17467. * @param {string} [name] name of the node
  17468. * @returns {SpeedyPipelineNodeKeypointPortalSource}
  17469. */
  17470. static Source(name = undefined)
  17471. {
  17472. return new SpeedyPipelineNodeKeypointPortalSource(name);
  17473. }
  17474. /**
17475. * Create a keypoint portal sink
  17476. * @param {string} [name] name of the node
  17477. * @returns {SpeedyPipelineNodeKeypointPortalSink}
  17478. */
  17479. static Sink(name = undefined)
  17480. {
  17481. return new SpeedyPipelineNodeKeypointPortalSink(name);
  17482. }
  17483. }
  17484. /**
  17485. * Keypoint-related nodes
  17486. */
  17487. class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q
  17488. {
  17489. /**
  17490. * Keypoint detectors
  17491. * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
  17492. */
  17493. static get Detector()
  17494. {
  17495. return SpeedyPipelineKeypointDetectorFactory;
  17496. }
  17497. /**
  17498. * Keypoint descriptors
  17499. * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
  17500. */
  17501. static get Descriptor()
  17502. {
  17503. return SpeedyPipelineKeypointDescriptorFactory;
  17504. }
  17505. /**
  17506. * Keypoint trackers
  17507. * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
  17508. */
  17509. static get Tracker()
  17510. {
  17511. return SpeedyPipelineKeypointTrackerFactory;
  17512. }
  17513. /**
  17514. * Keypoint matchers
  17515. * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
  17516. */
  17517. static get Matcher()
  17518. {
  17519. return SpeedyPipelineKeypointMatcherFactory;
  17520. }
  17521. /**
  17522. * Keypoint Portals
  17523. * @returns {typeof SpeedyPipelineKeypointPortalFactory}
  17524. */
  17525. static get Portal()
  17526. {
  17527. return SpeedyPipelineKeypointPortalFactory;
  17528. }
  17529. /**
  17530. * Create a keypoint source
  17531. * @param {string} [name]
  17532. * @returns {SpeedyPipelineNodeKeypointSource}
  17533. */
  17534. static Source(name = undefined)
  17535. {
  17536. return new SpeedyPipelineNodeKeypointSource(name);
  17537. }
  17538. /**
  17539. * Create a keypoint sink
  17540. * @param {string} [name]
  17541. * @returns {SpeedyPipelineNodeKeypointSink}
  17542. */
  17543. static Sink(name = undefined)
  17544. {
  17545. return new SpeedyPipelineNodeKeypointSink(name);
  17546. }
  17547. /**
  17548. * Create a sink of tracked keypoints
  17549. * @param {string} [name]
  17550. * @returns {SpeedyPipelineNodeTrackedKeypointSink}
  17551. */
  17552. static SinkOfTrackedKeypoints(name = undefined)
  17553. {
  17554. return new SpeedyPipelineNodeTrackedKeypointSink(name);
  17555. }
  17556. /**
  17557. * Create a sink of matched keypoints
  17558. * @param {string} [name]
  17559. * @returns {SpeedyPipelineNodeMatchedKeypointSink}
  17560. */
  17561. static SinkOfMatchedKeypoints(name = undefined)
  17562. {
  17563. return new SpeedyPipelineNodeMatchedKeypointSink(name);
  17564. }
  17565. /**
  17566. * Keypoint clipper
  17567. * @param {string} [name]
  17568. * @returns {SpeedyPipelineNodeKeypointClipper}
  17569. */
  17570. static Clipper(name = undefined)
  17571. {
  17572. return new SpeedyPipelineNodeKeypointClipper(name);
  17573. }
  17574. /**
  17575. * Border Clipper
  17576. * @param {string} [name]
  17577. * @returns {SpeedyPipelineNodeKeypointBorderClipper}
  17578. */
  17579. static BorderClipper(name = undefined)
  17580. {
  17581. return new SpeedyPipelineNodeKeypointBorderClipper(name);
  17582. }
  17583. /**
  17584. * Create a keypoint buffer
  17585. * @param {string} [name]
  17586. * @returns {SpeedyPipelineNodeKeypointBuffer}
  17587. */
  17588. static Buffer(name = undefined)
  17589. {
  17590. return new SpeedyPipelineNodeKeypointBuffer(name);
  17591. }
  17592. /**
  17593. * Create a keypoint mixer
  17594. * @param {string} [name]
  17595. * @returns {SpeedyPipelineNodeKeypointMixer}
  17596. */
  17597. static Mixer(name = undefined)
  17598. {
  17599. return new SpeedyPipelineNodeKeypointMixer(name);
  17600. }
  17601. /**
  17602. * Create a keypoint shuffler
  17603. * @param {string} [name]
  17604. * @returns {SpeedyPipelineNodeKeypointShuffler}
  17605. */
  17606. static Shuffler(name = undefined)
  17607. {
  17608. return new SpeedyPipelineNodeKeypointShuffler(name);
  17609. }
  17610. /**
  17611. * Create a keypoint multiplexer
  17612. * @param {string} [name]
  17613. * @returns {SpeedyPipelineNodeKeypointMultiplexer}
  17614. */
  17615. static Multiplexer(name = undefined)
  17616. {
  17617. return new SpeedyPipelineNodeKeypointMultiplexer(name);
  17618. }
  17619. /**
  17620. * Create a keypoint transformer
  17621. * @param {string} [name]
  17622. * @returns {SpeedyPipelineNodeKeypointTransformer}
  17623. */
  17624. static Transformer(name = undefined)
  17625. {
  17626. return new SpeedyPipelineNodeKeypointTransformer(name);
  17627. }
  17628. /**
  17629. * Create a subpixel refiner of keypoint locations
  17630. * @param {string} [name]
  17631. * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
  17632. */
  17633. static SubpixelRefiner(name = undefined)
  17634. {
  17635. return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
  17636. }
  17637. /**
  17638. * Distance filter
  17639. * @param {string} [name]
17640. * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
  17641. */
  17642. static DistanceFilter(name = undefined)
  17643. {
  17644. return new SpeedyPipelineNodeKeypointDistanceFilter(name);
  17645. }
  17646. /**
  17647. * Hamming distance filter
  17648. * @param {string} [name]
17649. * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
  17650. */
  17651. static HammingDistanceFilter(name = undefined)
  17652. {
  17653. return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
  17654. }
  17655. }
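/*
 * Illustrative sketch (not part of the bundle): using the keypoint factories
 * defined above through the public Speedy namespace (see the Speedy class in
 * main.js below). Node wiring and execution (output().connectTo(),
 * pipeline.init(), pipeline.run()) are assumed from the public speedy-vision
 * API; node and property names used here ('keypoints', media, size) are how
 * that API is commonly used, not something defined in this section.
 *
 *   const pipeline = Speedy.Pipeline();
 *   const source = Speedy.Image.Source();
 *   const greyscale = Speedy.Filter.Greyscale();
 *   const fast = Speedy.Keypoint.Detector.FAST();
 *   const clipper = Speedy.Keypoint.Clipper();
 *   const sink = Speedy.Keypoint.Sink('keypoints');
 *
 *   source.media = media; // a SpeedyMedia obtained via Speedy.load()
 *   clipper.size = 800;   // keep up to 800 keypoints
 *
 *   source.output().connectTo(greyscale.input());
 *   greyscale.output().connectTo(fast.input());
 *   fast.output().connectTo(clipper.input());
 *   clipper.output().connectTo(sink.input());
 *
 *   pipeline.init(source, greyscale, fast, clipper, sink);
 *   pipeline.run().then(({ keypoints }) => console.log(keypoints.length));
 */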
  17656. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
  17657. /*
  17658. * speedy-vision.js
  17659. * GPU-accelerated Computer Vision for JavaScript
  17660. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17661. *
  17662. * Licensed under the Apache License, Version 2.0 (the "License");
  17663. * you may not use this file except in compliance with the License.
  17664. * You may obtain a copy of the License at
  17665. *
  17666. * http://www.apache.org/licenses/LICENSE-2.0
  17667. *
  17668. * Unless required by applicable law or agreed to in writing, software
  17669. * distributed under the License is distributed on an "AS IS" BASIS,
  17670. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17671. * See the License for the specific language governing permissions and
  17672. * limitations under the License.
  17673. *
  17674. * sink.js
17675. * Gets 2D vectors out of the pipeline
  17676. */
  17677. // next power of 2
  17678. const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  17679. /**
  17680. * Gets 2D vectors out of the pipeline
  17681. */
  17682. class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode
  17683. {
  17684. /**
  17685. * Constructor
  17686. * @param {string} [name] name of the node
  17687. */
  17688. constructor(name = 'vec2')
  17689. {
  17690. super(name, 2, [
  17691. InputPort().expects(SpeedyPipelineMessageType.Vector2)
  17692. ]);
  17693. /** @type {SpeedyVector2[]} 2D vectors (output) */
  17694. this._vectors = [];
  17695. /** @type {SpeedyTextureReader} texture reader */
  17696. this._textureReader = new SpeedyTextureReader();
  17697. /** @type {number} page flipping index */
  17698. this._page = 0;
  17699. /** @type {boolean} accelerate GPU-CPU transfers */
  17700. this._turbo = false;
  17701. }
  17702. /**
  17703. * Accelerate GPU-CPU transfers
  17704. * @returns {boolean}
  17705. */
  17706. get turbo()
  17707. {
  17708. return this._turbo;
  17709. }
  17710. /**
  17711. * Accelerate GPU-CPU transfers
  17712. * @param {boolean} value
  17713. */
  17714. set turbo(value)
  17715. {
  17716. this._turbo = Boolean(value);
  17717. }
  17718. /**
  17719. * Initializes this node
  17720. * @param {SpeedyGPU} gpu
  17721. */
  17722. init(gpu)
  17723. {
  17724. super.init(gpu);
  17725. this._textureReader.init(gpu);
  17726. }
  17727. /**
  17728. * Releases this node
  17729. * @param {SpeedyGPU} gpu
  17730. */
  17731. release(gpu)
  17732. {
  17733. this._textureReader.release(gpu);
  17734. super.release(gpu);
  17735. }
  17736. /**
  17737. * Export data from this node to the user
  17738. * @returns {SpeedyPromise<SpeedyVector2[]>}
  17739. */
  17740. export()
  17741. {
  17742. return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
  17743. }
  17744. /**
  17745. * Run the specific task of this node
  17746. * @param {SpeedyGPU} gpu
  17747. * @returns {void|SpeedyPromise<void>}
  17748. */
  17749. _run(gpu)
  17750. {
  17751. const { vectors } = /** @type {SpeedyPipelineMessageWith2DVectors} */ ( this.input().read() );
  17752. const useBufferedDownloads = this._turbo;
  17753. const encoderLength = vectors.width;
  17754. /*
  17755. I have found experimentally that, in Firefox, readPixelsAsync()
  17756. performs MUCH better if the width of the target texture is a power
  17757. of two. I have no idea why this is the case, nor if it's related to
  17758. some interaction with the GL drivers, somehow. This seems to make no
  17759. difference on Chrome, however. In any case, let's convert the input
  17760. texture to POT.
  17761. */
  17762. const encoderWidth = vector2_sink_nextPot(encoderLength);
  17763. const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  17764. //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
  17765. // copy the set of vectors to an internal texture
  17766. const copiedTexture = this._tex[this._page];
  17767. (gpu.programs.utils.copy2DVectors
  17768. .outputs(encoderWidth, encoderHeight, copiedTexture)
  17769. )(vectors);
  17770. // flip page
  17771. this._page = 1 - this._page;
  17772. // download the internal texture
  17773. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  17774. this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
  17775. });
  17776. }
  17777. /**
  17778. * Decode a sequence of vectors, given a flattened image of encoded pixels
  17779. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  17780. * @param {number} encoderWidth
  17781. * @param {number} encoderHeight
  17782. * @returns {SpeedyVector2[]} vectors
  17783. */
  17784. static _decode(pixels, encoderWidth, encoderHeight)
  17785. {
  17786. const bytesPerVector = 4; // 1 pixel per vector
  17787. const vectors = [];
  17788. let hi = 0, lo = 0;
  17789. let x = 0, y = 0;
  17790. // how many bytes should we read?
  17791. const e2 = encoderWidth * encoderHeight * bytesPerVector;
  17792. const size = Math.min(pixels.length, e2);
  17793. // for each encoded vector
  17794. for(let i = 0; i < size; i += bytesPerVector) {
  17795. // extract 16-bit words
  17796. lo = (pixels[i+1] << 8) | pixels[i];
  17797. hi = (pixels[i+3] << 8) | pixels[i+2];
  17798. // the vector is "null": we have reached the end of the list
  17799. if(lo == 0xFFFF && hi == 0xFFFF)
  17800. break;
  17801. // the vector must be discarded
  17802. if(lo == 0xFF00 && hi == 0xFF00)
  17803. continue;
  17804. // decode floats
  17805. x = utils/* Utils */.A.decodeFloat16(lo);
  17806. y = utils/* Utils */.A.decodeFloat16(hi);
  17807. // register vector
  17808. vectors.push(new SpeedyVector2(x, y));
  17809. }
  17810. // done!
  17811. return vectors;
  17812. }
  17813. }
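/*
 * Worked sketch (not part of the bundle) of the encoding consumed by _decode()
 * above: each vector occupies one RGBA pixel (4 bytes) holding two
 * little-endian 16-bit half-floats (x in RG, y in BA). (0xFFFF, 0xFFFF) marks
 * the end of the list and (0xFF00, 0xFF00) marks a discarded entry. The
 * half-float decoder below is a standard IEEE 754 binary16 conversion,
 * equivalent in spirit to the Utils.decodeFloat16() call used above.
 *
 *   function decodeFloat16(h) {
 *       const sign = (h & 0x8000) ? -1 : 1;
 *       const exp = (h >> 10) & 0x1f;
 *       const frac = h & 0x3ff;
 *       if (exp === 0) return sign * Math.pow(2, -14) * (frac / 1024); // subnormal
 *       if (exp === 0x1f) return frac ? NaN : sign * Infinity;
 *       return sign * Math.pow(2, exp - 15) * (1 + frac / 1024);
 *   }
 *
 *   // decode the first vector of a pixel buffer [r,g,b,a,...]
 *   const lo = (pixels[1] << 8) | pixels[0]; // x as binary16
 *   const hi = (pixels[3] << 8) | pixels[2]; // y as binary16
 *   const v = { x: decodeFloat16(lo), y: decodeFloat16(hi) };
 */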
  17814. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
  17815. /*
  17816. * speedy-vision.js
  17817. * GPU-accelerated Computer Vision for JavaScript
  17818. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17819. *
  17820. * Licensed under the Apache License, Version 2.0 (the "License");
  17821. * you may not use this file except in compliance with the License.
  17822. * You may obtain a copy of the License at
  17823. *
  17824. * http://www.apache.org/licenses/LICENSE-2.0
  17825. *
  17826. * Unless required by applicable law or agreed to in writing, software
  17827. * distributed under the License is distributed on an "AS IS" BASIS,
  17828. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17829. * See the License for the specific language governing permissions and
  17830. * limitations under the License.
  17831. *
  17832. * vector2-factory.js
  17833. * 2D vectors
  17834. */
  17835. /**
  17836. * 2D vectors
  17837. */
  17838. class SpeedyPipelineVector2Factory extends Function
  17839. {
  17840. /**
  17841. * Constructor
  17842. */
  17843. constructor()
  17844. {
  17845. // This factory can be invoked as a function
  17846. super('...args', 'return this._create(...args)');
  17847. return this.bind(this);
  17848. }
  17849. /**
  17850. * @private
  17851. *
  17852. * Create a 2D vector
  17853. * @param {number} x x-coordinate
  17854. * @param {number} y y-coordinate
  17855. * @returns {SpeedyVector2}
  17856. */
  17857. _create(x, y)
  17858. {
  17859. return new SpeedyVector2(x, y);
  17860. }
  17861. /**
  17862. * Create a Vector2 sink
  17863. * @param {string} [name]
  17864. * @returns {SpeedyPipelineNodeVector2Sink}
  17865. */
  17866. Sink(name = undefined)
  17867. {
  17868. return new SpeedyPipelineNodeVector2Sink(name);
  17869. }
  17870. }
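/*
 * Illustrative sketch (not part of the bundle) of the callable-factory trick
 * used by SpeedyPipelineVector2Factory above: subclass Function with a body
 * that forwards to _create(), then bind the instance so the object can be
 * invoked directly as a function while still exposing its methods.
 *
 *   class CallableFactory extends Function {
 *       constructor() {
 *           super('...args', 'return this._create(...args)');
 *           return this.bind(this); // the bound function replaces `this`
 *       }
 *       _create(x, y) { return { x, y }; }
 *       Sink() { return 'a sink node'; }
 *   }
 *
 *   const make = new CallableFactory();
 *   make(3, 4);   // { x: 3, y: 4 } - invoked as a function
 *   make.Sink();  // 'a sink node'  - invoked as an object
 */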
  17871. ;// CONCATENATED MODULE: ./src/main.js
  17872. /*
  17873. * speedy-vision.js
  17874. * GPU-accelerated Computer Vision for JavaScript
  17875. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  17876. *
  17877. * Licensed under the Apache License, Version 2.0 (the "License");
  17878. * you may not use this file except in compliance with the License.
  17879. * You may obtain a copy of the License at
  17880. *
  17881. * http://www.apache.org/licenses/LICENSE-2.0
  17882. *
  17883. * Unless required by applicable law or agreed to in writing, software
  17884. * distributed under the License is distributed on an "AS IS" BASIS,
  17885. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17886. * See the License for the specific language governing permissions and
  17887. * limitations under the License.
  17888. *
  17889. * main.js
  17890. * The entry point of the library
  17891. */
  17892. /* eslint-disable no-undef */
  17893. /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  17894. /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
  17895. /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
  17896. /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  17897. // Constants
  17898. /** @type {SpeedyMatrixFactory} */
  17899. const matrixFactory = new SpeedyMatrixFactory();
  17900. /** @type {SpeedyPipelineVector2Factory} */
  17901. const vector2Factory = new SpeedyPipelineVector2Factory();
  17902. /**
  17903. * GPU-accelerated Computer Vision for JavaScript
  17904. */
  17905. class Speedy
  17906. {
  17907. /**
  17908. * Loads a SpeedyMedia object based on the provided source element
  17909. * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
  17910. * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
  17911. * @returns {SpeedyPromise<SpeedyMedia>}
  17912. */
  17913. static load(sourceElement, options = {})
  17914. {
  17915. return SpeedyMedia.load(sourceElement, options);
  17916. }
  17917. /**
  17918. * Loads a camera stream
17919. * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
  17920. * @param {number} [height] height of the stream
  17921. * @returns {SpeedyPromise<SpeedyMedia>}
  17922. */
  17923. static camera(widthOrConstraints = 640, height = 360)
  17924. {
  17925. const constraints = (typeof(widthOrConstraints) === 'object') ? widthOrConstraints : ({
  17926. audio: false,
  17927. video: {
  17928. width: widthOrConstraints | 0,
  17929. height: height | 0,
  17930. },
  17931. });
  17932. return utils/* Utils */.A.requestCameraStream(constraints).then(
  17933. video => SpeedyMedia.load(video)
  17934. );
  17935. }
  17936. /**
  17937. * Checks if Speedy can be executed in this machine & browser
  17938. * @returns {boolean} true if Speedy can be executed in this machine & browser
  17939. */
  17940. static isSupported()
  17941. {
  17942. return (
  17943. (typeof WebAssembly !== 'undefined') &&
  17944. (typeof WebGL2RenderingContext !== 'undefined') &&
  17945. (speedy_gl/* SpeedyGL */.c.instance.gl != null)
  17946. );
  17947. }
  17948. /**
  17949. * Create a 2D vector
  17950. * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
  17951. */
  17952. static get Vector2()
  17953. {
  17954. return vector2Factory;
  17955. }
  17956. /**
  17957. * Create a 2D point
  17958. * @param {number} x
  17959. * @param {number} y
  17960. * @returns {SpeedyPoint2}
  17961. */
  17962. static Point2(x, y)
  17963. {
  17964. return new SpeedyPoint2(x, y);
  17965. }
  17966. /**
  17967. * Create a new size object
  17968. * @param {number} width
  17969. * @param {number} height
  17970. * @returns {SpeedySize}
  17971. */
  17972. static Size(width, height)
  17973. {
  17974. return new SpeedySize(width, height);
  17975. }
  17976. /**
  17977. * Create a Matrix (entries are given in column-major format)
  17978. * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
  17979. */
  17980. static get Matrix()
  17981. {
  17982. return matrixFactory;
  17983. }
  17984. /**
  17985. * Speedy Promises
  17986. * @returns {typeof SpeedyPromise}
  17987. */
  17988. static get Promise()
  17989. {
  17990. return speedy_promise/* SpeedyPromise */.i;
  17991. }
  17992. /**
  17993. * Create a new Pipeline
  17994. * @returns {SpeedyPipeline}
  17995. */
  17996. static Pipeline()
  17997. {
  17998. return new SpeedyPipeline();
  17999. }
  18000. /**
  18001. * Image-related nodes
  18002. * @returns {typeof SpeedyPipelineImageFactory}
  18003. */
  18004. static get Image()
  18005. {
  18006. return SpeedyPipelineImageFactory;
  18007. }
  18008. /**
  18009. * Image filters
  18010. * @returns {typeof SpeedyPipelineFilterFactory}
  18011. */
  18012. static get Filter()
  18013. {
  18014. return SpeedyPipelineFilterFactory;
  18015. }
  18016. /**
  18017. * Image transforms
  18018. * @returns {typeof SpeedyPipelineTransformFactory}
  18019. */
  18020. static get Transform()
  18021. {
  18022. return SpeedyPipelineTransformFactory;
  18023. }
  18024. /**
  18025. * Keypoint-related nodes
  18026. * @returns {typeof SpeedyPipelineKeypointFactory}
  18027. */
  18028. static get Keypoint()
  18029. {
  18030. return SpeedyPipelineKeypointFactory;
  18031. }
  18032. /**
  18033. * The version of the library
  18034. * @returns {string} The version of the library
  18035. */
  18036. static get version()
  18037. {
17938. return "0.9.1";
  18042. }
  18043. /**
  18044. * The FPS rate
  18045. * @returns {number} Frames per second (FPS)
  18046. */
  18047. static get fps()
  18048. {
  18049. return FPSCounter.instance.fps;
  18050. }
  18051. /**
  18052. * Global settings
  18053. * @returns {typeof Settings}
  18054. */
  18055. static get Settings()
  18056. {
  18057. return settings/* Settings */.w;
  18058. }
  18059. }
  18060. // Freeze the namespace
  18061. Object.freeze(Speedy);
  18062. // Display a notice
  18063. utils/* Utils */.A.log(
  18064. `Speedy Vision version ${Speedy.version}. ` +
  18065. `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` +
  18066. "https://github.com/alemart/speedy-vision"
  18067. );
  18068. // Big-endian machine? Currently untested.
  18069. if(!globals.LITTLE_ENDIAN)
  18070. utils/* Utils */.A.warning('Running on a big-endian machine');
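/*
 * Illustrative sketch (not part of the bundle): typical use of the Speedy
 * namespace defined above. The <video> element query is hypothetical.
 *
 *   if(!Speedy.isSupported())
 *       throw new Error('This browser cannot run Speedy Vision');
 *
 *   // load a media object from an existing element...
 *   Speedy.load(document.querySelector('video')).then(media => {
 *       console.log(media.width, media.height);
 *   });
 *
 *   // ...or request a 640x360 camera stream
 *   Speedy.camera(640, 360).then(media => {
 *       console.log(`Running at ${Speedy.fps} fps`);
 *   });
 */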
  18071. })();
  18072. __nested_webpack_exports__ = __nested_webpack_exports__["default"];
  18073. /******/ return __nested_webpack_exports__;
  18074. /******/ })()
  18075. ;
  18076. });
  18077. /***/ })
  18078. /******/ });
  18079. /************************************************************************/
  18080. /******/ // The module cache
  18081. /******/ var __webpack_module_cache__ = {};
  18082. /******/
  18083. /******/ // The require function
  18084. /******/ function __webpack_require__(moduleId) {
  18085. /******/ // Check if module is in cache
  18086. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  18087. /******/ if (cachedModule !== undefined) {
  18088. /******/ return cachedModule.exports;
  18089. /******/ }
  18090. /******/ // Create a new module (and put it into the cache)
  18091. /******/ var module = __webpack_module_cache__[moduleId] = {
  18092. /******/ // no module.id needed
  18093. /******/ // no module.loaded needed
  18094. /******/ exports: {}
  18095. /******/ };
  18096. /******/
  18097. /******/ // Execute the module function
  18098. /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  18099. /******/
  18100. /******/ // Return the exports of the module
  18101. /******/ return module.exports;
  18102. /******/ }
  18103. /******/
  18104. /************************************************************************/
  18105. /******/ /* webpack/runtime/compat get default export */
  18106. /******/ (() => {
  18107. /******/ // getDefaultExport function for compatibility with non-harmony modules
  18108. /******/ __webpack_require__.n = (module) => {
  18109. /******/ var getter = module && module.__esModule ?
  18110. /******/ () => (module['default']) :
  18111. /******/ () => (module);
  18112. /******/ __webpack_require__.d(getter, { a: getter });
  18113. /******/ return getter;
  18114. /******/ };
  18115. /******/ })();
  18116. /******/
  18117. /******/ /* webpack/runtime/define property getters */
  18118. /******/ (() => {
  18119. /******/ // define getter functions for harmony exports
  18120. /******/ __webpack_require__.d = (exports, definition) => {
  18121. /******/ for(var key in definition) {
  18122. /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
  18123. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  18124. /******/ }
  18125. /******/ }
  18126. /******/ };
  18127. /******/ })();
  18128. /******/
  18129. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  18130. /******/ (() => {
  18131. /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  18132. /******/ })();
  18133. /******/
  18134. /************************************************************************/
  18135. var __webpack_exports__ = {};
18136. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  18137. (() => {
  18138. "use strict";
  18139. // EXPORTS
  18140. __webpack_require__.d(__webpack_exports__, {
  18141. "default": () => (/* binding */ Martins)
  18142. });
  18143. // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
  18144. var speedy_vision = __webpack_require__(774);
  18145. var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
  18146. ;// CONCATENATED MODULE: ./src/utils/errors.ts
  18147. /*
  18148. * MARTINS.js
  18149. * GPU-accelerated Augmented Reality for the web
  18150. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18151. *
  18152. * This program is free software: you can redistribute it and/or modify
  18153. * it under the terms of the GNU Lesser General Public License as published
  18154. * by the Free Software Foundation, either version 3 of the License, or
  18155. * (at your option) any later version.
  18156. *
  18157. * This program is distributed in the hope that it will be useful,
  18158. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18159. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18160. * GNU Lesser General Public License for more details.
  18161. *
  18162. * You should have received a copy of the GNU Lesser General Public License
  18163. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18164. *
  18165. * errors.ts
  18166. * Error classes
  18167. */
  18168. /**
  18169. * Generic error class
  18170. */
  18171. class MartinsError extends Error {
  18172. /**
  18173. * Constructor
  18174. * @param message error message
  18175. * @param cause optional error cause
  18176. */
  18177. constructor(message = '', cause = null) {
  18178. super(`${message}\n${cause ? cause.toString() : ''}`);
  18179. this.cause = cause;
  18180. }
  18181. /**
  18182. * Error name
  18183. */
  18184. get name() {
  18185. return this.constructor.name;
  18186. }
  18187. }
  18188. /**
  18189. * A method has received one or more illegal arguments
  18190. */
  18191. class IllegalArgumentError extends MartinsError {
  18192. }
  18193. /**
  18194. * The method arguments are valid, but the method can't be called due to the
  18195. * current state of the object
  18196. */
  18197. class IllegalOperationError extends MartinsError {
  18198. }
  18199. /**
  18200. * The requested operation is not supported
  18201. */
  18202. class NotSupportedError extends MartinsError {
  18203. }
  18204. /**
  18205. * Access denied
  18206. */
  18207. class AccessDeniedError extends MartinsError {
  18208. }
  18209. /**
  18210. * Timeout
  18211. */
  18212. class TimeoutError extends MartinsError {
  18213. }
  18214. /**
  18215. * Assertion error
  18216. */
  18217. class AssertionError extends MartinsError {
  18218. }
  18219. /**
  18220. * Tracking error
  18221. */
  18222. class TrackingError extends MartinsError {
  18223. }
  18224. /**
  18225. * Detection error
  18226. */
  18227. class DetectionError extends MartinsError {
  18228. }
  18229. /**
  18230. * Training error
  18231. */
  18232. class TrainingError extends MartinsError {
  18233. }
  18234. ;// CONCATENATED MODULE: ./src/core/resolution.ts
  18235. /*
  18236. * MARTINS.js
  18237. * GPU-accelerated Augmented Reality for the web
  18238. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18239. *
  18240. * This program is free software: you can redistribute it and/or modify
  18241. * it under the terms of the GNU Lesser General Public License as published
  18242. * by the Free Software Foundation, either version 3 of the License, or
  18243. * (at your option) any later version.
  18244. *
  18245. * This program is distributed in the hope that it will be useful,
  18246. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18247. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18248. * GNU Lesser General Public License for more details.
  18249. *
  18250. * You should have received a copy of the GNU Lesser General Public License
  18251. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18252. *
  18253. * resolution.ts
  18254. * Resolution utilities
  18255. */
  18256. /** Reference heights when in landscape mode, measured in pixels */
  18257. const REFERENCE_HEIGHT = {
  18258. 'xs': 120,
  18259. 'xs+': 160,
  18260. 'sm': 200,
  18261. 'sm+': 240,
  18262. 'md': 320,
  18263. 'md+': 360,
  18264. 'lg': 480,
  18265. 'lg+': 600,
  18266. };
  18267. /**
  18268. * Convert a resolution type to a (width, height) pair
  18269. * @param resolution resolution type
  18270. * @param aspectRatio desired width / height ratio
  18271. * @returns size in pixels
  18272. */
  18273. function computeResolution(resolution, aspectRatio) {
  18274. const referenceHeight = REFERENCE_HEIGHT[resolution];
  18275. let width = 0, height = 0;
  18276. if (aspectRatio >= 1) {
  18277. // landscape
  18278. height = referenceHeight;
  18279. width = Math.round(height * aspectRatio);
  18280. width -= width % 2;
  18281. }
  18282. else {
  18283. // portrait
  18284. width = referenceHeight;
  18285. height = Math.round(width / aspectRatio);
  18286. height -= height % 2;
  18287. }
  18288. return speedy_vision_default().Size(width, height);
  18289. }
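/*
 * Worked examples (not part of the bundle) for computeResolution() above.
 * The computed dimension is rounded and then forced to be even:
 *
 *   computeResolution('md', 16/9);  // landscape: 568x320 (320 * 16/9 = 568.9 -> 569 -> 568)
 *   computeResolution('lg', 16/9);  // landscape: 852x480
 *   computeResolution('md', 9/16);  // portrait:  320x568
 */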
  18290. ;// CONCATENATED MODULE: ./src/utils/utils.ts
  18291. /*
  18292. * MARTINS.js
  18293. * GPU-accelerated Augmented Reality for the web
  18294. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18295. *
  18296. * This program is free software: you can redistribute it and/or modify
  18297. * it under the terms of the GNU Lesser General Public License as published
  18298. * by the Free Software Foundation, either version 3 of the License, or
  18299. * (at your option) any later version.
  18300. *
  18301. * This program is distributed in the hope that it will be useful,
  18302. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18303. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18304. * GNU Lesser General Public License for more details.
  18305. *
  18306. * You should have received a copy of the GNU Lesser General Public License
  18307. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18308. *
  18309. * utils.ts
  18310. * Generic utilities
  18311. */
  18312. /**
  18313. * Generic utilities
  18314. */
  18315. class Utils {
  18316. /**
  18317. * Log a message
  18318. * @param message
  18319. * @param args optional additional messages
  18320. */
  18321. static log(message, ...args) {
  18322. console.log('[martins-js]', message, ...args);
  18323. }
  18324. /**
  18325. * Display a warning
  18326. * @param message
  18327. * @param args optional additional messages
  18328. */
  18329. static warning(message, ...args) {
  18330. console.warn('[martins-js]', message, ...args);
  18331. }
  18332. /**
  18333. * Display an error message
  18334. * @param message
  18335. * @param args optional additional messages
  18336. */
  18337. static error(message, ...args) {
  18338. console.error('[martins-js]', message, ...args);
  18339. }
  18340. /**
  18341. * Assertion
  18342. * @param expr expression
  18343. * @param errorMessage optional error message
  18344. * @throws {AssertionError}
  18345. */
  18346. static assert(expr, errorMessage = '') {
  18347. if (!expr)
  18348. throw new AssertionError(errorMessage);
  18349. }
  18350. /**
  18351. * Returns a range [0, 1, ..., n-1]
  18352. * @param n non-negative integer
  18353. * @returns range from 0 to n-1, inclusive
  18354. */
  18355. static range(n) {
  18356. if ((n |= 0) < 0)
  18357. throw new IllegalArgumentError();
  18358. return Array.from({ length: n }, (_, i) => i);
  18359. }
  18360. /**
  18361. * Convert a resolution type to a resolution measured in pixels
  18362. * @param resolution resolution type
  18363. * @param aspectRatio width / height ratio
  18364. * @returns resolution measured in pixels
  18365. */
  18366. static resolution(resolution, aspectRatio) {
  18367. return computeResolution(resolution, aspectRatio);
  18368. }
  18369. /**
  18370. * Returns a string containing platform brand information
  18371. * @returns platform brand information
  18372. */
  18373. static platformString() {
  18374. return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
  18375. navigator.userAgentData.platform : // use only low entropy data
  18376. navigator.platform // navigator.platform is deprecated
  18377. )(navigator);
  18378. }
  18379. /**
  18380. * Checks if we're on iOS
  18381. * @returns true if we're on iOS
  18382. */
  18383. static isIOS() {
  18384. const platform = Utils.platformString();
  18385. if (/(iOS|iPhone|iPad|iPod)/i.test(platform))
  18386. return true;
  18387. if (/Mac/i.test(platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
  18388. return navigator.maxTouchPoints > 2;
  18389. return false;
  18390. }
  18391. /**
  18392. * Checks if we're on a WebKit-based browser
  18393. * @returns true if we're on a WebKit-based browser
  18394. */
  18395. static isWebKit() {
  18396. // note: navigator.vendor is deprecated.
  18397. // Alternatively, test GL_RENDERER == "Apple GPU"
  18398. if (/Apple/.test(navigator.vendor))
  18399. return true;
  18400. // Desktop and Mobile Safari, Epiphany on Linux
  18401. if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
  18402. return true;
  18403. // Chrome, Firefox, Edge on iOS
  18404. if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
  18405. return true;
  18406. // not WebKit
  18407. return false;
  18408. }
  18409. }
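/*
 * Illustrative sketch (not part of the bundle): the Utils helpers above.
 *
 *   Utils.range(4);                    // [0, 1, 2, 3]
 *   Utils.assert(1 + 1 === 2, 'math'); // no-op; throws an AssertionError if false
 *   Utils.resolution('md', 16/9);      // a SpeedySize of 568x320, see computeResolution()
 *   Utils.log('hello');                // console: [martins-js] hello
 */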
  18410. ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
  18411. /*
  18412. * MARTINS.js
  18413. * GPU-accelerated Augmented Reality for the web
  18414. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18415. *
  18416. * This program is free software: you can redistribute it and/or modify
  18417. * it under the terms of the GNU Lesser General Public License as published
  18418. * by the Free Software Foundation, either version 3 of the License, or
  18419. * (at your option) any later version.
  18420. *
  18421. * This program is distributed in the hope that it will be useful,
  18422. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18423. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18424. * GNU Lesser General Public License for more details.
  18425. *
  18426. * You should have received a copy of the GNU Lesser General Public License
  18427. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18428. *
  18429. * ar-events.ts
  18430. * AR-related Events
  18431. */
  18432. /**
  18433. * AR Event
  18434. */
  18435. class AREvent extends Event {
  18436. /**
  18437. * Constructor
  18438. * @param type event type
  18439. */
  18440. constructor(type) {
  18441. super(type);
  18442. }
  18443. /**
  18444. * Event type
  18445. */
  18446. get type() {
  18447. return super.type;
  18448. }
  18449. }
  18450. /**
  18451. * AR Event Target
  18452. */
  18453. class AREventTarget {
  18454. /**
  18455. * Constructor
  18456. */
  18457. constructor() {
  18458. this._delegate = new EventTarget();
  18459. }
  18460. /**
  18461. * Add event listener
  18462. * @param type event type
  18463. * @param callback
  18464. */
  18465. addEventListener(type, callback) {
  18466. this._delegate.addEventListener(type, callback);
  18467. }
  18468. /**
  18469. * Remove event listener
  18470. * @param type event type
  18471. * @param callback
  18472. */
  18473. removeEventListener(type, callback) {
  18474. this._delegate.removeEventListener(type, callback);
  18475. }
  18476. /**
  18477. * Synchronously trigger an event
  18478. * @param event
  18479. * @returns same value as a standard event target
  18480. * @internal
  18481. */
  18482. dispatchEvent(event) {
  18483. return this._delegate.dispatchEvent(event);
  18484. }
  18485. }
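/*
 * Illustrative sketch (not part of the bundle): AREvent / AREventTarget in use.
 * The 'resize' event type mirrors the ViewportEvent dispatched in viewport.ts
 * below; the subclass name is hypothetical.
 *
 *   class MyTarget extends AREventTarget { }
 *
 *   const target = new MyTarget();
 *   target.addEventListener('resize', event => console.log(event.type));
 *   target.dispatchEvent(new AREvent('resize')); // logs 'resize'
 */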
  18486. ;// CONCATENATED MODULE: ./src/core/hud.ts
  18487. /*
  18488. * MARTINS.js
  18489. * GPU-accelerated Augmented Reality for the web
  18490. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18491. *
  18492. * This program is free software: you can redistribute it and/or modify
  18493. * it under the terms of the GNU Lesser General Public License as published
  18494. * by the Free Software Foundation, either version 3 of the License, or
  18495. * (at your option) any later version.
  18496. *
  18497. * This program is distributed in the hope that it will be useful,
  18498. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18499. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18500. * GNU Lesser General Public License for more details.
  18501. *
  18502. * You should have received a copy of the GNU Lesser General Public License
  18503. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18504. *
  18505. * hud.ts
  18506. * Heads Up Display
  18507. */
  18508. /**
  18509. * Heads Up Display: an overlay displayed in front of the augmented scene
  18510. */
  18511. class HUD {
  18512. /**
  18513. * Constructor
  18514. * @param parent parent of the hud container
  18515. * @param hudContainer an existing hud container (optional)
  18516. */
  18517. constructor(parent, hudContainer) {
  18518. this._container = hudContainer || this._createContainer(parent);
  18519. this._ownContainer = (hudContainer == null);
  18520. // validate
  18521. if (this._container.parentElement !== parent)
  18522. throw new IllegalArgumentError('The container of the HUD must be a direct child of the container of the viewport');
  18523. // the HUD should be hidden initially
  18524. if (!this._container.hidden)
  18525. Utils.warning(`The container of the HUD should have the hidden attribute`);
  18526. }
  18527. /**
  18528. * The container of the HUD
  18529. */
  18530. get container() {
  18531. return this._container;
  18532. }
  18533. /**
  18534. * Whether or not the HUD is visible
  18535. */
  18536. get visible() {
  18537. return !this._container.hidden;
  18538. }
  18539. /**
  18540. * Whether or not the HUD is visible
  18541. */
  18542. set visible(visible) {
  18543. this._container.hidden = !visible;
  18544. }
  18545. /**
  18546. * Initialize the HUD
  18547. * @param zIndex the z-index of the container
  18548. * @internal
  18549. */
  18550. _init(zIndex) {
  18551. const container = this._container;
  18552. container.style.position = 'absolute';
  18553. container.style.left = container.style.top = '0px';
  18554. container.style.right = container.style.bottom = '0px';
  18555. container.style.padding = container.style.margin = '0px';
  18556. container.style.zIndex = String(zIndex);
  18557. container.style.userSelect = 'none';
  18558. }
  18559. /**
  18560. * Release the HUD
  18561. * @internal
  18562. */
  18563. _release() {
  18564. if (this._ownContainer) {
  18565. this._ownContainer = false;
  18566. this._container.remove();
  18567. }
  18568. }
  18569. /**
  18570. * Create a HUD container as an immediate child of the input node
  18571. * @param parent parent container
  18572. * @returns HUD container
  18573. */
  18574. _createContainer(parent) {
  18575. const node = document.createElement('div');
  18576. node.hidden = true;
  18577. parent.appendChild(node);
  18578. return node;
  18579. }
  18580. }
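/*
 * Illustrative sketch (not part of the bundle): supplying an existing HUD
 * container. It must be a direct child of the viewport container and should
 * carry the hidden attribute initially, as validated by the HUD constructor
 * above. Element ids are hypothetical.
 *
 *   <div id="ar-viewport">
 *       <div id="ar-hud" hidden>
 *           <!-- overlay elements -->
 *       </div>
 *   </div>
 *
 *   const parent = document.getElementById('ar-viewport');
 *   const hudContainer = document.getElementById('ar-hud');
 *   const hud = new HUD(parent, hudContainer); // passes validation
 */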
  18581. ;// CONCATENATED MODULE: ./src/core/viewport.ts
  18582. /*
  18583. * MARTINS.js
  18584. * GPU-accelerated Augmented Reality for the web
  18585. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18586. *
  18587. * This program is free software: you can redistribute it and/or modify
  18588. * it under the terms of the GNU Lesser General Public License as published
  18589. * by the Free Software Foundation, either version 3 of the License, or
  18590. * (at your option) any later version.
  18591. *
  18592. * This program is distributed in the hope that it will be useful,
  18593. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18594. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18595. * GNU Lesser General Public License for more details.
  18596. *
  18597. * You should have received a copy of the GNU Lesser General Public License
  18598. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18599. *
  18600. * viewport.ts
  18601. * Viewport
  18602. */
  18603. /** An event emitted by a Viewport */
  18604. class ViewportEvent extends AREvent {
  18605. }
  18606. /** Default viewport constructor settings */
  18607. const DEFAULT_VIEWPORT_SETTINGS = {
  18608. container: null,
  18609. hudContainer: null,
  18610. resolution: 'lg',
  18611. canvas: null,
  18612. };
  18613. /** Base z-index of the children of the viewport container */
  18614. const BASE_ZINDEX = 0;
  18615. /** Default viewport width, in pixels */
  18616. const DEFAULT_VIEWPORT_WIDTH = 300;
  18617. /** Default viewport height, in pixels */
  18618. const DEFAULT_VIEWPORT_HEIGHT = 150;
  18619. /**
  18620. * Viewport
  18621. */
  18622. class BaseViewport extends AREventTarget {
  18623. /**
  18624. * Constructor
  18625. * @param viewportSettings
  18626. */
  18627. constructor(viewportSettings) {
  18628. super();
  18629. // validate settings
  18630. const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
  18631. if (settings.container == null)
  18632. throw new IllegalArgumentError('Unspecified viewport container');
  18633. // initialize attributes
  18634. this._resolution = settings.resolution;
  18635. this._container = settings.container;
  18636. this._hud = new HUD(settings.container, settings.hudContainer);
  18637. this._parentOfImportedForegroundCanvas = settings.canvas ? settings.canvas.parentNode : null;
  18638. // create canvas elements
  18639. const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
  18640. this._backgroundCanvas = this._createBackgroundCanvas(this._container, size);
  18641. this._foregroundCanvas = settings.canvas == null ?
  18642. this._createForegroundCanvas(this._container, size) :
18643. this._importForegroundCanvas(settings.canvas, this._container, size);
  18644. }
  18645. /**
  18646. * Viewport container
  18647. */
  18648. get container() {
  18649. return this._container;
  18650. }
  18651. /**
  18652. * HUD
  18653. */
  18654. get hud() {
  18655. return this._hud;
  18656. }
  18657. /**
  18658. * Resolution of the virtual scene
  18659. */
  18660. get resolution() {
  18661. return this._resolution;
  18662. }
  18663. /**
  18664. * Size in pixels of the drawing buffer of the canvas
  18665. * on which the virtual scene will be drawn
  18666. */
  18667. get virtualSize() {
  18668. const aspectRatio = this._backgroundCanvas.width / this._backgroundCanvas.height;
  18669. return Utils.resolution(this._resolution, aspectRatio);
  18670. }
  18671. /**
  18672. * The canvas on which the virtual scene will be drawn
  18673. */
  18674. get canvas() {
  18675. return this._foregroundCanvas;
  18676. }
  18677. /**
  18678. * Background canvas
  18679. * @internal
  18680. */
  18681. get _background() {
  18682. return this._backgroundCanvas;
  18683. }
  18684. /**
  18685. * Size of the drawing buffer of the background canvas, in pixels
  18686. * @internal
  18687. */
  18688. get _size() {
  18689. throw new IllegalOperationError();
  18690. }
  18691. /**
  18692. * Initialize the viewport (when the session starts)
  18693. * @internal
  18694. */
  18695. _init() {
  18696. this._container.style.touchAction = 'none';
  18697. this._hud._init(BASE_ZINDEX + 2);
  18698. this._hud.visible = true;
  18699. }
  18700. /**
18701. * Release the viewport (when the session ends)
  18702. * @internal
  18703. */
  18704. _release() {
  18705. //this._hud.visible = false; // depends on the type of the viewport
  18706. this._hud._release();
  18707. this._restoreImportedForegroundCanvas();
  18708. this._container.style.touchAction = 'auto';
  18709. }
  18710. /**
  18711. * Function to be called when the viewport is resized
  18712. * @internal
  18713. */
  18714. _onResize() {
  18715. // Resize the drawing buffer of the foreground canvas, so that it
  18716. // matches the desired resolution and the aspect ratio of the
  18717. // background canvas
  18718. const virtualSize = this.virtualSize;
  18719. this._foregroundCanvas.width = virtualSize.width;
  18720. this._foregroundCanvas.height = virtualSize.height;
  18721. this._styleCanvas(this._foregroundCanvas, 'foreground');
  18722. // dispatch event
  18723. const event = new ViewportEvent('resize');
  18724. this.dispatchEvent(event);
  18725. }
  18726. /**
  18727. * Create the background canvas
  18728. * @param parent parent container
  18729. * @param size size of the drawing buffer
  18730. * @returns a new canvas as a child of parent
  18731. */
  18732. _createBackgroundCanvas(parent, size) {
  18733. const canvas = this._createCanvas(parent, size);
  18734. return this._styleCanvas(canvas, 'background');
  18735. }
  18736. /**
  18737. * Create the foreground canvas
  18738. * @param parent parent container
  18739. * @param size size of the drawing buffer
  18740. * @returns a new canvas as a child of parent
  18741. */
  18742. _createForegroundCanvas(parent, size) {
  18743. const canvas = this._createCanvas(parent, size);
  18744. return this._styleCanvas(canvas, 'foreground');
  18745. }
  18746. /**
  18747. * Create a canvas and attach it to another HTML element
  18748. * @param parent parent container
  18749. * @param size size of the drawing buffer
  18750. * @returns a new canvas as a child of parent
  18751. */
  18752. _createCanvas(parent, size) {
  18753. const canvas = document.createElement('canvas');
  18754. canvas.width = size.width;
  18755. canvas.height = size.height;
  18756. parent.appendChild(canvas);
  18757. return canvas;
  18758. }
  18759. /**
  18760. * Add suitable CSS rules to a canvas
  18761. * @param canvas
  18762. * @param canvasType
  18763. * @returns canvas
  18764. */
  18765. _styleCanvas(canvas, canvasType) {
  18766. const offset = (canvasType == 'foreground') ? 1 : 0;
  18767. const zIndex = BASE_ZINDEX + offset;
  18768. canvas.setAttribute('style', [
  18769. 'position: absolute',
  18770. 'left: 0px',
  18771. 'top: 0px',
  18772. 'z-index: ' + String(zIndex),
  18773. 'width: 100% !important',
  18774. 'height: 100% !important',
  18775. ].join('; '));
  18776. return canvas;
  18777. }
  18778. /**
  18779. * Import an existing foreground canvas to the viewport
  18780. * @param canvas existing canvas
  18781. * @param parent parent container
  18782. * @param size size of the drawing buffer
  18783. * @returns the input canvas
  18784. */
  18785. _importForegroundCanvas(canvas, parent, size) {
  18786. if (!(canvas instanceof HTMLCanvasElement))
  18787. throw new IllegalArgumentError(`Not a <canvas>: ${canvas}`);
  18788. // borrow the canvas; add it as a child of the viewport container
  18789. canvas.remove();
  18790. parent.appendChild(canvas);
  18791. canvas.width = size.width;
  18792. canvas.height = size.height;
  18793. canvas.dataset.cssText = canvas.style.cssText; // save CSS
  18794. canvas.style.cssText = ''; // clear CSS
  18795. this._styleCanvas(canvas, 'foreground');
  18796. return canvas;
  18797. }
  18798. /**
  18799. * Restore a previously imported foreground canvas to its original parent
  18800. */
  18801. _restoreImportedForegroundCanvas() {
  18802. // not an imported canvas; nothing to do
  18803. if (this._parentOfImportedForegroundCanvas == null)
  18804. return;
  18805. const canvas = this._foregroundCanvas;
  18806. canvas.style.cssText = canvas.dataset.cssText || ''; // restore CSS
  18807. canvas.remove();
  18808. this._parentOfImportedForegroundCanvas.appendChild(canvas);
  18809. }
  18810. }
  18811. /**
  18812. * Viewport decorator
  18813. */
  18814. class ViewportDecorator extends AREventTarget {
  18815. /**
  18816. * Constructor
  18817. * @param base to be decorated
  18818. * @param getSize size getter
  18819. */
  18820. constructor(base, getSize) {
  18821. super();
  18822. this._base = base;
  18823. this._getSize = getSize;
  18824. }
  18825. /**
  18826. * Viewport container
  18827. */
  18828. get container() {
  18829. return this._base.container;
  18830. }
  18831. /**
  18832. * HUD
  18833. */
  18834. get hud() {
  18835. return this._base.hud;
  18836. }
  18837. /**
  18838. * Resolution of the virtual scene
  18839. */
  18840. get resolution() {
  18841. return this._base.resolution;
  18842. }
  18843. /**
  18844. * Size in pixels of the drawing buffer of the canvas
  18845. * on which the virtual scene will be drawn
  18846. */
  18847. get virtualSize() {
  18848. return this._base.virtualSize;
  18849. }
  18850. /**
  18851. * The canvas on which the virtual scene will be drawn
  18852. */
  18853. get canvas() {
  18854. return this._base.canvas;
  18855. }
  18856. /**
  18857. * Background canvas
  18858. * @internal
  18859. */
  18860. get _background() {
  18861. return this._base._background;
  18862. }
  18863. /**
  18864. * Size of the drawing buffer of the background canvas, in pixels
  18865. * @internal
  18866. */
  18867. get _size() {
  18868. return this._getSize();
  18869. }
  18870. /**
  18871. * Initialize the viewport
  18872. * @internal
  18873. */
  18874. _init() {
  18875. this._base._init();
  18876. }
  18877. /**
  18878. * Release the viewport
  18879. * @internal
  18880. */
  18881. _release() {
  18882. this._base._release();
  18883. }
  18884. /**
  18885. * Function to be called when the viewport is resized
  18886. * @internal
  18887. */
  18888. _onResize() {
  18889. this._base._onResize();
  18890. }
  18891. /**
  18892. * Add event listener
  18893. * @param type event type
  18894. * @param callback
  18895. */
  18896. addEventListener(type, callback) {
  18897. this._base.addEventListener(type, callback);
  18898. }
  18899. /**
  18900. * Remove event listener
  18901. * @param type event type
  18902. * @param callback
  18903. */
  18904. removeEventListener(type, callback) {
  18905. this._base.removeEventListener(type, callback);
  18906. }
  18907. /**
  18908. * Synchronously trigger an event
  18909. * @param event
  18910. * @returns same value as a standard event target
  18911. * @internal
  18912. */
  18913. dispatchEvent(event) {
  18914. return this._base.dispatchEvent(event);
  18915. }
  18916. }
  18917. /**
  18918. * A viewport that watches for page resizes
  18919. */
  18920. class ResizableViewport extends ViewportDecorator {
  18921. /**
  18922. * Constructor
  18923. * @param base to be decorated
  18924. * @param getSize size getter
  18925. */
  18926. constructor(base, getSize) {
  18927. super(base, getSize);
  18928. this._active = false;
  18929. }
  18930. /**
  18931. * Initialize the viewport
  18932. * @internal
  18933. */
  18934. _init() {
  18935. super._init();
  18936. this._active = true;
  18937. // Configure the resize listener. We want the viewport
  18938. // to adjust itself if the phone/screen is resized or
  18939. // changes orientation
  18940. let timeout = null;
  18941. const onresize = () => {
  18942. if (!this._active) {
  18943. window.removeEventListener('resize', onresize);
  18944. return;
  18945. }
  18946. if (timeout !== null)
  18947. clearTimeout(timeout);
  18948. timeout = setTimeout(() => {
  18949. timeout = null;
  18950. this._resize.call(this);
  18951. this._onResize.call(this);
  18952. }, 100);
  18953. };
  18954. window.addEventListener('resize', onresize);
  18955. this._resize();
  18956. this._onResize();
  18957. }
  18958. /**
  18959. * Release the viewport
  18960. * @internal
  18961. */
  18962. _release() {
  18963. this._active = false;
  18964. super._release();
  18965. }
  18966. }
  18967. /**
  18968. * Immersive viewport: it occupies the entire page
  18969. */
  18970. class ImmersiveViewport extends ResizableViewport {
  18971. /**
  18972. * Release the viewport
  18973. * @internal
  18974. */
  18975. _release() {
  18976. this.canvas.remove();
  18977. this._background.remove();
  18978. this.hud.visible = false;
  18979. this.container.style.cssText = ''; // reset CSS
  18980. super._release();
  18981. }
  18982. /**
  18983. * Resize the immersive viewport, so that it occupies the entire page.
  18984. * We respect the aspect ratio of the source media
  18985. */
  18986. _resize() {
  18987. const { width, height } = this._size;
  18988. const viewportSize = speedy_vision_default().Size(0, 0);
  18989. const viewportAspectRatio = width / height;
  18990. const windowSize = speedy_vision_default().Size(window.innerWidth, window.innerHeight);
  18991. const windowAspectRatio = windowSize.width / windowSize.height;
  18992. // figure out the viewport size
  18993. if (viewportAspectRatio <= windowAspectRatio) {
  18994. viewportSize.height = windowSize.height;
  18995. viewportSize.width = (viewportSize.height * viewportAspectRatio) | 0;
  18996. }
  18997. else {
  18998. viewportSize.width = windowSize.width;
  18999. viewportSize.height = (viewportSize.width / viewportAspectRatio) | 0;
  19000. }
  19001. // position the viewport and set its size
  19002. const container = this.container;
  19003. container.style.position = 'fixed';
  19004. container.style.left = `calc(50% - ${viewportSize.width >>> 1}px)`;
  19005. container.style.top = `calc(50% - ${viewportSize.height >>> 1}px)`;
  19006. container.style.zIndex = '1000000000'; // 1B //String(2147483647);
  19007. container.style.width = viewportSize.width + 'px';
  19008. container.style.height = viewportSize.height + 'px';
  19009. container.style.backgroundColor = '#000';
  19010. // set the size of the drawing buffer of the background canvas
  19011. const backgroundCanvas = this._background;
  19012. const backgroundCanvasAspectRatio = viewportAspectRatio;
  19013. const referenceHeight = height;
  19014. backgroundCanvas.height = referenceHeight;
  19015. backgroundCanvas.width = (backgroundCanvas.height * backgroundCanvasAspectRatio) | 0;
  19016. }
  19017. }
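// The letterbox fit performed by ImmersiveViewport._resize() can be checked with a
// small worked example (the numbers below are illustrative, not taken from the bundle):
/*
   media drawing buffer: 640x480  =>  viewportAspectRatio = 4/3 ~ 1.333
   window:              1280x720  =>  windowAspectRatio  = 16/9 ~ 1.778
   since 1.333 <= 1.778, the viewport takes the full window height:
       viewportSize.height = 720
       viewportSize.width  = (720 * 4/3) | 0 = 960
   the 960x720 container is then centered with position:fixed, and the leftover
   320px of horizontal space (160px per side) show as black bars (#000 background).
*/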
  19018. /**
  19019. * Inline viewport: it follows the typical flow of a web page
  19020. */
  19021. class InlineViewport extends ResizableViewport {
  19022. /**
  19023. * Resize the inline viewport
  19024. */
  19025. _resize() {
  19026. const { width, height } = this._size;
  19027. this.container.style.position = 'relative';
  19028. this.container.style.width = width + 'px';
  19029. this.container.style.height = height + 'px';
  19030. //this.container.style.display = 'inline-block';
  19031. this._background.width = width;
  19032. this._background.height = height;
  19033. }
  19034. }
  19035. ;// CONCATENATED MODULE: ./src/core/stats.ts
  19036. /*
  19037. * MARTINS.js
  19038. * GPU-accelerated Augmented Reality for the web
  19039. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19040. *
  19041. * This program is free software: you can redistribute it and/or modify
  19042. * it under the terms of the GNU Lesser General Public License as published
  19043. * by the Free Software Foundation, either version 3 of the License, or
  19044. * (at your option) any later version.
  19045. *
  19046. * This program is distributed in the hope that it will be useful,
  19047. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19048. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19049. * GNU Lesser General Public License for more details.
  19050. *
  19051. * You should have received a copy of the GNU Lesser General Public License
  19052. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19053. *
  19054. * stats.ts
  19055. * Stats for performance measurements
  19056. */
  19057. /** update interval, given in seconds */
  19058. const UPDATE_INTERVAL = 0.5;
  19059. /**
  19060. * Stats for performance measurements
  19061. */
  19062. class Stats {
  19063. /**
  19064. * Constructor
  19065. */
  19066. constructor() {
  19067. this._timeOfLastUpdate = this._now();
  19068. this._partialCycleCount = 0;
  19069. this._cyclesPerSecond = 0;
  19070. }
  19071. /**
  19072. * Update stats - call every frame
  19073. */
  19074. update() {
  19075. const now = this._now();
  19076. ++this._partialCycleCount;
  19077. if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
  19078. this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
  19079. this._partialCycleCount = 0;
  19080. this._timeOfLastUpdate = now;
  19081. }
  19082. }
  19083. /**
  19084. * Reset stats
  19085. */
  19086. reset() {
  19087. this._timeOfLastUpdate = this._now();
  19088. this._partialCycleCount = 0;
  19089. this._cyclesPerSecond = 0;
  19090. }
  19091. /**
  19092. * Number of cycles per second
  19093. */
  19094. get cyclesPerSecond() {
  19095. return this._cyclesPerSecond;
  19096. }
  19097. /**
  19098. * A measurement of time, in milliseconds
  19099. * @returns time in ms
  19100. */
  19101. _now() {
  19102. return performance.now();
  19103. }
  19104. }
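// A minimal usage sketch of Stats, assuming it is driven once per animation frame
// (the loop below is illustrative and not part of the bundle):
/*
const stats = new Stats();
function measuredLoop() {
    stats.update();                      // call every frame
    // refreshed every UPDATE_INTERVAL (0.5 s):
    console.log(stats.cyclesPerSecond);  // ~ frames per second of this loop
    requestAnimationFrame(measuredLoop);
}
requestAnimationFrame(measuredLoop);
*/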
  19105. ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
  19106. /*
  19107. * MARTINS.js
  19108. * GPU-accelerated Augmented Reality for the web
  19109. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19110. *
  19111. * This program is free software: you can redistribute it and/or modify
  19112. * it under the terms of the GNU Lesser General Public License as published
  19113. * by the Free Software Foundation, either version 3 of the License, or
  19114. * (at your option) any later version.
  19115. *
  19116. * This program is distributed in the hope that it will be useful,
  19117. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19118. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19119. * GNU Lesser General Public License for more details.
  19120. *
  19121. * You should have received a copy of the GNU Lesser General Public License
  19122. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19123. *
  19124. * stats-panel.ts
  19125. * Stats panel used for development purposes
  19126. */
  19127. /** Update interval, in ms */
  19128. const stats_panel_UPDATE_INTERVAL = 500;
  19129. /** Icons for different power profiles */
  19130. const POWER_ICON = Object.freeze({
  19131. 'default': '',
  19132. 'low-power': '<span style="color:#0f0">&#x1F50B</span>',
  19133. 'high-performance': '<span style="color:#ff0">&#x26A1</span>'
  19134. });
  19135. /**
  19136. * Stats panel used for development purposes
  19137. */
  19138. class StatsPanel {
  19139. /**
  19140. * Constructor
  19141. * @param parent parent element of the panel
  19142. */
  19143. constructor(parent) {
  19144. this._container = this._createContainer(parent);
  19145. this._lastUpdate = 0;
  19146. }
  19147. /**
  19148. * Release the panel
  19149. */
  19150. release() {
  19151. this._container.remove();
  19152. }
  19153. /**
  19154. * A method to be called in the update loop
  19155. * @param time current time in ms
  19156. * @param trackers the trackers attached to the session
  19157. * @param sources the sources of media linked to the session
  19158. * @param gpu GPU cycles per second
  19159. * @param fps frames per second
  19160. */
  19161. update(time, trackers, sources, gpu, fps) {
  19162. if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
  19163. this._lastUpdate = time;
  19164. this._update(trackers, sources, fps, gpu);
  19165. }
  19166. }
  19167. /**
  19168. * Visibility of the panel
  19169. */
  19170. get visible() {
  19171. return !this._container.hidden;
  19172. }
  19173. /**
  19174. * Visibility of the panel
  19175. */
  19176. set visible(visible) {
  19177. this._container.hidden = !visible;
  19178. }
  19179. /**
  19180. * Update the contents of the panel
  19181. * @param trackers the trackers attached to the session
  19182. * @param sources the sources of media linked to the session
  19183. * @param fps frames per second
  19184. * @param gpu GPU cycles per second
  19185. */
  19186. _update(trackers, sources, fps, gpu) {
  19187. const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
  19188. const sourceStats = sources.map(source => source._stats).join(', ');
  19189. const param = {
  19190. fps: this._colorize(fps),
  19191. gpu: this._colorize(gpu),
  19192. powerIcon: POWER_ICON[Settings.powerPreference]
  19193. };
  19194. this._container.textContent = (`MARTINS.js v${Martins.version}
  19195. FPS: [fps] | GPU: [gpu] [powerIcon]
  19196. IN : ${sourceStats}
  19197. OUT: ${trackerStats}`);
  19198. const fn = (_, x) => param[x];
  19199. this._container.innerHTML = this._container.innerHTML.replace(/\[(\w+)\]/g, fn);
  19200. }
  19201. /**
  19202. * Colorize a frequency number
  19203. * @param f frequency given in cycles per second
  19204. * @returns colorized number (HTML)
  19205. */
  19206. _colorize(f) {
  19207. const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
  19208. const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
  19209. const color2 = f >= 30 ? GREEN : RED;
  19210. const color = Settings.powerPreference != 'low-power' ? color3 : color2;
  19211. return `<span style="color:${color}">${Number(f)}</span>`;
  19212. }
  19213. /**
  19214. * Create the container for the panel
  19215. * @param parent parent element
  19216. * @returns a container
  19217. */
  19218. _createContainer(parent) {
  19219. const container = document.createElement('div');
  19220. container.style.position = 'absolute';
  19221. container.style.left = container.style.top = '0px';
  19222. container.style.zIndex = '1000000';
  19223. container.style.padding = '4px';
  19224. container.style.whiteSpace = 'pre-line';
  19225. container.style.backgroundColor = 'rgba(0,0,0,0.5)';
  19226. container.style.color = '#fff';
  19227. container.style.fontFamily = 'monospace';
  19228. container.style.fontSize = '14px';
  19229. parent.appendChild(container);
  19230. return container;
  19231. }
  19232. }
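// The panel text is written in two passes: plain text containing [placeholders],
// followed by an innerHTML pass that swaps each [name] for an HTML snippet via the
// regex used in _update(). A minimal sketch of that substitution (values are illustrative):
/*
const param = { fps: '<span style="color:#0f0">60</span>', gpu: '<span style="color:#ff0">45</span>', powerIcon: '' };
const text = 'FPS: [fps] | GPU: [gpu] [powerIcon]';
const html = text.replace(/\[(\w+)\]/g, (_, x) => param[x]);
// html === 'FPS: <span style="color:#0f0">60</span> | GPU: <span style="color:#ff0">45</span> '
*/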
  19233. ;// CONCATENATED MODULE: ./src/core/frame.ts
  19234. /*
  19235. * MARTINS.js
  19236. * GPU-accelerated Augmented Reality for the web
  19237. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19238. *
  19239. * This program is free software: you can redistribute it and/or modify
  19240. * it under the terms of the GNU Lesser General Public License as published
  19241. * by the Free Software Foundation, either version 3 of the License, or
  19242. * (at your option) any later version.
  19243. *
  19244. * This program is distributed in the hope that it will be useful,
  19245. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19246. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19247. * GNU Lesser General Public License for more details.
  19248. *
  19249. * You should have received a copy of the GNU Lesser General Public License
  19250. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19251. *
  19252. * frame.ts
  19253. * A Frame holds information used to render a single animation frame of a Session
  19254. */
  19255. /**
  19256. * Iterable frame results (helper class)
  19257. */
  19258. class IterableTrackerResults {
  19259. constructor(_results) {
  19260. this._results = _results;
  19261. this._index = 0;
  19262. }
  19263. next() {
  19264. const i = this._index++;
  19265. return i < this._results.length ?
  19266. { done: false, value: this._results[i] } :
  19267. { done: true, value: undefined };
  19268. }
  19269. [Symbol.iterator]() {
  19270. return this;
  19271. }
  19272. }
  19273. /**
  19274. * A Frame holds information used to render a single animation frame of a Session
  19275. */
  19276. class Frame {
  19277. /**
  19278. * Constructor
  19279. * @param session
  19280. * @param results
  19281. */
  19282. constructor(session, results) {
  19283. this._session = session;
  19284. this._results = new IterableTrackerResults(results);
  19285. }
  19286. /**
  19287. * The session of which this frame holds data
  19288. */
  19289. get session() {
  19290. return this._session;
  19291. }
  19292. /**
  19293. * The results of all trackers in this frame
  19294. */
  19295. get results() {
  19296. return this._results;
  19297. }
  19298. }
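// Since IterableTrackerResults implements the iterator protocol, the results of a
// Frame can be consumed with a plain for...of loop. A sketch, assuming a session
// object and the (time, frame) callback signature of Session.requestAnimationFrame()
// defined further below:
/*
session.requestAnimationFrame((time, frame) => {
    for (const result of frame.results) {
        // result.tracker, result.trackables, ... (the exact shape depends on the tracker)
    }
});
*/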
  19299. ;// CONCATENATED MODULE: ./src/core/time.ts
  19300. /*
  19301. * MARTINS.js
  19302. * GPU-accelerated Augmented Reality for the web
  19303. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19304. *
  19305. * This program is free software: you can redistribute it and/or modify
  19306. * it under the terms of the GNU Lesser General Public License as published
  19307. * by the Free Software Foundation, either version 3 of the License, or
  19308. * (at your option) any later version.
  19309. *
  19310. * This program is distributed in the hope that it will be useful,
  19311. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19312. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19313. * GNU Lesser General Public License for more details.
  19314. *
  19315. * You should have received a copy of the GNU Lesser General Public License
  19316. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19317. *
  19318. * time.ts
  19319. * Time utilities
  19320. */
  19321. /**
  19322. * Time Manager
  19323. */
  19324. class Time {
  19325. constructor() {
  19326. /** time scale */
  19327. this._scale = 1;
  19328. /** time since the start of the session, in milliseconds */
  19329. this._time = 0;
  19330. /** unscaled time since the start of the session, in milliseconds */
  19331. this._unscaledTime = 0;
  19332. /** elapsed time between the current and the previous frame, in milliseconds */
  19333. this._delta = 0;
  19334. /** time of the first update call, in milliseconds */
  19335. this._firstUpdate = 0;
  19336. /** time of the last update call, in milliseconds */
  19337. this._lastUpdate = Number.POSITIVE_INFINITY;
  19338. }
  19339. /**
  19340. * Update the Time Manager
  19341. * @param timestamp in milliseconds
  19342. * @internal
  19343. */
  19344. _update(timestamp) {
  19345. if (timestamp < this._lastUpdate) {
  19346. this._firstUpdate = this._lastUpdate = timestamp;
  19347. return;
  19348. }
  19349. this._delta = (timestamp - this._lastUpdate) * this._scale;
  19350. this._time += this._delta;
  19351. this._unscaledTime = timestamp - this._firstUpdate;
  19352. this._lastUpdate = timestamp;
  19353. }
  19354. /**
  19355. * Elapsed time since the start of the session, measured at the
  19356. * beginning of the current animation frame and given in seconds
  19357. */
  19358. get elapsed() {
  19359. return this._time * 0.001;
  19360. }
  19361. /**
  19362. * Elapsed time between the current and the previous animation
  19363. * frame, given in seconds
  19364. */
  19365. get delta() {
  19366. return this._delta * 0.001;
  19367. }
  19368. /**
  19369. * Time scale (defaults to 1)
  19370. */
  19371. get scale() {
  19372. return this._scale;
  19373. }
  19374. /**
  19375. * Time scale (defaults to 1)
  19376. */
  19377. set scale(scale) {
  19378. this._scale = Math.max(0, +scale);
  19379. }
  19380. /**
  19381. * Time scale independent elapsed time since the start of the session,
  19382. * measured at the beginning of the current animation frame and given
  19383. * in seconds
  19384. */
  19385. get unscaled() {
  19386. return this._unscaledTime * 0.001;
  19387. }
  19388. }
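// A sketch of how the Time manager is typically read inside an animation frame
// callback (assuming a session object as created further below; illustrative only):
/*
session.requestAnimationFrame((time, frame) => {
    const dt = frame.session.time.delta;    // seconds since the previous frame
    const t  = frame.session.time.elapsed;  // scaled seconds since the session started
    frame.session.time.scale = 0.5;         // e.g., run the scaled clock at half speed
});
*/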
  19389. ;// CONCATENATED MODULE: ./src/core/gizmos.ts
  19390. /*
  19391. * MARTINS.js
  19392. * GPU-accelerated Augmented Reality for the web
  19393. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19394. *
  19395. * This program is free software: you can redistribute it and/or modify
  19396. * it under the terms of the GNU Lesser General Public License as published
  19397. * by the Free Software Foundation, either version 3 of the License, or
  19398. * (at your option) any later version.
  19399. *
  19400. * This program is distributed in the hope that it will be useful,
  19401. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19402. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19403. * GNU Lesser General Public License for more details.
  19404. *
  19405. * You should have received a copy of the GNU Lesser General Public License
  19406. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19407. *
  19408. * gizmos.ts
  19409. * Visual cues for testing & debugging
  19410. */
  19411. /** The maximum match distance ratio we'll consider to be "good" */
  19412. const GOOD_MATCH_THRESHOLD = 0.7;
  19413. /**
  19414. * Visual cues for testing & debugging
  19415. */
  19416. class Gizmos {
  19417. /**
  19418. * Constructor
  19419. */
  19420. constructor() {
  19421. this._visible = false;
  19422. }
  19423. /**
  19424. * Whether or not the gizmos will be rendered
  19425. */
  19426. get visible() {
  19427. return this._visible;
  19428. }
  19429. /**
  19430. * Whether or not the gizmos will be rendered
  19431. */
  19432. set visible(visible) {
  19433. this._visible = visible;
  19434. }
  19435. /**
  19436. * Render gizmos
  19437. * @param viewport
  19438. * @param trackers
  19439. * @internal
  19440. */
  19441. _render(viewport, trackers) {
  19442. // no need to render?
  19443. if (!this._visible)
  19444. return;
  19445. // viewport
  19446. const viewportSize = viewport._size;
  19447. const canvas = viewport._background;
  19448. const ctx = canvas.getContext('2d', { alpha: false });
  19449. if (!ctx)
  19450. throw new IllegalOperationError();
  19451. // debug
  19452. //ctx.fillStyle = '#000';
  19453. //ctx.fillRect(0, 0, canvas.width, canvas.height);
  19454. //ctx.clearRect(0, 0, canvas.width, canvas.height);
  19455. // render keypoints
  19456. for (let i = 0; i < trackers.length; i++) {
  19457. if (trackers[i].type != 'image-tracker')
  19458. continue;
  19459. const output = trackers[i]._output;
  19460. const keypoints = output.keypoints;
  19461. const screenSize = output.screenSize;
  19462. if (keypoints !== undefined && screenSize !== undefined)
  19463. this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
  19464. }
  19465. // render polylines
  19466. for (let i = 0; i < trackers.length; i++) {
  19467. if (trackers[i].type != 'image-tracker')
  19468. continue;
  19469. const output = trackers[i]._output;
  19470. const polyline = output.polyline;
  19471. const screenSize = output.screenSize;
  19472. if (polyline !== undefined && screenSize !== undefined)
  19473. this._renderPolyline(ctx, polyline, screenSize, viewportSize);
  19474. }
  19475. // render the axes of the 3D coordinate system
  19476. for (let i = 0; i < trackers.length; i++) {
  19477. if (trackers[i].type != 'image-tracker')
  19478. continue;
  19479. const output = trackers[i]._output;
  19480. const cameraMatrix = output.cameraMatrix;
  19481. const screenSize = output.screenSize;
  19482. if (cameraMatrix !== undefined && screenSize !== undefined)
  19483. this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
  19484. }
  19485. }
  19486. /**
  19487. * Split keypoints in matched/unmatched categories and
  19488. * render them for testing & development purposes
  19489. * @param ctx canvas 2D context
  19490. * @param keypoints keypoints to render
  19491. * @param screenSize AR screen size
  19492. * @param viewportSize viewport size
  19493. * @param size base keypoint rendering size
  19494. */
  19495. _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
  19496. if (keypoints.length == 0)
  19497. return;
  19498. if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
  19499. this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
  19500. return;
  19501. }
  19502. const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
  19503. (keypoint.matches.length > 1 &&
  19504. keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
  19505. keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
  19506. const matchedKeypoints = keypoints;
  19507. const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
  19508. const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
  19509. this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
  19510. this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
  19511. }
  19512. /**
  19513. * Render keypoints for testing & development purposes
  19514. * @param ctx canvas 2D context
  19515. * @param keypoints keypoints to render
  19516. * @param screenSize AR screen size
  19517. * @param viewportSize viewport size
  19518. * @param color color of the rendered keypoints
  19519. * @param size base keypoint rendering size
  19520. */
  19521. _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
  19522. const sx = viewportSize.width / screenSize.width;
  19523. const sy = viewportSize.height / screenSize.height;
  19524. ctx.beginPath();
  19525. for (let i = keypoints.length - 1; i >= 0; i--) {
  19526. const keypoint = keypoints[i];
  19527. const x = (keypoint.x * sx + 0.5) | 0;
  19528. const y = (keypoint.y * sy + 0.5) | 0;
  19529. const r = (size * keypoint.scale + 0.5) | 0;
  19530. ctx.rect(x - r, y - r, 2 * r, 2 * r);
  19531. }
  19532. ctx.strokeStyle = color;
  19533. ctx.lineWidth = 1;
  19534. ctx.stroke();
  19535. }
  19536. /**
  19537. * Render polyline for testing & development purposes
  19538. * @param ctx canvas 2D context
  19539. * @param polyline vertices
  19540. * @param screenSize AR screen size
  19541. * @param viewportSize viewport size
  19542. * @param color color of the rendered polyline
  19543. * @param lineWidth
  19544. */
  19545. _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
  19546. if (polyline.length == 0)
  19547. return;
  19548. const n = polyline.length;
  19549. const sx = viewportSize.width / screenSize.width;
  19550. const sy = viewportSize.height / screenSize.height;
  19551. // render polyline
  19552. ctx.beginPath();
  19553. ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
  19554. for (let j = 0; j < n; j++)
  19555. ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
  19556. ctx.strokeStyle = color;
  19557. ctx.lineWidth = lineWidth;
  19558. ctx.stroke();
  19559. }
  19560. /**
  19561. * Render the axes of a 3D coordinate system
  19562. * @param ctx canvas 2D context
  19563. * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
  19564. * @param screenSize AR screen size
  19565. * @param viewportSize viewport size
  19566. * @param lineWidth
  19567. */
  19568. _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
  19569. const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
  19570. const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
  19571. const length = 1; // length of each axis-corresponding line, given in normalized space units
  19572. const sx = viewportSize.width / screenSize.width;
  19573. const sy = viewportSize.height / screenSize.height;
  19574. /*
  19575. Multiply the 3x4 camera matrix P by:
  19576. [ 0 L 0 0 ]
  19577. [ 0 0 L 0 ] , where L = length in normalized space of the lines
  19578. [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
  19579. [ 1 1 1 1 ]
  19580. Each column of the resulting matrix will give us the pixel coordinates
  19581. we're looking for.
  19582. Note: we're working with homogeneous coordinates
  19583. */
  19584. const p = cameraMatrix.read();
  19585. const l = length;
  19586. const o = [p[9], p[10], p[11]]; // origin of the coordinate system
  19587. const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
  19588. const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
  19589. const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
  19590. const axis = [x, y, z];
  19591. // draw each axis
  19592. const ox = o[0] / o[2], oy = o[1] / o[2];
  19593. for (let i = 0; i < 3; i++) {
  19594. const q = axis[i];
  19595. const x = q[0] / q[2], y = q[1] / q[2];
  19596. ctx.beginPath();
  19597. ctx.moveTo(ox * sx, oy * sy);
  19598. ctx.lineTo(x * sx, y * sy);
  19599. ctx.strokeStyle = color[i];
  19600. ctx.lineWidth = lineWidth;
  19601. ctx.stroke();
  19602. }
  19603. //console.log("Origin",ox,oy);
  19604. }
  19605. }
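// The axis endpoints drawn by _renderAxes() come from projecting homogeneous points
// with the 3x4 camera matrix. For a single axis the computation reduces to the sketch
// below (p = cameraMatrix.read(), indexed exactly as in the method above):
/*
// endpoint of the x-axis, i.e. P * (L, 0, 0, 1):
const X = [ l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11] ];
const px = X[0] / X[2];   // perspective division gives AR screen coordinates,
const py = X[1] / X[2];   // which are then scaled by (sx, sy) to viewport pixels
// the origin is P * (0, 0, 0, 1) = (p[9], p[10], p[11]), divided the same way
*/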
  19606. ;// CONCATENATED MODULE: ./src/utils/asap.ts
  19607. /*
  19608. * MARTINS.js
  19609. * GPU-accelerated Augmented Reality for the web
  19610. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19611. *
  19612. * This program is free software: you can redistribute it and/or modify
  19613. * it under the terms of the GNU Lesser General Public License as published
  19614. * by the Free Software Foundation, either version 3 of the License, or
  19615. * (at your option) any later version.
  19616. *
  19617. * This program is distributed in the hope that it will be useful,
  19618. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19619. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19620. * GNU Lesser General Public License for more details.
  19621. *
  19622. * You should have received a copy of the GNU Lesser General Public License
  19623. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19624. *
  19625. * asap.ts
  19626. * Schedule a function to run "as soon as possible"
  19627. */
  19628. /** callbacks */
  19629. const callbacks = [];
  19630. /** arguments to be passed to the callbacks */
  19631. const args = [];
  19632. /** asap key */
  19633. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  19634. // Register an event listener
  19635. window.addEventListener('message', event => {
  19636. if (event.source !== window || event.data !== ASAP_KEY)
  19637. return;
  19638. event.stopPropagation();
  19639. if (callbacks.length == 0)
  19640. return;
  19641. const fn = callbacks.pop();
  19642. const argArray = args.pop();
  19643. fn.apply(undefined, argArray);
  19644. }, true);
  19645. /**
  19646. * Schedule a function to run "as soon as possible"
  19647. * @param fn callback
  19648. * @param params optional parameters
  19649. */
  19650. function asap(fn, ...params) {
  19651. callbacks.unshift(fn);
  19652. args.unshift(params);
  19653. window.postMessage(ASAP_KEY, '*');
  19654. }
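// A usage sketch of asap(): the callback is queued through postMessage, so it
// typically runs after the current task but before setTimeout(..., 0) callbacks
// and before the next animation frame (illustrative):
/*
asap((a, b) => console.log('asap:', a + b), 2, 3);   // extra arguments are forwarded
setTimeout(() => console.log('timeout'), 0);         // usually logs after 'asap: 5'
*/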
  19655. ;// CONCATENATED MODULE: ./src/core/session.ts
  19656. /*
  19657. * MARTINS.js
  19658. * GPU-accelerated Augmented Reality for the web
  19659. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19660. *
  19661. * This program is free software: you can redistribute it and/or modify
  19662. * it under the terms of the GNU Lesser General Public License as published
  19663. * by the Free Software Foundation, either version 3 of the License, or
  19664. * (at your option) any later version.
  19665. *
  19666. * This program is distributed in the hope that it will be useful,
  19667. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19668. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19669. * GNU Lesser General Public License for more details.
  19670. *
  19671. * You should have received a copy of the GNU Lesser General Public License
  19672. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19673. *
  19674. * session.ts
  19675. * WebAR Session
  19676. */
  19677. /** An event emitted by a Session */
  19678. class SessionEvent extends AREvent {
  19679. }
  19680. /** Default options when starting a session */
  19681. const DEFAULT_OPTIONS = {
  19682. mode: 'immersive',
  19683. trackers: [],
  19684. sources: [],
  19685. viewport: null,
  19686. stats: false,
  19687. gizmos: false,
  19688. };
  19689. /**
  19690. * A Session represents an intent to display AR content
  19691. * and encapsulates the main loop (update-render cycle)
  19692. */
  19693. class Session extends AREventTarget {
  19694. /**
  19695. * Constructor
  19696. * @param sources previously initialized sources of data
  19697. * @param mode session mode
  19698. * @param viewport viewport
  19699. * @param stats render stats panel?
  19700. * @param gizmos render gizmos?
  19701. */
  19702. constructor(sources, mode, viewport, stats, gizmos) {
  19703. super();
  19704. this._mode = mode;
  19705. this._trackers = [];
  19706. this._sources = sources;
  19707. this._updateStats = new Stats();
  19708. this._renderStats = new Stats();
  19709. this._active = true;
  19710. this._frameReady = true; // no trackers at the moment
  19711. this._rafQueue = [];
  19712. this._time = new Time();
  19713. this._gizmos = new Gizmos();
  19714. this._gizmos.visible = gizmos;
  19715. // get media
  19716. const media = this.media;
  19717. // setup the viewport
  19718. if (mode == 'immersive')
  19719. this._viewport = new ImmersiveViewport(viewport, () => media.size);
  19720. else if (mode == 'inline')
  19721. this._viewport = new InlineViewport(viewport, () => media.size);
  19722. else
  19723. throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
  19724. this._viewport._init();
  19725. // setup the main loop
  19726. this._setupUpdateLoop();
  19727. this._setupRenderLoop();
  19728. // setup the stats panel
  19729. this._statsPanel = new StatsPanel(this._viewport.hud.container);
  19730. this._statsPanel.visible = stats;
  19731. // done!
  19732. Session._count++;
  19733. Utils.log(`The ${mode} session is now active!`);
  19734. }
  19735. /**
  19736. * Checks if the engine can be run in the browser the client is using
  19737. * @returns true if the engine is compatible with the browser
  19738. */
  19739. static isSupported() {
  19740. // If Safari or iOS, require version 15.2 or later
  19741. if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
  19742. const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
  19743. const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
  19744. const matches = safari || ios; // match safari first (min version)
  19745. if (matches !== null) {
  19746. const version = matches[3] || '0.0';
  19747. const [x, y] = version.split(/[\._]/).map(v => parseInt(v));
  19748. if ((x < 15) || (x == 15 && y < 2)) {
  19749. Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
  19750. return false;
  19751. }
  19752. }
  19753. else
  19754. Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
  19755. }
  19756. // Check if WebGL2 and WebAssembly are supported
  19757. return speedy_vision_default().isSupported();
  19758. }
  19759. /**
  19760. * Instantiate a session
  19761. * @param options options
  19762. * @returns a promise that resolves to a new session
  19763. */
  19764. static instantiate(options = DEFAULT_OPTIONS) {
  19765. const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
  19766. Utils.log(`Starting a new ${mode} session...`);
  19767. return speedy_vision_default().Promise.resolve().then(() => {
  19768. // is the engine supported?
  19769. if (!Session.isSupported())
  19770. throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with the MARTINS.js engine');
  19771. // block multiple immersive sessions
  19772. if (mode !== 'inline' && Session.count > 0)
  19773. throw new IllegalOperationError(`Can't start more than one immersive session`);
  19774. // initialize matrix routines
  19775. return speedy_vision_default().Matrix.ready();
  19776. }).then(() => {
  19777. // validate sources of data
  19778. const videoSources = sources.filter(source => source._type == 'video');
  19779. if (videoSources.length != 1)
  19780. throw new IllegalArgumentError(`One video source of data must be provided`);
  19781. for (let i = sources.length - 1; i >= 0; i--) {
  19782. if (sources.indexOf(sources[i]) < i)
  19783. throw new IllegalArgumentError(`Found repeated sources of data`);
  19784. }
  19785. // initialize sources of data
  19786. return speedy_vision_default().Promise.all(sources.map(source => source._init()));
  19787. }).then(() => {
  19788. // get the viewport
  19789. if (!viewport)
  19790. throw new IllegalArgumentError(`Can't create a session without a viewport`);
  19791. // instantiate session
  19792. return new Session(sources, mode, viewport, stats, gizmos);
  19793. }).then(session => {
  19794. // validate trackers
  19795. if (trackers.length == 0)
  19796. Utils.warning(`No trackers have been attached to the session!`);
  19797. for (let i = trackers.length - 1; i >= 0; i--) {
  19798. if (trackers.indexOf(trackers[i]) < i)
  19799. throw new IllegalArgumentError(`Found repeated trackers`);
  19800. }
  19801. // attach trackers and return the session
  19802. return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
  19803. }).catch(err => {
  19804. // log errors, if any
  19805. Utils.error(`Can't start session: ${err.message}`);
  19806. throw err;
  19807. });
  19808. }
  19809. /**
  19810. * Number of active sessions
  19811. */
  19812. static get count() {
  19813. return this._count;
  19814. }
  19815. /**
  19816. * End the session
  19817. * @returns promise that resolves after the session is shut down
  19818. */
  19819. end() {
  19820. // is the session inactive?
  19821. if (!this._active)
  19822. return speedy_vision_default().Promise.resolve();
  19823. // deactivate the session
  19824. Utils.log('Shutting down the session...');
  19825. this._active = false; // set before wait()
  19826. // wait a few ms, so that the GPU is no longer sending any data
  19827. const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
  19828. setTimeout(resolve, ms);
  19829. });
  19830. // release resources
  19831. return wait(100).then(() => speedy_vision_default().Promise.all(
  19832. // release trackers
  19833. this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
  19834. // release input sources
  19835. this._sources.map(source => source._release()))).then(() => {
  19836. this._sources.length = 0;
  19837. this._trackers.length = 0;
  19838. // release internal components
  19839. this._updateStats.reset();
  19840. this._renderStats.reset();
  19841. this._statsPanel.release();
  19842. this._viewport._release();
  19843. // end the session
  19844. Session._count--;
  19845. // dispatch event
  19846. const event = new SessionEvent('end');
  19847. this.dispatchEvent(event);
  19848. // done!
  19849. Utils.log('Session ended.');
  19850. });
  19851. }
  19852. /**
  19853. * Analogous to window.requestAnimationFrame()
  19854. * @param callback
  19855. * @returns a handle
  19856. */
  19857. requestAnimationFrame(callback) {
  19858. const handle = Symbol('raf-handle');
  19859. if (this._active)
  19860. this._rafQueue.push([handle, callback]);
  19861. else
  19862. throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
  19863. return handle;
  19864. }
  19865. /**
  19866. * Analogous to window.cancelAnimationFrame()
  19867. * @param handle a handle returned by this.requestAnimationFrame()
  19868. */
  19869. cancelAnimationFrame(handle) {
  19870. for (let i = this._rafQueue.length - 1; i >= 0; i--) {
  19871. if (this._rafQueue[i][0] === handle) {
  19872. this._rafQueue.splice(i, 1);
  19873. break;
  19874. }
  19875. }
  19876. }
  19877. /**
  19878. * The underlying media (generally a camera stream)
  19879. * @internal
  19880. */
  19881. get media() {
  19882. for (let i = this._sources.length - 1; i >= 0; i--) {
  19883. if (this._sources[i]._type == 'video')
  19884. return this._sources[i]._data;
  19885. }
  19886. // this shouldn't happen
  19887. throw new IllegalOperationError(`Invalid input source`);
  19888. }
  19889. /**
  19890. * Session mode
  19891. */
  19892. get mode() {
  19893. return this._mode;
  19894. }
  19895. /**
  19896. * Rendering viewport
  19897. */
  19898. get viewport() {
  19899. return this._viewport;
  19900. }
  19901. /**
  19902. * Time utilities
  19903. */
  19904. get time() {
  19905. return this._time;
  19906. }
  19907. /**
  19908. * Visual cues for testing & debugging
  19909. */
  19910. get gizmos() {
  19911. return this._gizmos;
  19912. }
  19913. /**
  19914. * Attach a tracker to the session
  19915. * @param tracker
  19916. */
  19917. _attachTracker(tracker) {
  19918. if (this._trackers.indexOf(tracker) >= 0)
  19919. throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
  19920. else if (!this._active)
  19921. throw new IllegalOperationError(`Inactive session`);
  19922. this._trackers.push(tracker);
  19923. return tracker._init(this);
  19924. }
  19925. /**
  19926. * Render the user media to the background canvas
  19927. */
  19928. _renderUserMedia() {
  19929. const canvas = this._viewport._background;
  19930. const ctx = canvas.getContext('2d', { alpha: false });
  19931. if (ctx) {
  19932. ctx.imageSmoothingEnabled = false;
  19933. // draw user media
  19934. const image = this.media.source;
  19935. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  19936. // render output image(s)
  19937. for (let i = 0; i < this._trackers.length; i++) {
  19938. const image = this._trackers[i]._output.image;
  19939. if (image !== undefined)
  19940. ctx.drawImage(image.source, 0, 0, canvas.width, canvas.height);
  19941. //ctx.drawImage(image.source, canvas.width - image.width, canvas.height - image.height, image.width, image.height);
  19942. }
  19943. // render gizmos
  19944. this._gizmos._render(this._viewport, this._trackers);
  19945. }
  19946. }
  19947. /**
  19948. * Setup the update loop
  19949. */
  19950. _setupUpdateLoop() {
  19951. const scheduleNextFrame = () => {
  19952. if (this._active) {
  19953. if (Settings.powerPreference == 'high-performance')
  19954. asap(repeat);
  19955. else
  19956. window.requestAnimationFrame(repeat);
  19957. }
  19958. };
  19959. const update = () => {
  19960. this._update().then(scheduleNextFrame).turbocharge();
  19961. };
  19962. function repeat() {
  19963. if (Settings.powerPreference == 'low-power') // 30 fps
  19964. window.requestAnimationFrame(update);
  19965. else
  19966. update();
  19967. }
  19968. window.requestAnimationFrame(update);
  19969. }
  19970. /**
  19971. * The core of the update loop
  19972. */
  19973. _update() {
  19974. // active session?
  19975. if (this._active) {
  19976. return speedy_vision_default().Promise.all(
  19977. // update trackers
  19978. this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
  19979. // update internals
  19980. this._updateStats.update();
  19981. this._frameReady = true;
  19982. }).catch(err => {
  19983. // handle error
  19984. Utils.warning('Tracking error: ' + err.toString());
  19985. });
  19986. }
  19987. else {
  19988. // inactive session
  19989. this._updateStats.reset();
  19990. return speedy_vision_default().Promise.resolve();
  19991. }
  19992. }
  19993. /**
  19994. * Setup the render loop
  19995. */
  19996. _setupRenderLoop() {
  19997. let skip = false, toggle = false;
  19998. const render = (timestamp) => {
  19999. const enableFrameSkipping = (Settings.powerPreference == 'low-power');
  20000. const highPerformance = (Settings.powerPreference == 'high-performance');
  20001. // advance time
  20002. this._time._update(timestamp);
  20003. // skip frames
  20004. if (!enableFrameSkipping || !(skip = !skip))
  20005. this._render(timestamp, false);
  20006. //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
  20007. // repeat
  20008. if (this._active)
  20009. window.requestAnimationFrame(render);
  20010. };
  20011. window.requestAnimationFrame(render);
  20012. }
  20013. /**
  20014. * Render a frame (RAF callback)
  20015. * @param time current time, in ms
  20016. * @param skipUserMedia skip copying the pixels of the user media to the background canvas in order to reduce the processing load (video stream is probably at 30fps?)
  20017. */
  20018. _render(time, skipUserMedia) {
  20019. // is the session active?
  20020. if (this._active) {
  20021. // are we ready to render a frame?
  20022. if (this._frameReady) {
  20023. // create a frame
  20024. const results = this._trackers.map(tracker => tracker._output.exports || ({
  20025. tracker: tracker,
  20026. trackables: [],
  20027. }));
  20028. const frame = new Frame(this, results);
  20029. // clone & clear the RAF queue
  20030. const rafQueue = this._rafQueue.slice(0);
  20031. this._rafQueue.length = 0;
  20032. // render user media
  20033. if (!skipUserMedia)
  20034. this._renderUserMedia();
  20035. // render frame
  20036. for (let i = 0; i < rafQueue.length; i++)
  20037. rafQueue[i][1].call(undefined, time, frame);
  20038. // update internals
  20039. this._renderStats.update();
  20040. this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
  20041. this._frameReady = false;
  20042. }
  20043. else {
  20044. // skip frame
  20045. ;
  20046. // we'll update the renderStats even if we skip the frame,
  20047. // otherwise this becomes updateStats! (approximately)
  20048. // This is a window.requestAnimationFrame() call, so the
  20049. // browser is rendering content even if we're not.
  20050. this._renderStats.update();
  20051. }
  20052. }
  20053. else {
  20054. // inactive session
  20055. this._renderStats.reset();
  20056. }
  20057. }
  20058. }
  20059. /** Number of active sessions */
  20060. Session._count = 0;
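// A minimal end-to-end sketch of how a Session is typically created and driven.
// The videoSource, imageTracker and viewport values are placeholders (their factory
// functions live elsewhere in this bundle); everything else mirrors the options
// destructured in Session.instantiate():
/*
Session.instantiate({
    mode: 'immersive',
    sources: [ videoSource ],    // exactly one video source is required
    trackers: [ imageTracker ],
    viewport: viewport,
    stats: true,                 // show the stats panel
    gizmos: false
}).then(session => {
    function loop(time, frame) {
        // ... render AR content using frame.results ...
        session.requestAnimationFrame(loop);
    }
    session.requestAnimationFrame(loop);
});
*/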
  20061. ;// CONCATENATED MODULE: ./src/core/settings.ts
  20062. /*
  20063. * MARTINS.js
  20064. * GPU-accelerated Augmented Reality for the web
  20065. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20066. *
  20067. * This program is free software: you can redistribute it and/or modify
  20068. * it under the terms of the GNU Lesser General Public License as published
  20069. * by the Free Software Foundation, either version 3 of the License, or
  20070. * (at your option) any later version.
  20071. *
  20072. * This program is distributed in the hope that it will be useful,
  20073. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20074. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20075. * GNU Lesser General Public License for more details.
  20076. *
  20077. * You should have received a copy of the GNU Lesser General Public License
  20078. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20079. *
  20080. * settings.ts
  20081. * Global Settings
  20082. */
  20083. /**
  20084. * Global Settings
  20085. */
  20086. class Settings {
  20087. /**
  20088. * Power preference (may impact performance x battery life)
  20089. */
  20090. static get powerPreference() {
  20091. return this._powerPreference;
  20092. }
  20093. /**
  20094. * Power preference (may impact performance x battery life)
  20095. * Note: this setting should be the very first thing you set
  20096. * (before the WebGL context is created by Speedy)
  20097. */
  20098. static set powerPreference(value) {
  20099. // validate
  20100. if (Session.count > 0)
  20101. throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
  20102. else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
  20103. throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
  20104. /*
  20105. // we won't use 'high-performance' for Speedy's GPU computations
  20106. // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
  20107. // also, it seems like low-power mode may break WebGL2 in some drivers?!
  20108. if(value == 'high-performance')
  20109. Speedy.Settings.powerPreference = 'default';
  20110. else
  20111. Speedy.Settings.powerPreference = value;
  20112. */
  20113. // change the GPU polling mode
  20114. if (value == 'high-performance')
  20115. (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
  20116. else
  20117. (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
  20118. // update the power preference
  20119. this._powerPreference = value;
  20120. // log
  20121. Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
  20122. }
  20123. }
  20124. Settings._powerPreference = 'default';
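// Since powerPreference can only be changed while no session is active (and before
// Speedy creates its WebGL context), a typical sketch sets it before anything else:
/*
Settings.powerPreference = 'low-power';   // or 'default' / 'high-performance'
// ... then instantiate the session (see Session.instantiate above)
*/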
  20125. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
  20126. /*
  20127. * MARTINS.js
  20128. * GPU-accelerated Augmented Reality for the web
  20129. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20130. *
  20131. * This program is free software: you can redistribute it and/or modify
  20132. * it under the terms of the GNU Lesser General Public License as published
  20133. * by the Free Software Foundation, either version 3 of the License, or
  20134. * (at your option) any later version.
  20135. *
  20136. * This program is distributed in the hope that it will be useful,
  20137. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20138. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20139. * GNU Lesser General Public License for more details.
  20140. *
  20141. * You should have received a copy of the GNU Lesser General Public License
  20142. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20143. *
  20144. * reference-image-database.ts
  20145. * A collection of Reference Images
  20146. */
  20147. /** Default capacity of a Reference Image Database */
  20148. const DEFAULT_CAPACITY = 100;
  20149. /** Generate a unique name for a reference image */
  20150. const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
  20151. /**
  20152. * A collection of Reference Images
  20153. */
  20154. class ReferenceImageDatabase {
  20155. /**
  20156. * Constructor
  20157. */
  20158. constructor() {
  20159. this._capacity = DEFAULT_CAPACITY;
  20160. this._database = [];
  20161. this._locked = false;
  20162. }
  20163. /**
  20164. * The number of reference images stored in this database
  20165. */
  20166. get count() {
  20167. return this._database.length;
  20168. }
  20169. /**
  20170. * Maximum number of elements
  20171. */
  20172. get capacity() {
  20173. return this._capacity;
  20174. }
  20175. /**
  20176. * Maximum number of elements
  20177. */
  20178. /*
  20179. set capacity(value: number)
  20180. {
  20181. const capacity = Math.max(0, value | 0);
  20182. if(this.count > capacity)
  20183. throw new IllegalArgumentError(`Can't set the capacity of the database to ${this._capacity}: it currently stores ${this.count} entries`);
  20184. this._capacity = capacity;
  20185. }
  20186. */
  20187. /**
  20188. * Iterates over the collection
  20189. */
  20190. *[Symbol.iterator]() {
  20191. const ref = this._database.map(entry => entry.referenceImage);
  20192. yield* ref;
  20193. }
  20194. /**
  20195. * Add reference images to this database
  20196. * Add only the images you actually need to track!
20197. * (each image takes up storage space)
  20198. * @param referenceImages one or more reference images with unique names (a unique name will
  20199. * be generated automatically if you don't specify one)
  20200. * @returns a promise that resolves as soon as the images are loaded and added to this database
  20201. */
  20202. add(referenceImages) {
  20203. // handle no input
  20204. if (referenceImages.length == 0)
  20205. return speedy_vision_default().Promise.resolve();
  20206. // handle multiple images as input
  20207. if (referenceImages.length > 1) {
  20208. const promises = referenceImages.map(image => this.add([image]));
  20209. return speedy_vision_default().Promise.all(promises).then(() => void (0));
  20210. }
  20211. // handle a single image as input
  20212. const referenceImage = referenceImages[0];
  20213. // locked database?
  20214. if (this._locked)
  20215. throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
  20216. // reached full capacity?
  20217. if (this.count >= this.capacity)
  20218. throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images has been exceeded.`);
  20219. // check for duplicate names
  20220. if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
  20221. throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
  20222. // load the media and add the reference image to the database
  20223. return speedy_vision_default().load(referenceImage.image).then(media => {
  20224. this._database.push({
  20225. referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
  20226. media: media
  20227. });
  20228. });
  20229. }
  20230. /**
  20231. * Lock the database, so that new reference images can no longer be added to it
  20232. * @internal
  20233. */
  20234. _lock() {
  20235. this._locked = true;
  20236. }
  20237. /**
  20238. * Get the media object associated to a reference image
  20239. * @param name reference image name
  20240. * @returns media
  20241. * @internal
  20242. */
  20243. _findMedia(name) {
  20244. for (let i = 0; i < this._database.length; i++) {
  20245. if (this._database[i].referenceImage.name === name)
  20246. return this._database[i].media;
  20247. }
  20248. throw new IllegalArgumentError(`Can't find reference image "${name}"`);
  20249. }
  20250. }
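// A sketch of filling a database with a named reference image. The <img> element is a
// placeholder; entries without a name receive a generated 'target-*' name:
/*
const db = new ReferenceImageDatabase();
const img = document.querySelector('img#my-target');   // placeholder element
db.add([ { name: 'my-target', image: img } ])
  .then(() => console.log(`the database now holds ${db.count} image(s)`));
*/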
  20251. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
  20252. /*
  20253. * MARTINS.js
  20254. * GPU-accelerated Augmented Reality for the web
  20255. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20256. *
  20257. * This program is free software: you can redistribute it and/or modify
  20258. * it under the terms of the GNU Lesser General Public License as published
  20259. * by the Free Software Foundation, either version 3 of the License, or
  20260. * (at your option) any later version.
  20261. *
  20262. * This program is distributed in the hope that it will be useful,
  20263. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20264. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20265. * GNU Lesser General Public License for more details.
  20266. *
  20267. * You should have received a copy of the GNU Lesser General Public License
  20268. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20269. *
  20270. * settings.ts
  20271. * Settings of the Image Tracker
  20272. */
  20273. /** Default tracking resolution */
  20274. const DEFAULT_TRACKING_RESOLUTION = 'sm+';
  20275. /** Maximum number of keypoints to be stored for each reference image when in the training state */
  20276. const TRAIN_MAX_KEYPOINTS = 1024; //512;
  20277. /** Percentage relative to the screen size adjusted to the aspect ratio of the reference image */
  20278. const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
  20279. /** Normalized width & height of an image target, in pixels */
  20280. const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
20281. /** Used to identify the best matches */
  20282. const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
  20283. /** Maximum number of keypoints to be analyzed when in the scanning state */
  20284. const SCAN_MAX_KEYPOINTS = 512;
  20285. /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
  20286. const SCAN_PYRAMID_LEVELS = 4; //7;
  20287. /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
  20288. const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
  20289. /** Threshold of the FAST corner detector used in the scanning/training states */
  20290. const SCAN_FAST_THRESHOLD = 60;
  20291. /** Minimum number of accepted matches for us to move out from the scanning state */
  20292. const SCAN_MIN_MATCHES = 20; //30;
  20293. /** When in the scanning state, we require the image to be matched during a few consecutive frames before accepting it */
  20294. const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
  20295. /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
  20296. const SCAN_RANSAC_REPROJECTIONERROR = 5;
  20297. /** Number of tables used in the LSH-based keypoint matching */
  20298. const SCAN_LSH_TABLES = 8; // up to 32
  20299. /** Hash size, in bits, used in the LSH-based keypoint matching */
  20300. const SCAN_LSH_HASHSIZE = 15; // up to 16
  20301. /** Use the Nightvision filter when in the scanning/training state? */
  20302. const SCAN_WITH_NIGHTVISION = true;
  20303. /** Nightvision filter: gain */
  20304. const NIGHTVISION_GAIN = 0.3; // 0.2;
  20305. /** Nightvision filter: offset */
  20306. const NIGHTVISION_OFFSET = 0.5;
  20307. /** Nightvision filter: decay */
  20308. const NIGHTVISION_DECAY = 0.0;
  20309. /** Nightvision filter: quality level */
  20310. const NIGHTVISION_QUALITY = 'low';
  20311. /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
  20312. const ORB_GAUSSIAN_KSIZE = 9;
  20313. /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
  20314. const ORB_GAUSSIAN_SIGMA = 2.0;
  20315. /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
  20316. const SUBPIXEL_GAUSSIAN_KSIZE = 5;
  20317. /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
  20318. const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
  20319. /** Subpixel refinement method */
  20320. const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
  20321. /** Minimum acceptable number of matched keypoints when in the tracking state */
  20322. const TRACK_MIN_MATCHES = 4; //10; //20;
  20323. /** Maximum number of keypoints to be analyzed in the tracking state */
  20324. const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
20325. /** Capacity of the keypoint detector used in the tracking state */
  20326. const TRACK_DETECTOR_CAPACITY = 2048; //4096;
  20327. /** Quality of the Harris/Shi-Tomasi corner detector */
  20328. const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
  20329. /** Use the Nightvision filter when in the tracking state? */
  20330. const TRACK_WITH_NIGHTVISION = false; // produces shaking?
  20331. /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
  20332. const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
  20333. /** Relative size (%) used to clip keypoints from the borders of the rectified image */
  20334. const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
  20335. /** Number of iterations used to refine the target image before tracking */
  20336. const TRACK_REFINEMENT_ITERATIONS = 3;
  20337. /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
  20338. const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
  20339. /** We use a N x N grid to spatially distribute the keypoints in order to compute a better homography */
  20340. const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
20341. /** Used to identify the best matches */
  20342. const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
  20343. /** Number of consecutive frames in which we tolerate a "target lost" situation */
  20344. const TRACK_LOST_TOLERANCE = 10;
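// --- Editorial sketch (not part of MARTINS.js): the ratio test that TRACK_MATCH_RATIO
// (and the analogous SCAN_MATCH_RATIO used by the scanning state) parameterizes.
// A match is accepted only when the best candidate is clearly better than the
// runner-up; lower ratios make matching stricter. The keypoint shape assumed here
// ({ matches: [{ index, distance }, { index, distance }] }) is the one consumed by
// _goodMatches() and _findWarp() further below.
function exampleIsGoodMatch(keypoint, ratio = TRACK_MATCH_RATIO) {
    const [best, second] = keypoint.matches;
    if (best.index < 0 || second.index < 0)
        return false; // need two valid candidates to compare
    return best.distance <= ratio * second.distance;
}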
  20345. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
  20346. /*
  20347. * MARTINS.js
  20348. * GPU-accelerated Augmented Reality for the web
  20349. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20350. *
  20351. * This program is free software: you can redistribute it and/or modify
  20352. * it under the terms of the GNU Lesser General Public License as published
  20353. * by the Free Software Foundation, either version 3 of the License, or
  20354. * (at your option) any later version.
  20355. *
  20356. * This program is distributed in the hope that it will be useful,
  20357. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20358. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20359. * GNU Lesser General Public License for more details.
  20360. *
  20361. * You should have received a copy of the GNU Lesser General Public License
  20362. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20363. *
  20364. * state.ts
  20365. * Abstract state of the Image Tracker
  20366. */
  20367. /**
  20368. * Abstract state of the Image Tracker
  20369. */
  20370. class ImageTrackerState {
  20371. /**
  20372. * Constructor
  20373. * @param name
  20374. * @param imageTracker
  20375. */
  20376. constructor(name, imageTracker) {
  20377. this._name = name;
  20378. this._imageTracker = imageTracker;
  20379. this._pipeline = this._createPipeline();
  20380. }
  20381. /**
  20382. * State name
  20383. */
  20384. get name() {
  20385. return this._name;
  20386. }
  20387. /**
  20388. * AR screen size
  20389. */
  20390. get screenSize() {
  20391. const screen = this._pipeline.node('screen');
  20392. if (!screen)
  20393. throw new IllegalOperationError();
  20394. // this is available once this state has run at least once
  20395. return screen.size;
  20396. }
  20397. /**
  20398. * Initialize the state
  20399. */
  20400. init() {
  20401. }
  20402. /**
  20403. * Release resources
  20404. */
  20405. release() {
  20406. return this._pipeline.release();
  20407. }
  20408. /**
  20409. * Update the state
  20410. * @param media user media
  20411. * @param screenSize AR screen size for image processing
  20413. * @returns promise
  20414. */
  20415. update(media, screenSize) {
  20416. const source = this._pipeline.node('source');
  20417. const screen = this._pipeline.node('screen');
  20418. // validate the pipeline
  20419. if (!source || !screen)
  20420. throw new IllegalOperationError();
  20421. // prepare the pipeline
  20422. source.media = media;
  20423. screen.size = screenSize;
  20424. // run the pipeline
  20425. return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
  20426. }
  20427. /**
  20428. * Called as soon as this becomes the active state, just before update() runs for the first time
  20429. * @param settings
  20430. */
  20431. onEnterState(settings) {
  20432. }
  20433. /**
  20434. * Called when leaving the state, after update()
  20435. */
  20436. onLeaveState() {
  20437. }
  20438. /**
  20439. * Called just before the GPU processing
  20440. * @returns promise
  20441. */
  20442. _beforeUpdate() {
  20443. return speedy_vision_default().Promise.resolve();
  20444. }
  20445. /**
  20446. * GPU processing
  20447. * @returns promise with the pipeline results
  20448. */
  20449. _gpuUpdate() {
  20450. return this._pipeline.run();
  20451. }
  20452. //
  20453. // Some utility methods common to various states
  20454. //
  20455. /**
  20456. * Find the coordinates of a polyline surrounding the target image
  20457. * @param homography maps the target image to the AR screen
  20458. * @param targetSize size of the target space
  20459. * @returns promise that resolves to 4 points in AR screen space
  20460. */
  20461. _findPolylineCoordinates(homography, targetSize) {
  20462. const w = targetSize.width, h = targetSize.height;
  20463. const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
  20464. 0, 0,
  20465. w, 0,
  20466. w, h,
  20467. 0, h,
  20468. ]);
  20469. const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
  20470. return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
  20471. }
  20472. /**
  20473. * Find a polyline surrounding the target image
  20474. * @param homography maps the target image to the AR screen
  20475. * @param targetSize size of the target space
  20476. * @returns promise that resolves to 4 points in AR screen space
  20477. */
  20478. _findPolyline(homography, targetSize) {
  20479. return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
  20480. const polydata = polylineCoordinates.read();
  20481. const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
  20482. return polyline;
  20483. });
  20484. }
  20485. /**
  20486. * Whether or not to rotate the warped image in order to best fit the AR screen
  20487. * @param media media associated with the reference image
  20488. * @param screenSize AR screen
  20489. * @returns boolean
  20490. */
  20491. _mustRotateWarpedImage(media, screenSize) {
  20492. const screenAspectRatio = screenSize.width / screenSize.height;
  20493. const mediaAspectRatio = media.width / media.height;
  20494. const eps = 0.1;
  20495. return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
  20496. }
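// Editorial example: a 1280x720 reference image (aspect ratio ~1.78 >= 1.1) shown on a
// 480x640 portrait screen (aspect ratio 0.75 < 0.9) triggers the rotation; two landscape
// (or two portrait) inputs never do.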
  20497. /**
  20498. * Find a rectification matrix to be applied to an image fitting the entire AR screen
  20499. * @param media media associated with the reference image
  20500. * @param screenSize AR screen
  20501. * @returns promise that resolves to a rectification matrix
  20502. */
  20503. _findRectificationMatrixOfFullscreenImage(media, screenSize) {
  20504. const b = TRACK_RECTIFIED_BORDER;
  20505. const sw = screenSize.width, sh = screenSize.height;
  20506. const mediaAspectRatio = media.width / media.height;
  20507. const mustRotate = this._mustRotateWarpedImage(media, screenSize);
  20508. // compute the vertices of the target in screen space
  20509. // we suppose portrait or landscape mode for both screen & media
  20510. const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
  20511. const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
  20512. const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
  20513. const right = sw - left;
  20514. const bottom = sh - top;
  20515. const targetVertices = speedy_vision_default().Matrix(2, 4, [
  20516. left, top,
  20517. right, top,
  20518. right, bottom,
  20519. left, bottom,
  20520. ]);
  20521. const screenVertices = speedy_vision_default().Matrix(2, 4, [
  20522. 0, 0,
  20523. sw, 0,
  20524. sw, sh,
  20525. 0, sh
  20526. ]);
  20527. const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
  20528. const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
  20529. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  20530. return (mustRotate ? speedy_vision_default().Matrix.perspective(
20531. // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
  20532. preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
  20533. // alignment: align the target to the center of the screen
  20534. speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
  20535. // pre-rectify and then align
  20536. rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
  20537. }
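// Editorial example: on a 640x480 screen (sw >= sh), with b = 0.15 and a 4:3 landscape
// reference image (c = 4/3, no rotation), top = 0.15 * 480 = 72 and
// left = (640 - 480 * 0.7 * 4/3) / 2 = 96, so the rectified target occupies a 448x336
// region centered on the screen, preserving the 4:3 aspect ratio of the media.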
  20538. /**
  20539. * Find a rectification matrix to be applied to the target image
  20540. * @param homography maps a reference image to the AR screen
  20541. * @param targetSize size of the target space
  20542. * @param media media associated with the reference image
  20543. * @param screenSize AR screen
  20544. * @returns promise that resolves to a rectification matrix
  20545. */
  20546. _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
  20547. const sw = screenSize.width, sh = screenSize.height;
  20548. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  20549. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  20550. return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
  20551. // from target space to (full)screen
  20552. speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
  20553. // from (full)screen to rectified coordinates
  20554. this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
  20555. // function composition
  20556. rectificationMatrix.setTo(mat.times(rectificationMatrix)));
  20557. }
  20558. }
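// --- Editorial sketch (not part of MARTINS.js): the template-method contract that the
// concrete states below (initial, training, scanning, pre-tracking, ...) follow.
// A subclass provides _createPipeline() and _afterUpdate(); the base class drives
// _beforeUpdate() -> _gpuUpdate() -> _afterUpdate() on each update(). Only nodes and
// calls already used in this bundle appear here; the pipeline is a minimal pass-through
// and this example state is never registered with the tracker.
class ExamplePassthroughState extends ImageTrackerState {
    constructor(imageTracker) {
        super('example-passthrough', imageTracker);
    }
    _afterUpdate(result) {
        // stay in this state and report an empty tracker output
        return speedy_vision_default().Promise.resolve({
            nextState: this.name,
            trackerOutput: {},
        });
    }
    _createPipeline() {
        // update() expects at least the 'source' and 'screen' nodes
        const pipeline = speedy_vision_default().Pipeline();
        const source = speedy_vision_default().Image.Source('source');
        const screen = speedy_vision_default().Transform.Resize('screen');
        const imageSink = speedy_vision_default().Image.Sink('image');
        source.media = null;
        screen.size = speedy_vision_default().Size(0, 0);
        source.output().connectTo(screen.input());
        screen.output().connectTo(imageSink.input());
        pipeline.init(source, screen, imageSink);
        return pipeline;
    }
}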
  20559. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
  20560. /*
  20561. * MARTINS.js
  20562. * GPU-accelerated Augmented Reality for the web
  20563. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20564. *
  20565. * This program is free software: you can redistribute it and/or modify
  20566. * it under the terms of the GNU Lesser General Public License as published
  20567. * by the Free Software Foundation, either version 3 of the License, or
  20568. * (at your option) any later version.
  20569. *
  20570. * This program is distributed in the hope that it will be useful,
  20571. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20572. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20573. * GNU Lesser General Public License for more details.
  20574. *
  20575. * You should have received a copy of the GNU Lesser General Public License
  20576. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20577. *
  20578. * initial.ts
  20579. * Initial state of the Image Tracker
  20580. */
  20581. /**
  20582. * The purpose of the initial state of the Image Tracker
  20583. * is to initialize the training state using the state machine
  20584. */
  20585. class ImageTrackerInitialState extends ImageTrackerState {
  20586. /**
  20587. * Constructor
  20588. * @param imageTracker
  20589. */
  20590. constructor(imageTracker) {
  20591. super('initial', imageTracker);
  20592. }
  20593. /**
  20594. * Called just before the GPU processing
  20595. * @returns promise
  20596. */
  20597. _beforeUpdate() {
  20598. const source = this._pipeline.node('source');
  20599. const media = source.media;
  20600. const mediaSize = media.size;
  20601. if (mediaSize.area() < this.screenSize.area())
  20602. Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
  20603. return speedy_vision_default().Promise.resolve();
  20604. }
  20605. /**
  20606. * Post processing that takes place just after the GPU processing
  20607. * @param result pipeline results
  20608. * @returns state output
  20609. */
  20610. _afterUpdate(result) {
  20611. return speedy_vision_default().Promise.resolve({
  20612. nextState: 'training',
  20613. trackerOutput: {},
  20614. });
  20615. }
  20616. /**
  20617. * Create & setup the pipeline
  20618. * @returns pipeline
  20619. */
  20620. _createPipeline() {
  20621. // this pipeline does nothing useful,
  20622. // but it does preload some shaders...
  20623. const pipeline = speedy_vision_default().Pipeline();
  20624. const source = speedy_vision_default().Image.Source('source');
  20625. const screen = speedy_vision_default().Transform.Resize('screen');
  20626. const greyscale = speedy_vision_default().Filter.Greyscale();
  20627. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
  20628. const nightvision = speedy_vision_default().Filter.Nightvision();
  20629. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  20630. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  20631. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20632. const blur = speedy_vision_default().Filter.GaussianBlur();
  20633. const clipper = speedy_vision_default().Keypoint.Clipper();
  20634. const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
  20635. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  20636. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  20637. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  20638. const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
  20639. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
  20640. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
  20641. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  20642. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
  20643. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  20644. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
  20645. source.media = null;
  20646. screen.size = speedy_vision_default().Size(0, 0);
  20647. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20648. nightvision.quality = NIGHTVISION_QUALITY;
  20649. subpixel.method = SUBPIXEL_METHOD;
  20650. //borderClipper.imageSize = screen.size;
  20651. borderClipper.imageSize = speedy_vision_default().Size(100, 100);
  20652. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  20653. matcher.k = 1; //2;
  20654. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20655. keypointPortalSource.source = keypointPortalSink;
  20656. muxOfReferenceKeypoints.port = 0;
  20657. muxOfBufferOfReferenceKeypoints.port = 0;
  20658. bufferOfReferenceKeypoints.frozen = false;
  20659. keypointSink.turbo = false;
  20660. // prepare input
  20661. source.output().connectTo(screen.input());
  20662. screen.output().connectTo(greyscale.input());
  20663. // preprocess images
  20664. greyscale.output().connectTo(imageRectifier.input());
  20665. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  20666. imageRectifier.output().connectTo(nightvision.input());
  20667. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20668. nightvisionMux.output().connectTo(blur.input());
  20669. // keypoint detection & clipping
  20670. nightvisionMux.output().connectTo(detector.input());
  20671. detector.output().connectTo(borderClipper.input());
  20672. borderClipper.output().connectTo(clipper.input());
  20673. // keypoint refinement
  20674. imageRectifier.output().connectTo(denoiser.input());
  20675. denoiser.output().connectTo(subpixel.input('image'));
  20676. clipper.output().connectTo(subpixel.input('keypoints'));
  20677. // keypoint description
  20678. blur.output().connectTo(descriptor.input('image'));
  20679. subpixel.output().connectTo(descriptor.input('keypoints'));
  20680. // keypoint matching
  20681. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  20682. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  20683. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  20684. descriptor.output().connectTo(matcher.input('keypoints'));
  20685. // store reference keypoints
  20686. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  20687. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  20688. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  20689. // portals
  20690. descriptor.output().connectTo(keypointPortalSink.input());
  20691. // prepare output
  20692. descriptor.output().connectTo(keypointRectifier.input());
  20693. keypointRectifier.output().connectTo(keypointSink.input());
  20694. matcher.output().connectTo(keypointSink.input('matches'));
  20695. // done!
  20696. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  20697. /*
  20698. const run = pipeline.run.bind(pipeline);
  20699. pipeline.run = function() {
  20700. console.time("TIME");
  20701. return run().then(x => {
  20702. console.timeEnd("TIME");
  20703. return x;
  20704. });
  20705. };
  20706. */
  20707. return pipeline;
  20708. }
  20709. }
  20710. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
  20711. /*
  20712. * MARTINS.js
  20713. * GPU-accelerated Augmented Reality for the web
  20714. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20715. *
  20716. * This program is free software: you can redistribute it and/or modify
  20717. * it under the terms of the GNU Lesser General Public License as published
  20718. * by the Free Software Foundation, either version 3 of the License, or
  20719. * (at your option) any later version.
  20720. *
  20721. * This program is distributed in the hope that it will be useful,
  20722. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20723. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20724. * GNU Lesser General Public License for more details.
  20725. *
  20726. * You should have received a copy of the GNU Lesser General Public License
  20727. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20728. *
  20729. * training.ts
  20730. * Training state of the Image Tracker
  20731. */
  20732. /**
  20733. * Training state of the Image Tracker
  20734. */
  20735. class ImageTrackerTrainingState extends ImageTrackerState {
  20736. /**
  20737. * Constructor
  20738. * @param imageTracker
  20739. */
  20740. constructor(imageTracker) {
  20741. super('training', imageTracker);
  20742. this._currentImageIndex = 0;
  20743. this._image = [];
  20744. // initialize the training map
  20745. this._trainingMap = {
  20746. referenceImageIndex: [],
  20747. referenceImage: [],
  20748. keypoints: []
  20749. };
  20750. }
  20751. /**
  20752. * Called as soon as this becomes the active state, just before update() runs for the first time
  20753. * @param settings
  20754. */
  20755. onEnterState(settings) {
  20756. const database = this._imageTracker.database;
  20757. // validate
  20758. if (database.count == 0)
  20759. throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
  20760. // prepare to train...
  20761. this._currentImageIndex = 0;
  20762. this._image.length = 0;
  20763. this._trainingMap.referenceImageIndex.length = 0;
  20764. this._trainingMap.referenceImage.length = 0;
  20765. this._trainingMap.keypoints.length = 0;
  20766. // lock the database
  20767. Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
  20768. database._lock();
  20769. // collect all images
  20770. for (const referenceImage of database)
  20771. this._image.push(referenceImage);
  20772. }
  20773. /**
  20774. * Called just before the GPU processing
  20775. * @returns promise
  20776. */
  20777. _beforeUpdate() {
  20778. const arScreenSize = this.screenSize;
  20779. const source = this._pipeline.node('source');
  20780. const screen = this._pipeline.node('screen');
  20781. const keypointScaler = this._pipeline.node('keypointScaler');
  20782. // this shouldn't happen
  20783. if (this._currentImageIndex >= this._image.length)
  20784. return speedy_vision_default().Promise.reject(new IllegalOperationError());
  20785. // set the appropriate training media
  20786. const database = this._imageTracker.database;
  20787. const referenceImage = this._image[this._currentImageIndex];
  20788. const media = database._findMedia(referenceImage.name);
  20789. source.media = media;
  20790. // compute the appropriate size of the training image space
  20791. const resolution = this._imageTracker.resolution;
  20792. const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
  20793. const aspectRatioOfTrainingImage = media.width / media.height;
  20794. /*
  20795. let sin = 0, cos = 1;
  20796. if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
  20797. // training image and source video: both in landscape mode or both in portrait mode
  20798. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  20799. screen.size.width = Math.round(screen.size.width * scale);
  20800. screen.size.height = Math.round(screen.size.height * scale);
  20801. }
  20802. else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
  20803. // training image: portrait mode; source video: landscape mode
  20804. screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
  20805. screen.size.width = Math.round(screen.size.width * scale);
  20806. screen.size.height = Math.round(screen.size.height * scale);
  20807. sin = 1; cos = 0; // rotate 90deg
  20808. }
  20809. else {
  20810. // training image: landscape mode; source video: portrait mode
  20811. }
  20812. */
  20813. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  20814. screen.size.width = Math.round(screen.size.width * scale);
  20815. screen.size.height = Math.round(screen.size.height * scale);
  20816. // convert keypoints from the training image space to AR screen space
  20817. // let's pretend that trained keypoints belong to the AR screen space,
  20818. // regardless of the size of the target image. This will make things
  20819. // easier when computing the homography.
  20820. /*
  20821. const sw = arScreenSize.width / screen.size.width;
  20822. const sh = arScreenSize.height / screen.size.height;
  20823. */
  20824. const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
  20825. const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
  20826. keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
  20827. sw, 0, 0,
  20828. 0, sh, 0,
  20829. 0, 0, 1,
  20830. ]);
  20831. // log
  20832. Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
  20833. // done!
  20834. return speedy_vision_default().Promise.resolve();
  20835. }
  20836. /**
  20837. * Post processing that takes place just after the GPU processing
  20838. * @param result pipeline results
  20839. * @returns state output
  20840. */
  20841. _afterUpdate(result) {
  20842. const referenceImage = this._image[this._currentImageIndex];
  20843. const keypoints = result.keypoints;
  20844. const image = result.image;
  20845. // log
  20846. Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
20847. // update the training map, so that each keypoint found in this pass can be mapped back to the current reference image
  20848. this._trainingMap.referenceImage.push(referenceImage);
  20849. for (let i = 0; i < keypoints.length; i++) {
  20850. this._trainingMap.keypoints.push(keypoints[i]);
  20851. this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
  20852. }
  20853. // the current image has been processed!
  20854. ++this._currentImageIndex;
  20855. // set output
  20856. if (this._currentImageIndex >= this._image.length) {
  20857. // finished training!
  20858. return speedy_vision_default().Promise.resolve({
  20859. //nextState: 'training',
  20860. nextState: 'scanning',
  20861. nextStateSettings: {
  20862. keypoints: this._trainingMap.keypoints,
  20863. },
  20864. trackerOutput: {},
  20865. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  20866. });
  20867. }
  20868. else {
  20869. // we're not done yet
  20870. return speedy_vision_default().Promise.resolve({
  20871. nextState: 'training',
  20872. trackerOutput: {},
  20873. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  20874. });
  20875. }
  20876. }
  20877. /**
  20878. * Create & setup the pipeline
  20879. * @returns pipeline
  20880. */
  20881. _createPipeline() {
  20882. const pipeline = speedy_vision_default().Pipeline();
  20883. const source = speedy_vision_default().Image.Source('source');
  20884. const screen = speedy_vision_default().Transform.Resize('screen');
  20885. const greyscale = speedy_vision_default().Filter.Greyscale();
  20886. const blur = speedy_vision_default().Filter.GaussianBlur();
  20887. const nightvision = speedy_vision_default().Filter.Nightvision();
  20888. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  20889. const pyramid = speedy_vision_default().Image.Pyramid();
  20890. const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
  20891. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20892. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  20893. const blurredPyramid = speedy_vision_default().Image.Pyramid();
  20894. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  20895. const clipper = speedy_vision_default().Keypoint.Clipper();
  20896. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  20897. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  20898. const imageSink = speedy_vision_default().Image.Sink('image');
  20899. source.media = null;
  20900. screen.size = speedy_vision_default().Size(0, 0);
  20901. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  20902. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  20903. nightvision.gain = NIGHTVISION_GAIN;
  20904. nightvision.offset = NIGHTVISION_OFFSET;
  20905. nightvision.decay = NIGHTVISION_DECAY;
  20906. nightvision.quality = NIGHTVISION_QUALITY;
  20907. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  20908. detector.levels = SCAN_PYRAMID_LEVELS;
  20909. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  20910. detector.threshold = SCAN_FAST_THRESHOLD;
  20911. detector.capacity = 8192;
  20912. subpixel.method = SUBPIXEL_METHOD;
  20913. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  20914. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  20915. clipper.size = TRAIN_MAX_KEYPOINTS;
  20916. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  20917. keypointSink.turbo = false;
  20918. // prepare input
  20919. source.output().connectTo(screen.input());
  20920. screen.output().connectTo(greyscale.input());
  20921. // preprocess image
  20922. greyscale.output().connectTo(nightvisionMux.input('in0'));
  20923. greyscale.output().connectTo(nightvision.input());
  20924. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20925. nightvisionMux.output().connectTo(pyramid.input());
  20926. // keypoint detection
  20927. pyramid.output().connectTo(detector.input());
  20928. detector.output().connectTo(clipper.input());
  20929. // keypoint refinement
  20930. greyscale.output().connectTo(denoiser.input()); // reduce noise
  20931. denoiser.output().connectTo(blurredPyramid.input());
  20932. clipper.output().connectTo(subpixel.input('keypoints'));
  20933. blurredPyramid.output().connectTo(subpixel.input('image'));
  20934. // keypoint description
  20935. greyscale.output().connectTo(blur.input());
  20936. blur.output().connectTo(descriptor.input('image'));
  20937. clipper.output().connectTo(descriptor.input('keypoints'));
  20938. // prepare output
  20939. descriptor.output().connectTo(keypointScaler.input());
  20940. keypointScaler.output().connectTo(keypointSink.input());
  20941. nightvisionMux.output().connectTo(imageSink.input());
  20942. // done!
  20943. pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
  20944. return pipeline;
  20945. }
  20946. /**
20947. * Get the reference image associated with a keypoint of the trained set
20948. * @param keypointIndex index of the keypoint in the trained set
20949. * @returns the reference image, or null if not found
  20950. */
  20951. referenceImageOfKeypoint(keypointIndex) {
  20952. const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
  20953. if (imageIndex < 0)
  20954. return null;
  20955. return this._trainingMap.referenceImage[imageIndex];
  20956. }
  20957. /**
  20958. * Get reference image index
20959. * @param keypointIndex index of the keypoint in the trained set
  20960. * @returns reference image index, or -1 if not found
  20961. */
  20962. referenceImageIndexOfKeypoint(keypointIndex) {
  20963. const n = this._trainingMap.referenceImageIndex.length;
  20964. if (keypointIndex < 0 || keypointIndex >= n)
  20965. return -1;
  20966. const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
  20967. if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
  20968. return -1;
  20969. return imageIndex;
  20970. }
  20971. /**
  20972. * Get keypoint of the trained set
20973. * @param keypointIndex index of the keypoint in the trained set
20974. * @returns the keypoint, or null if not found
  20975. */
  20976. referenceKeypoint(keypointIndex) {
  20977. if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
  20978. return null;
  20979. return this._trainingMap.keypoints[keypointIndex];
  20980. }
  20981. }
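// Editorial example of the flattened training map built above (counts are hypothetical):
// if reference image #0 contributed 120 keypoints and reference image #1 contributed 80,
// then _trainingMap.keypoints holds 200 entries, referenceImageIndexOfKeypoint(150)
// returns 1, referenceImageOfKeypoint(150) returns the second reference image, and
// out-of-range indices return -1 / null.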
  20982. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
  20983. /*
  20984. * MARTINS.js
  20985. * GPU-accelerated Augmented Reality for the web
  20986. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20987. *
  20988. * This program is free software: you can redistribute it and/or modify
  20989. * it under the terms of the GNU Lesser General Public License as published
  20990. * by the Free Software Foundation, either version 3 of the License, or
  20991. * (at your option) any later version.
  20992. *
  20993. * This program is distributed in the hope that it will be useful,
  20994. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20995. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20996. * GNU Lesser General Public License for more details.
  20997. *
  20998. * You should have received a copy of the GNU Lesser General Public License
  20999. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21000. *
  21001. * scanning.ts
  21002. * Scanning state of the Image Tracker
  21003. */
  21004. /** Default target space size (used when training) */
  21005. const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  21006. /** Port of the portal multiplexer: get new data from the camera */
  21007. const PORT_CAMERA = 0;
  21008. /** Port of the portal multiplexer: get previously memorized data */
  21009. const PORT_MEMORY = 1;
  21010. /**
  21011. * Scanning state of the Image Tracker
  21012. */
  21013. class ImageTrackerScanningState extends ImageTrackerState {
  21014. /**
  21015. * Constructor
  21016. * @param imageTracker
  21017. */
  21018. constructor(imageTracker) {
  21019. super('scanning', imageTracker);
  21020. this._counter = 0;
  21021. this._bestScore = 0;
  21022. this._bestHomography = speedy_vision_default().Matrix.Eye(3);
  21023. }
  21024. /**
  21025. * Called as soon as this becomes the active state, just before update() runs for the first time
  21026. * @param settings
  21027. */
  21028. onEnterState(settings) {
  21029. const imagePortalMux = this._pipeline.node('imagePortalMux');
  21030. const lshTables = this._pipeline.node('lshTables');
  21031. const keypoints = settings.keypoints;
  21032. // set attributes
  21033. this._counter = 0;
  21034. this._bestScore = 0;
  21035. // reset the image memorization circuit
  21036. imagePortalMux.port = PORT_CAMERA;
  21037. // prepare the keypoint matcher
  21038. if (keypoints !== undefined)
  21039. lshTables.keypoints = keypoints;
  21040. }
  21041. /**
  21042. * Post processing that takes place just after the GPU processing
  21043. * @param result pipeline results
  21044. * @returns state output
  21045. */
  21046. _afterUpdate(result) {
  21047. const imagePortalMux = this._pipeline.node('imagePortalMux');
  21048. const keypoints = result.keypoints;
  21049. const matchedKeypoints = this._goodMatches(keypoints);
  21050. // tracker output
  21051. const trackerOutput = {
  21052. keypoints: keypoints,
  21053. screenSize: this.screenSize
  21054. };
  21055. // keep the last memorized image
  21056. imagePortalMux.port = PORT_MEMORY;
  21057. // have we found enough matches...?
  21058. if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
  21059. return this._findHomography(matchedKeypoints).then(([homography, score]) => {
  21060. // have we found the best homography so far?
  21061. if (score >= this._bestScore) {
  21062. // store it only if we'll be running the pipeline again
  21063. if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
  21064. this._bestScore = score;
  21065. this._bestHomography = homography;
  21066. // memorize the last image, corresponding to the best homography(*)
  21067. imagePortalMux.port = PORT_CAMERA;
  21068. /*
  21069. (*) technically speaking, this is not exactly the case. Since we're
  21070. using turbo to download the keypoints, there's a slight difference
  21071. between the data used to compute the homography and the last image.
  21072. Still, assuming continuity of the video stream, this logic is
  21073. good enough.
  21074. */
  21075. }
  21076. }
  21077. // find a polyline surrounding the target
  21078. return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
  21079. }).then(polyline => {
  21080. // continue a little longer in the scanning state
  21081. if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
  21082. return {
  21083. nextState: this.name,
  21084. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  21085. };
  21086. }
  21087. // this image should correspond to the best homography
  21088. const snapshot = this._pipeline.node('imagePortalSink');
  21089. // the reference image that we'll track
  21090. const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
  21091. // let's track the target!
  21092. return {
  21093. nextState: 'pre-tracking',
  21094. nextStateSettings: {
  21095. homography: this._bestHomography,
  21096. snapshot: snapshot,
  21097. referenceImage: referenceImage,
  21098. },
  21099. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  21100. };
  21101. }).catch(() => {
  21102. // continue in the scanning state
  21103. return {
  21104. nextState: this.name,
  21105. trackerOutput: trackerOutput,
  21106. };
  21107. });
  21108. }
  21109. else {
  21110. // not enough matches...!
  21111. this._counter = 0;
  21112. this._bestScore = 0;
  21113. }
  21114. // we'll continue to scan the scene
  21115. return speedy_vision_default().Promise.resolve({
  21116. nextState: this.name,
  21117. trackerOutput: trackerOutput,
  21118. });
  21119. }
  21120. /**
  21121. * Find "high quality" matches of a single reference image
  21122. * @param keypoints
  21123. * @returns high quality matches
  21124. */
  21125. _goodMatches(keypoints) {
  21126. const matchedKeypointsPerImageIndex = Object.create(null);
  21127. // filter "good matches"
  21128. for (let j = keypoints.length - 1; j >= 0; j--) {
  21129. const keypoint = keypoints[j];
  21130. if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
  21131. const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
  21132. // the best match should be "much better" than the second best match,
  21133. // which means that they are "distinct enough"
  21134. if (d1 <= SCAN_MATCH_RATIO * d2) {
  21135. const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
  21136. //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
  21137. //if(idx1 == idx2 && idx1 >= 0) {
  21138. if (idx1 >= 0) {
  21139. if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
  21140. matchedKeypointsPerImageIndex[idx1] = [];
  21141. matchedKeypointsPerImageIndex[idx1].push(keypoint);
  21142. }
  21143. }
  21144. }
  21145. }
  21146. // find the image with the most matches
  21147. let matchedKeypoints = [];
  21148. for (const imageIndex in matchedKeypointsPerImageIndex) {
  21149. if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
  21150. matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
  21151. }
  21152. // done!
  21153. return matchedKeypoints;
  21154. }
  21155. /**
  21156. * Find a homography matrix using matched keypoints
  21157. * @param matchedKeypoints "good" matches only
  21158. * @returns homography from reference image space to AR screen space & homography "quality" score
  21159. */
  21160. _findHomography(matchedKeypoints) {
  21161. const srcCoords = [];
  21162. const dstCoords = [];
  21163. // find matching coordinates of the keypoints
  21164. for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
  21165. const matchedKeypoint = matchedKeypoints[i];
  21166. const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
  21167. if (referenceKeypoint != null) {
  21168. srcCoords.push(referenceKeypoint.x);
  21169. srcCoords.push(referenceKeypoint.y);
  21170. dstCoords.push(matchedKeypoint.x);
  21171. dstCoords.push(matchedKeypoint.y);
  21172. }
  21173. else {
  21174. // this shouldn't happen
  21175. return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
  21176. }
  21177. }
  21178. // too few points?
  21179. const n = srcCoords.length / 2;
  21180. if (n < 4) {
  21181. return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
  21182. }
  21183. // compute a homography
  21184. const src = speedy_vision_default().Matrix(2, n, srcCoords);
  21185. const dst = speedy_vision_default().Matrix(2, n, dstCoords);
  21186. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  21187. const homography = speedy_vision_default().Matrix.Zeros(3);
  21188. return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
  21189. method: 'pransac',
  21190. reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
  21191. numberOfHypotheses: 512,
  21192. bundleSize: 128,
  21193. mask: mask,
  21194. }).then(homography => {
  21195. // check if this is a valid homography
  21196. const a00 = homography.at(0, 0);
  21197. if (Number.isNaN(a00))
  21198. throw new DetectionError(`Can't compute homography`);
  21199. // count the number of inliers
  21200. const inliers = mask.read();
  21201. let inlierCount = 0;
  21202. for (let i = inliers.length - 1; i >= 0; i--)
  21203. inlierCount += inliers[i];
  21204. const score = inlierCount / inliers.length;
  21205. // done!
  21206. return [homography, score];
  21207. });
  21208. }
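// Editorial example: with 50 matched keypoints of which 40 are PRANSAC inliers, the
// score computed above is 40 / 50 = 0.8; the scanning state keeps the homography with
// the highest such score seen during the SCAN_CONSECUTIVE_FRAMES window.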
  21209. /**
  21210. * Create & setup the pipeline
  21211. * @returns pipeline
  21212. */
  21213. _createPipeline() {
  21214. const pipeline = speedy_vision_default().Pipeline();
  21215. const source = speedy_vision_default().Image.Source('source');
  21216. const screen = speedy_vision_default().Transform.Resize('screen');
  21217. const greyscale = speedy_vision_default().Filter.Greyscale();
  21218. const blur = speedy_vision_default().Filter.GaussianBlur();
  21219. const nightvision = speedy_vision_default().Filter.Nightvision();
  21220. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  21221. const pyramid = speedy_vision_default().Image.Pyramid();
  21222. const detector = speedy_vision_default().Keypoint.Detector.FAST();
  21223. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  21224. const clipper = speedy_vision_default().Keypoint.Clipper();
  21225. const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
  21226. const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
  21227. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  21228. const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
  21229. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  21230. const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
  21231. const imagePortalBuffer = speedy_vision_default().Image.Buffer();
  21232. const imagePortalCopy = speedy_vision_default().Transform.Resize();
  21233. //const imageSink = Speedy.Image.Sink('image');
  21234. source.media = null;
  21235. screen.size = speedy_vision_default().Size(0, 0);
  21236. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  21237. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  21238. nightvision.gain = NIGHTVISION_GAIN;
  21239. nightvision.offset = NIGHTVISION_OFFSET;
  21240. nightvision.decay = NIGHTVISION_DECAY;
  21241. nightvision.quality = NIGHTVISION_QUALITY;
  21242. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  21243. detector.levels = SCAN_PYRAMID_LEVELS;
  21244. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  21245. detector.threshold = SCAN_FAST_THRESHOLD;
  21246. detector.capacity = 2048;
  21247. clipper.size = SCAN_MAX_KEYPOINTS;
  21248. lshTables.keypoints = [];
  21249. lshTables.numberOfTables = SCAN_LSH_TABLES;
  21250. lshTables.hashSize = SCAN_LSH_HASHSIZE;
  21251. knn.k = 2;
  21252. knn.quality = 'default';
  21253. //knn.quality = 'fastest';
  21254. imagePortalSource.source = imagePortalSink;
  21255. imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
  21256. imagePortalCopy.size = speedy_vision_default().Size(0, 0);
  21257. imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
  21258. keypointSink.turbo = true;
  21259. // prepare input
  21260. source.output().connectTo(screen.input());
  21261. screen.output().connectTo(greyscale.input());
  21262. // preprocess image
  21263. greyscale.output().connectTo(blur.input());
  21264. greyscale.output().connectTo(nightvisionMux.input('in0'));
  21265. greyscale.output().connectTo(nightvision.input());
  21266. nightvision.output().connectTo(nightvisionMux.input('in1'));
  21267. nightvisionMux.output().connectTo(pyramid.input());
  21268. // keypoint detection
  21269. pyramid.output().connectTo(detector.input());
  21270. detector.output().connectTo(clipper.input());
  21271. // keypoint description
  21272. blur.output().connectTo(descriptor.input('image'));
  21273. clipper.output().connectTo(descriptor.input('keypoints'));
  21274. // keypoint matching
  21275. descriptor.output().connectTo(knn.input('keypoints'));
  21276. lshTables.output().connectTo(knn.input('lsh'));
  21277. // prepare output
  21278. clipper.output().connectTo(keypointSink.input());
  21279. knn.output().connectTo(keypointSink.input('matches'));
  21280. //pyramid.output().connectTo(imageSink.input());
  21281. // memorize image
  21282. source.output().connectTo(imagePortalBuffer.input());
  21283. imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
  21284. imagePortalSource.output().connectTo(imagePortalCopy.input());
  21285. imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
  21286. imagePortalMux.output().connectTo(imagePortalSink.input());
  21287. // done!
  21288. pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
  21289. return pipeline;
  21290. }
  21291. }
  21292. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
  21293. /*
  21294. * MARTINS.js
  21295. * GPU-accelerated Augmented Reality for the web
  21296. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21297. *
  21298. * This program is free software: you can redistribute it and/or modify
  21299. * it under the terms of the GNU Lesser General Public License as published
  21300. * by the Free Software Foundation, either version 3 of the License, or
  21301. * (at your option) any later version.
  21302. *
  21303. * This program is distributed in the hope that it will be useful,
  21304. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21305. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21306. * GNU Lesser General Public License for more details.
  21307. *
  21308. * You should have received a copy of the GNU Lesser General Public License
  21309. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21310. *
  21311. * pre-tracking.ts
  21312. * Pre-tracking state of the Image Tracker
  21313. */
  21314. /** Default target space size (used when training) */
  21315. const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  21316. /** Use the camera stream as the input of the pipeline */
  21317. const PORT_CAMERA_IMAGE = 1;
  21318. /** Use the reference image as the input of the pipeline */
  21319. const PORT_REFERENCE_IMAGE = 0;
  21320. /**
21321. * The pre-tracking state of the Image Tracker is an additional training
21322. * phase for the specific target that is about to be tracked
  21323. */
  21324. class ImageTrackerPreTrackingState extends ImageTrackerState {
  21325. /**
  21326. * Constructor
  21327. * @param imageTracker
  21328. */
  21329. constructor(imageTracker) {
  21330. super('pre-tracking', imageTracker);
  21331. this._homography = speedy_vision_default().Matrix.Eye(3);
  21332. this._referenceImage = null;
  21333. this._step = 'read-reference-image';
  21334. this._referenceKeypoints = [];
  21335. this._iterations = 0;
  21336. }
  21337. /**
  21338. * Called as soon as this becomes the active state, just before update() runs for the first time
  21339. * @param settings
  21340. */
  21341. onEnterState(settings) {
  21342. const imagePortalSource = this._pipeline.node('imagePortalSource');
  21343. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  21344. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  21345. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  21346. const homography = settings.homography;
  21347. const referenceImage = settings.referenceImage;
  21348. const snapshot = settings.snapshot;
  21349. // this shouldn't happen
  21350. if (!referenceImage)
  21351. throw new TrackingError(`Can't track a null reference image`);
  21352. // set attributes
  21353. this._homography = homography;
  21354. this._referenceImage = referenceImage;
  21355. this._step = 'read-reference-image';
  21356. this._referenceKeypoints = [];
  21357. this._iterations = 0;
  21358. // setup the pipeline
  21359. imagePortalSource.source = snapshot;
  21360. muxOfReferenceKeypoints.port = 0;
  21361. muxOfBufferOfReferenceKeypoints.port = 0;
  21362. bufferOfReferenceKeypoints.frozen = false;
  21363. }
  21364. /**
  21365. * Called just before the GPU processing
  21366. * @returns promise
  21367. */
  21368. _beforeUpdate() {
  21369. const referenceImage = this._referenceImage;
  21370. const source = this._pipeline.node('source');
  21371. const sourceMux = this._pipeline.node('sourceMux');
  21372. const imageRectifier = this._pipeline.node('imageRectifier');
  21373. const keypointRectifier = this._pipeline.node('keypointRectifier');
  21374. const borderClipper = this._pipeline.node('borderClipper');
  21375. const screenSize = this.screenSize;
  21376. // set the source media to the reference image we're going to track
  21377. const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
  21378. source.media = targetMedia;
  21379. // setup the source multiplexer
  21380. if (this._step == 'read-reference-image')
  21381. sourceMux.port = PORT_REFERENCE_IMAGE;
  21382. else
  21383. sourceMux.port = PORT_CAMERA_IMAGE;
  21384. // clip keypoints from the borders of the target image
  21385. borderClipper.imageSize = screenSize;
  21386. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  21387. // rectify the image
  21388. const rectify = (this._step == 'read-reference-image') ?
  21389. this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
  21390. this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
  21391. return rectify.then(rectificationMatrix => {
  21392. imageRectifier.transform = rectificationMatrix;
  21393. });
  21394. }
  21395. /**
  21396. * Post processing that takes place just after the GPU processing
  21397. * @param result pipeline results
  21398. * @returns state output
  21399. */
  21400. _afterUpdate(result) {
  21401. const referenceImage = this._referenceImage;
  21402. const imagePortalSink = this._pipeline.node('imagePortal');
  21403. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  21404. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  21405. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  21406. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  21407. const keypoints = result.keypoints;
  21408. const image = result.image;
  21409. // tracker output
  21410. const trackerOutput = {
  21411. keypoints: image !== undefined ? keypoints : undefined,
  21412. image: image,
  21413. screenSize: this.screenSize,
  21414. };
  21415. // decide what to do next
  21416. switch (this._step) {
  21417. case 'read-reference-image': {
  21418. // enable matching
  21419. muxOfReferenceKeypoints.port = 1;
  21420. // store reference keypoints
  21421. this._referenceKeypoints = keypoints;
  21422. // next step
  21423. this._step = 'warp-camera-image';
  21424. return speedy_vision_default().Promise.resolve({
  21425. nextState: 'pre-tracking',
  21426. trackerOutput: trackerOutput,
  21427. });
  21428. }
  21429. case 'warp-camera-image': {
  21430. // freeze reference keypoints
  21431. bufferOfReferenceKeypoints.frozen = true;
  21432. muxOfBufferOfReferenceKeypoints.port = 1;
  21433. // refine warp?
  21434. if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
  21435. this._step = 'warp-camera-image';
  21436. else
  21437. this._step = 'train-camera-image';
  21438. // warp image & go to next step
  21439. return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
  21440. nextState: 'pre-tracking',
  21441. trackerOutput: trackerOutput,
  21442. })).catch(err => {
  21443. Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
  21444. return {
  21445. nextState: 'scanning',
  21446. trackerOutput: trackerOutput,
  21447. };
  21448. });
  21449. }
  21450. case 'train-camera-image': {
  21451. // log
  21452. Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
  21453. // change the coordinates
  21454. return this._changeSpace(this._homography, this.screenSize).then(homography => {
  21455. // we're ready to track the target!
  21456. return speedy_vision_default().Promise.resolve({
  21457. //nextState: 'pre-tracking',
  21458. nextState: 'tracking',
  21459. trackerOutput: trackerOutput,
  21460. nextStateSettings: {
  21461. homography: homography,
  21462. referenceImage: referenceImage,
  21463. templateKeypoints: keypoints,
  21464. keypointPortalSink: keypointPortalSink,
  21465. imagePortalSink: imagePortalSink,
  21466. screenSize: this.screenSize,
  21467. },
  21468. });
  21469. });
  21470. }
  21471. }
  21472. }
  21473. /**
  21474. * Find an adjustment warp between the camera image and the reference image
  21475. * @param dstKeypoints destination
  21476. * @param srcKeypoints source
  21477. * @returns a promise that resolves to a 3x3 homography
  21478. */
  21479. _findWarp(dstKeypoints, srcKeypoints) {
  21480. //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
  21481. const srcCoords = [];
  21482. const dstCoords = [];
  21483. // find matching coordinates of the keypoints
  21484. for (let i = 0; i < dstKeypoints.length; i++) {
  21485. const dstKeypoint = dstKeypoints[i];
  21486. if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
  21487. const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
  21488. // the best match should be "much better" than the second best match,
  21489. // which means that they are "distinct enough"
  21490. if (d1 <= TRACK_MATCH_RATIO * d2) {
  21491. const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
  21492. srcCoords.push(srcKeypoint.x);
  21493. srcCoords.push(srcKeypoint.y);
  21494. dstCoords.push(dstKeypoint.x);
  21495. dstCoords.push(dstKeypoint.y);
  21496. }
  21497. }
  21498. }
  21499. // too few points?
  21500. const n = srcCoords.length / 2;
  21501. if (n < 4) {
  21502. return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
  21503. }
  21504. // compute warp
  21505. const model = speedy_vision_default().Matrix.Eye(3);
  21506. return this._findKeypointWarp().then(transform =>
  21507. // rectify keypoints
  21508. speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
  21509. // find warp
  21510. speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
  21511. method: 'pransac',
  21512. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21513. numberOfHypotheses: 512 * 4,
  21514. bundleSize: 128,
  21515. })).then(_ => {
  21516. // validate the model
  21517. const a00 = model.at(0, 0);
  21518. if (Number.isNaN(a00))
  21519. throw new TrackingError(`Can't compute warp: bad keypoints`);
  21520. // done!
  21521. return model;
  21522. });
  21523. }
  21524. /**
  21525. * Find a warp to be applied to the keypoints
  21526. * @returns affine transform
  21527. */
  21528. _findKeypointWarp() {
  21529. const referenceImage = this._referenceImage;
  21530. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21531. const screenSize = this.screenSize;
  21532. // no rotation is needed
  21533. if (!this._mustRotateWarpedImage(media, screenSize))
  21534. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
  21535. // rotate by 90 degrees clockwise around the pivot
  21536. const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
  21537. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
  21538. 0, 1, 0,
  21539. -1, 0, 0,
  21540. py + px, py - px, 1,
  21541. ]));
  21542. }
  21543. /**
  21544. * Change the space of the homography in order to improve tracking quality
  21545. * @param homography mapping coordinates from normalized target space to AR screen space
  21546. * @param screenSize AR screen size
  21547. * @returns homography mapping coordinates from AR screen space to AR screen space
  21548. */
  21549. _changeSpace(homography, screenSize) {
  21550. const sw = screenSize.width, sh = screenSize.height;
  21551. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  21552. const mat = speedy_vision_default().Matrix.Zeros(3);
  21553. return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
  21554. }
  21555. /**
  21556. * Create & setup the pipeline
  21557. * @returns pipeline
  21558. */
  21559. _createPipeline() {
  21560. const pipeline = speedy_vision_default().Pipeline();
  21561. const source = speedy_vision_default().Image.Source('source');
  21562. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  21563. const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
  21564. const screen = speedy_vision_default().Transform.Resize('screen');
  21565. const greyscale = speedy_vision_default().Filter.Greyscale();
  21566. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  21567. const nightvision = speedy_vision_default().Filter.Nightvision();
  21568. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  21569. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  21570. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  21571. const blur = speedy_vision_default().Filter.GaussianBlur();
  21572. const clipper = speedy_vision_default().Keypoint.Clipper();
  21573. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  21574. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  21575. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  21576. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  21577. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  21578. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  21579. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  21580. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
  21581. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
  21582. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
  21583. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  21584. const imageSink = speedy_vision_default().Image.Sink('image');
  21585. source.media = null;
  21586. screen.size = speedy_vision_default().Size(0, 0);
  21587. imagePortalSource.source = null;
  21588. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21589. sourceMux.port = PORT_REFERENCE_IMAGE;
  21590. nightvision.gain = NIGHTVISION_GAIN;
  21591. nightvision.offset = NIGHTVISION_OFFSET;
  21592. nightvision.decay = NIGHTVISION_DECAY;
  21593. nightvision.quality = NIGHTVISION_QUALITY;
  21594. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  21595. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  21596. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  21597. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  21598. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  21599. detector.quality = TRACK_HARRIS_QUALITY;
  21600. detector.capacity = TRACK_DETECTOR_CAPACITY;
  21601. subpixel.method = SUBPIXEL_METHOD;
  21602. clipper.size = TRACK_MAX_KEYPOINTS;
  21603. borderClipper.imageSize = screen.size;
  21604. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  21605. matcher.k = 2;
  21606. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21607. keypointPortalSource.source = keypointPortalSink;
  21608. muxOfReferenceKeypoints.port = 0;
  21609. muxOfBufferOfReferenceKeypoints.port = 0;
  21610. bufferOfReferenceKeypoints.frozen = false;
  21611. keypointSink.turbo = false;
  21612. // prepare input
  21613. source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
  21614. imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
  21615. sourceMux.output().connectTo(screen.input());
  21616. screen.output().connectTo(greyscale.input());
  21617. // preprocess images
  21618. greyscale.output().connectTo(imageRectifier.input());
  21619. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  21620. imageRectifier.output().connectTo(nightvision.input());
  21621. nightvision.output().connectTo(nightvisionMux.input('in1'));
  21622. nightvisionMux.output().connectTo(blur.input());
  21623. // keypoint detection & clipping
  21624. nightvisionMux.output().connectTo(detector.input());
  21625. detector.output().connectTo(borderClipper.input());
  21626. borderClipper.output().connectTo(clipper.input());
  21627. // keypoint refinement
  21628. imageRectifier.output().connectTo(denoiser.input());
  21629. denoiser.output().connectTo(subpixel.input('image'));
  21630. clipper.output().connectTo(subpixel.input('keypoints'));
  21631. // keypoint description
  21632. blur.output().connectTo(descriptor.input('image'));
  21633. subpixel.output().connectTo(descriptor.input('keypoints'));
  21634. // keypoint matching
  21635. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  21636. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  21637. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  21638. descriptor.output().connectTo(matcher.input('keypoints'));
  21639. // store reference keypoints
  21640. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  21641. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  21642. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  21643. // portals
  21644. descriptor.output().connectTo(keypointPortalSink.input());
  21645. // prepare output
  21646. descriptor.output().connectTo(keypointRectifier.input());
  21647. keypointRectifier.output().connectTo(keypointSink.input());
  21648. matcher.output().connectTo(keypointSink.input('matches'));
  21649. //imageRectifier.output().connectTo(imageSink.input());
  21650. // done!
  21651. pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  21652. return pipeline;
  21653. }
  21654. }
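/*
 * A minimal standalone sketch of the ratio-test filtering performed in
 * _findWarp() above: a match is kept only if its best distance is much
 * smaller than its second-best distance. The function name, the plain
 * keypoint objects ({ x, y, matches: [{ index, distance }, ...] }) and the
 * 0.7 default threshold are illustrative assumptions, not part of the
 * MARTINS.js API (the tracker uses its own TRACK_MATCH_RATIO constant).
 */
function ratioTestSketch(dstKeypoints, srcKeypoints, ratio = 0.7)
{
    const srcCoords = [], dstCoords = [];
    for (let i = 0; i < dstKeypoints.length; i++) {
        const keypoint = dstKeypoints[i];
        const [best, second] = keypoint.matches;
        // skip keypoints without two valid matches
        if (best.index < 0 || second.index < 0)
            continue;
        // keep the match only if it is "distinct enough"
        if (best.distance <= ratio * second.distance) {
            const src = srcKeypoints[best.index];
            srcCoords.push(src.x, src.y);
            dstCoords.push(keypoint.x, keypoint.y);
        }
    }
    // _findWarp() above requires at least 4 such correspondences to compute a warp
    return { srcCoords, dstCoords, count: srcCoords.length / 2 };
}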
  21655. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
  21656. /*
  21657. * MARTINS.js
  21658. * GPU-accelerated Augmented Reality for the web
  21659. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21660. *
  21661. * This program is free software: you can redistribute it and/or modify
  21662. * it under the terms of the GNU Lesser General Public License as published
  21663. * by the Free Software Foundation, either version 3 of the License, or
  21664. * (at your option) any later version.
  21665. *
  21666. * This program is distributed in the hope that it will be useful,
  21667. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21668. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21669. * GNU Lesser General Public License for more details.
  21670. *
  21671. * You should have received a copy of the GNU Lesser General Public License
  21672. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21673. *
  21674. * image-tracker-event.ts
  21675. * Events emitted by an Image Tracker
  21676. */
  21677. /**
  21678. * An event emitted by an Image Tracker
  21679. */
  21680. class ImageTrackerEvent extends AREvent {
  21681. /**
  21682. * Constructor
  21683. * @param type event type
  21684. * @param referenceImage optional reference image
  21685. */
  21686. constructor(type, referenceImage) {
  21687. super(type);
  21688. this._referenceImage = referenceImage;
  21689. }
  21690. /**
  21691. * Reference image
  21692. */
  21693. get referenceImage() {
  21694. return this._referenceImage;
  21695. }
  21696. }
  21697. ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
  21698. /*
  21699. * MARTINS.js
  21700. * GPU-accelerated Augmented Reality for the web
  21701. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21702. *
  21703. * This program is free software: you can redistribute it and/or modify
  21704. * it under the terms of the GNU Lesser General Public License as published
  21705. * by the Free Software Foundation, either version 3 of the License, or
  21706. * (at your option) any later version.
  21707. *
  21708. * This program is distributed in the hope that it will be useful,
  21709. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21710. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21711. * GNU Lesser General Public License for more details.
  21712. *
  21713. * You should have received a copy of the GNU Lesser General Public License
  21714. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21715. *
  21716. * camera-model.ts
  21717. * Camera model
  21718. */
  21719. /** Number of samples we'll be keeping to help calibrate the camera */
  21720. const INTRISICS_SAMPLES = 401; //201; //31; // odd number
  21721. /** Whether or not to auto-calibrate the camera */
  21722. const FOVY_AUTODETECT = false; //true;
  21723. /** A guess of the vertical field-of-view of a generic camera, in degrees */
  21724. const FOVY_GUESS = 45; //50; // will be part of the viewing frustum
  21725. /** Number of iterations used to refine the estimated pose */
  21726. const POSE_ITERATIONS = 30;
  21727. /** Number of samples used in the rotation filter */
  21728. const ROTATION_FILTER_SAMPLES = 10;
  21729. /** Number of samples used in the translation filter */
  21730. const TRANSLATION_FILTER_SAMPLES = 10;
  21731. /** Convert degrees to radians */
  21732. const DEG2RAD = 0.017453292519943295; // pi / 180
  21733. /** Convert radians to degrees */
  21734. const RAD2DEG = 57.29577951308232; // 180 / pi
  21735. /** Numerical tolerance */
  21736. const EPSILON = 1e-6;
  21737. /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
  21738. const FX = 0;
  21739. /** Index of the vertical focal length in the camera intrinsics matrix */
  21740. const FY = 4;
  21741. /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
  21742. const U0 = 6;
  21743. /** Index of the vertical position of the principal point in the camera intrinsics matrix */
  21744. const V0 = 7;
  21745. /** Translation refinement: predefined buffers for efficiency */
  21746. const TRANSLATION_REFINEMENT_BUFFERS = (() => {
  21747. const l = 1.0;
  21748. const x = [0, l, 0, -l, 0];
  21749. const y = [-l, 0, l, 0, 0];
  21750. const n = x.length;
  21751. return Object.freeze({
  21752. x, y,
  21753. a1: new Array(n),
  21754. a2: new Array(n),
  21755. a3: new Array(n),
  21756. m: new Array(3 * n * 3),
  21757. v: new Array(3 * n),
  21758. t: new Array(3),
  21759. r: new Array(3 * n),
  21760. c: new Array(3),
  21761. Mc: new Array(3 * n),
  21762. });
  21763. })();
  21764. /** Translation refinement: number of iterations */
  21765. const TRANSLATION_REFINEMENT_ITERATIONS = 3; // 1; // 5;
  21766. /** Translation refinement: number of samples */
  21767. const TRANSLATION_REFINEMENT_SAMPLES = 5; // TRANSLATION_REFINEMENT_BUFFERS.x.length;
21768. /** Translation refinement: three times the number of samples */
  21769. const TRANSLATION_REFINEMENT_SAMPLES_3X = 15; //3 * TRANSLATION_REFINEMENT_SAMPLES;
  21770. /**
  21771. * Camera model
  21772. */
  21773. class CameraModel {
  21774. /**
  21775. * Constructor
  21776. */
  21777. constructor() {
  21778. this._screenSize = speedy_vision_default().Size(0, 0);
  21779. this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
  21780. this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // identity matrix
  21781. this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // no rotation & no translation [ R | t ] = [ I | 0 ]
  21782. this._f = (new Array(INTRISICS_SAMPLES)).fill(this._intrinsics[FY]);
  21783. this._fp = 0;
  21784. this._partialRotationBuffer = [];
  21785. this._translationBuffer = [];
  21786. }
  21787. /**
  21788. * Initialize the model
21789. * @param screenSize AR screen size, in pixels
  21790. */
  21791. init(screenSize) {
  21792. // validate
  21793. if (screenSize.area() == 0)
  21794. throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
  21795. // set the screen size
  21796. this._screenSize.width = screenSize.width;
  21797. this._screenSize.height = screenSize.height;
  21798. // reset the model
  21799. this._resetIntrinsics();
  21800. this._resetExtrinsics();
  21801. // log
  21802. Utils.log(`Initializing the camera model...`);
  21803. }
  21804. /**
  21805. * Release the model
  21806. */
  21807. release() {
  21808. this.reset();
  21809. return null;
  21810. }
  21811. /**
  21812. * Update the camera model
  21813. * @param homography 3x3 perspective transform
  21814. * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
  21815. * @returns promise that resolves to a camera matrix
  21816. */
  21817. update(homography, screenSize) {
  21818. // validate the shape of the homography
  21819. if (homography.rows != 3 || homography.columns != 3)
  21820. throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
  21821. // validate screenSize
  21822. if (screenSize.area() == 0)
  21823. throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
  21824. // changed screen size?
  21825. if (!this._screenSize.equals(screenSize)) {
  21826. Utils.log(`Camera model: detected a change in screen size...`);
  21827. // update the screen size
  21828. this._screenSize.width = screenSize.width;
  21829. this._screenSize.height = screenSize.height;
  21830. // reset camera
  21831. this.reset();
  21832. }
  21833. // read the entries of the homography
  21834. const h = homography.read();
  21835. const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
  21836. // validate the homography (homography matrices aren't singular)
  21837. const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
  21838. if (Math.abs(det) < EPSILON) {
  21839. Utils.warning(`Can't update the camera model using an invalid homography matrix`);
  21840. return speedy_vision_default().Promise.resolve(this._matrix);
  21841. }
  21842. // estimate the focal length (auto-calibration)
  21843. const f = this._estimateFocal(homography);
  21844. if (f > 0)
  21845. this._storeFocal(f);
  21846. //console.log(this.fovy * RAD2DEG);
  21847. // estimate the pose
  21848. const pose = this._estimatePose(homography);
  21849. this._storePose(pose);
  21850. // compute the camera matrix
  21851. const C = this.denormalizer();
  21852. const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
  21853. const E = speedy_vision_default().Matrix(3, 4, this._extrinsics);
  21854. this._matrix.setToSync(K.times(E).times(C));
  21855. //console.log("intrinsics -----------", K.toString());
  21856. //console.log("matrix ----------------",this._matrix.toString());
  21857. return speedy_vision_default().Promise.resolve(this._matrix);
  21858. }
  21859. /**
  21860. * Reset camera model
  21861. */
  21862. reset() {
  21863. this._resetIntrinsics();
  21864. this._resetExtrinsics();
  21865. }
  21866. /**
  21867. * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
  21868. * 2D AR screen space (measured in pixels)
  21869. * @returns 3x4 camera matrix
  21870. */
  21871. get matrix() {
  21872. return this._matrix;
  21873. }
  21874. /**
  21875. * Camera intrinsics matrix
  21876. * @returns 3x3 intrinsics matrix in column-major format
  21877. */
  21878. get intrinsics() {
  21879. return this._intrinsics;
  21880. }
  21881. /**
  21882. * Camera extrinsics matrix
  21883. * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
  21884. */
  21885. get extrinsics() {
  21886. return this._extrinsics;
  21887. }
  21888. /**
  21889. * Convert coordinates from normalized space [-1,1]^3 to a
  21890. * "3D pixel space" based on the dimensions of the AR screen.
  21891. *
21892. * We perform a 180-degree rotation around the x-axis so that
  21893. * it looks nicer (the y-axis grows downwards in image space).
  21894. *
  21895. * The final camera matrix is P = K * [ R | t ] * C, where
  21896. * C is this conversion matrix. The intent behind this is to
  21897. * make tracking independent of target and screen sizes.
  21898. *
  21899. * Reminder: we use a right-handed coordinate system in 3D!
  21900. * In 2D image space the coordinate system is left-handed.
  21901. *
  21902. * @returns 4x4 conversion matrix C
  21903. */
  21904. denormalizer() {
  21905. const w = this._screenSize.width / 2; // half width, in pixels
  21906. const h = this._screenSize.height / 2; // half height, in pixels
  21907. const d = Math.min(w, h); // virtual unit length, in pixels
  21908. /*
  21909. return Speedy.Matrix(4, 4, [
  21910. 1, 0, 0, 0,
  21911. 0,-1, 0, 0,
  21912. 0, 0,-1, 0,
  21913. w/d, h/d, 0, 1/d
  21914. ]);
  21915. */
  21916. return speedy_vision_default().Matrix(4, 4, [
  21917. d, 0, 0, 0,
  21918. 0, -d, 0, 0,
  21919. 0, 0, -d, 0,
  21920. w, h, 0, 1,
  21921. ]);
  21922. }
  21923. /**
  21924. * Size of the AR screen space, in pixels
  21925. * @returns size in pixels
  21926. */
  21927. get screenSize() {
  21928. return this._screenSize;
  21929. }
  21930. /**
21931. * Focal length in pixel units (the projection distance in the pinhole camera model),
21932. * i.e., (focal length in mm) * (pixel density in pixels/mm)
  21933. * @returns focal length
  21934. */
  21935. get focalLength() {
  21936. return this._intrinsics[FY]; // fx == fy
  21937. }
  21938. /**
  21939. * Horizontal field-of-view, given in radians
21940. * @returns horizontal field-of-view
  21941. */
  21942. get fovx() {
  21943. return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
  21944. }
  21945. /**
  21946. * Vertical field-of-view, given in radians
  21947. * @returns vertical field-of-view
  21948. */
  21949. get fovy() {
  21950. return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
  21951. }
  21952. /**
  21953. * Principal point
  21954. * @returns principal point, in pixel coordinates
  21955. */
  21956. principalPoint() {
  21957. return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
  21958. }
  21959. /**
  21960. * Reset camera extrinsics
  21961. */
  21962. _resetExtrinsics() {
  21963. // set the rotation matrix to the identity
  21964. this._extrinsics.fill(0);
  21965. this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
  21966. // reset filters
  21967. this._partialRotationBuffer.length = 0;
  21968. this._translationBuffer.length = 0;
  21969. }
  21970. /**
  21971. * Reset camera intrinsics
  21972. */
  21973. _resetIntrinsics() {
  21974. const u0 = this._screenSize.width / 2;
  21975. const v0 = this._screenSize.height / 2;
  21976. const f = v0 / Math.tan(DEG2RAD * FOVY_GUESS / 2);
  21977. this._intrinsics[FX] = f;
  21978. this._intrinsics[FY] = f;
  21979. this._intrinsics[U0] = u0;
  21980. this._intrinsics[V0] = v0;
  21981. this._f.fill(this._intrinsics[FY]);
  21982. this._fp = 0;
  21983. }
  21984. /**
  21985. * Estimate the focal length
  21986. * @param homography valid homography
21987. * @returns estimated focal length (or the current estimate if it can't be computed), or 0 if auto-detection is disabled
  21988. */
  21989. _estimateFocal(homography) {
  21990. // auto-detect the focal length?
  21991. if (!FOVY_AUTODETECT)
  21992. return 0;
  21993. // read the entries of the homography
  21994. const h = homography.read();
  21995. const h11 = h[0], h12 = h[3]; //, h13 = h[6];
  21996. const h21 = h[1], h22 = h[4]; //, h23 = h[7];
  21997. const h31 = h[2], h32 = h[5]; //, h33 = h[8];
  21998. // read the principal point
  21999. const u0 = this._intrinsics[U0];
  22000. const v0 = this._intrinsics[V0];
  22001. // estimate the focal length based on the orthogonality
  22002. // constraint r1'r2 = 0 of a rotation matrix
  22003. const f2 = -((h11 / h31 - u0) * (h12 / h32 - u0) + (h21 / h31 - v0) * (h22 / h32 - v0));
  22004. // can't estimate it?
  22005. if (f2 < 0)
  22006. return this._intrinsics[FY];
  22007. //return 0;
  22008. // done!
  22009. return Math.sqrt(f2);
  22010. }
  22011. /**
  22012. * Store an estimated focal length
  22013. * @param f estimated focal length
  22014. */
  22015. _storeFocal(f) {
  22016. // store the focal length
  22017. this._f[this._fp] = f;
  22018. this._fp = (this._fp + 1) % INTRISICS_SAMPLES;
  22019. // take the median of the estimated focal lengths
  22020. const sorted = this._f.concat([]).sort((a, b) => a - b);
  22021. const median = sorted[sorted.length >>> 1];
  22022. // update the intrinsics matrix
  22023. this._intrinsics[FX] = this._intrinsics[FY] = median;
  22024. /*
  22025. // test
  22026. const u0 = this._intrinsics[U0];
  22027. const v0 = this._intrinsics[V0];
  22028. const fovx = 2 * Math.atan(u0 / median) * RAD2DEG;
  22029. const fovy = 2 * Math.atan(v0 / median) * RAD2DEG;
  22030. console.log('---------------');
  22031. console.log("fov:",fovx,fovy);
  22032. console.log("f:",median);
  22033. */
  22034. }
  22035. /**
  22036. * Compute a normalized homography H' = K^(-1) * H for an
  22037. * ideal pinhole with f = 1 and principal point = (0,0)
  22038. * @param homography homography H to be normalized
  22039. * @param f focal length
  22040. * @returns normalized homography H'
  22041. */
  22042. _normalizeHomography(homography, f = this._intrinsics[FY]) {
  22043. const h = homography.read();
  22044. const u0 = this._intrinsics[U0];
  22045. const v0 = this._intrinsics[V0];
  22046. const h11 = h[0] - u0 * h[2], h12 = h[3] - u0 * h[5], h13 = h[6] - u0 * h[8];
  22047. const h21 = h[1] - v0 * h[2], h22 = h[4] - v0 * h[5], h23 = h[7] - v0 * h[8];
  22048. const h31 = h[2] * f, h32 = h[5] * f, h33 = h[8] * f;
  22049. return speedy_vision_default().Matrix(3, 3, [
  22050. h11, h21, h31,
  22051. h12, h22, h32,
  22052. h13, h23, h33,
  22053. ]);
  22054. }
  22055. /**
  22056. * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
  22057. * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
  22058. * @returns a 3x3 matrix
  22059. */
  22060. _estimatePartialPose(normalizedHomography) {
  22061. const h = normalizedHomography.read();
  22062. const h11 = h[0], h12 = h[3], h13 = h[6];
  22063. const h21 = h[1], h22 = h[4], h23 = h[7];
  22064. const h31 = h[2], h32 = h[5], h33 = h[8];
  22065. // select the sign so that t3 = tz > 0
  22066. const sign = h33 >= 0 ? 1 : -1;
  22067. // compute the scale factor
  22068. const h1norm = Math.sqrt(h11 * h11 + h21 * h21 + h31 * h31);
  22069. const h2norm = Math.sqrt(h12 * h12 + h22 * h22 + h32 * h32);
  22070. //const scale = sign * 2 / (h1norm + h2norm);
  22071. //const scale = sign / h1norm;
  22072. //const scale = sign / h2norm;
  22073. const scale = sign / Math.max(h1norm, h2norm); // this seems to work. why?
  22074. // invalid homography?
  22075. if (Number.isNaN(scale))
  22076. return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
  22077. // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
  22078. // if h1norm is not approximately h2norm, it means that the first two columns of
  22079. // the normalized homography are not really encoding a rotation (up to a scale)
  22080. // what is causing this? does h3 (and h33) tell us anything about it?
  22081. // what about the intrinsics matrix? the principal point...? the fov...?
  22082. //console.log("h1,h2",h1norm,h2norm);
  22083. //console.log(normalizedHomography.toString());
  22084. // recover the translation and the rotation
  22085. const t1 = scale * h13;
  22086. const t2 = scale * h23;
  22087. const t3 = scale * h33;
  22088. const r11 = scale * h11;
  22089. const r21 = scale * h21;
  22090. const r31 = scale * h31;
  22091. const r12 = scale * h12;
  22092. const r22 = scale * h22;
  22093. const r32 = scale * h32;
  22094. // refine the pose
  22095. const r = this._refineRotation(r11, r21, r31, r12, r22, r32);
  22096. const t = this._refineTranslation(normalizedHomography, r, [t1, t2, t3]);
  22097. //const t = [t1, t2, t3]; // faster, but less accurate
  22098. // done!
  22099. return speedy_vision_default().Matrix(3, 3, r.concat(t)); // this is possibly NaN... why? homography...
  22100. }
  22101. /**
  22102. * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
  22103. * @param r11 x of r1
  22104. * @param r21 y of r1
  22105. * @param r31 z of r1
  22106. * @param r12 x of r2
  22107. * @param r22 y of r2
  22108. * @param r32 z of r2
  22109. * @returns a 3x2 matrix R such that R'R = I (column-major format)
  22110. */
  22111. _refineRotation(r11, r21, r31, r12, r22, r32) {
  22112. /*
  22113. A little technique I figured out to correct the rotation vectors
  22114. ----------------------------------------------------------------
  22115. We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
  22116. R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
  22117. because vectors r1 and r2 are not perfectly orthonormal due to noise.
  22118. Let's first notice that R'R is symmetric. You can easily check that its
  22119. two eigenvalues are both real and positive (as long as r1, r2 != 0 and
  22120. r1 is not parallel to r2, but we never take such vectors as input).
22121. R'R = [ r1'r1  r1'r2 ; r1'r2  r2'r2 ] is of rank 2, positive-definite
  22123. We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
  22124. chosen to be orthogonal and D is a diagonal matrix whose entries are
  22125. the eigenvalues of R'R.
  22126. Let LL' be the Cholesky decomposition of D. Such decomposition exists
  22127. and is trivially computed: just take the square roots of the entries of
  22128. D. Since L is diagonal, we have L = L'. Its inverse is also trivially
  22129. computed - call it Linv.
  22130. Now, define a 2x2 correction matrix C as follows:
  22131. C = Q * Linv * Q'
  22132. This matrix rotates the input vector, scales it by some amount, and
  22133. then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
  22134. We compute RC in order to correct the rotation vectors. We take its
  22135. two columns as the corrected vectors.
  22136. In order to show that the two columns of RC are orthonormal, we can
  22137. show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
  22138. expand the expression:
  22139. (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
  22140. Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
  22141. Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
  22142. I have provided below a closed formula to correct the rotation vectors.
  22143. What C does to R is very interesting: it makes the singular values
  22144. become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
  22145. values of R are the square roots of the eigenvalues of R'R. Letting
  22146. S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
  22147. means that RC is equivalent to the correction "trick" using the SVD
  22148. found in the computer vision literature (i.e., compute the SVD and
  22149. return U V'). That "trick" is known to return the rotation matrix that
  22150. minimizes the Frobenius norm of the difference between the input and
  22151. the output. Consequently, the technique I have just presented is also
  22152. optimal in that sense!
  22153. By the way, the input matrix R does not need to be 3x2.
  22154. */
  22155. // compute the entries of R'R
  22156. const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
  22157. const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
  22158. const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
  22159. // compute the two real eigenvalues of R'R
  22160. const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
  22161. const sqrt = Math.sqrt(delta); // delta >= 0 always
  22162. const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
  22163. const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
  22164. // compute two unit eigenvectors qi = (xi,yi) of R'R
  22165. const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
  22166. const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
  22167. const y1 = x1 / alpha1;
  22168. const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
  22169. const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
  22170. const y2 = x2 / alpha2;
  22171. // compute the Cholesky decomposition LL' of the diagonal matrix D
  22172. // whose entries are the two eigenvalues of R'R and then invert L
  22173. const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
  22174. const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
  22175. // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
  22176. // is orthogonal and Linv is computed as above
  22177. const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
  22178. const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
  22179. const C = Q.times(Linv).times(Qt);
  22180. // correct the rotation vectors r1 and r2 using C
  22181. const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
  22182. return speedy_vision_default().Matrix(R.times(C)).read();
  22183. }
  22184. /**
  22185. * Compute a refined translation vector
  22186. * @param normalizedHomography ideal pinhole K = I
  22187. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  22188. * @param t0 initial estimate for the translation vector
  22189. * @returns 3x1 translation vector in column-major format
  22190. */
  22191. _refineTranslation(normalizedHomography, rot, t0) {
  22192. /*
  22193. Given a normalized homography H, the rotation vectors r1, r2, and a
  22194. translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
  22195. scale factor s.
  22196. If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
  22197. [ r1 | r2 | t ] u is parallel to H u, which means that their cross
  22198. product is zero:
  22199. [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
  22200. The following code finds an optimal translation vector t based on the
  22201. above observation. H, r1, r2 are known.
  22202. */
  22203. const B = TRANSLATION_REFINEMENT_BUFFERS;
  22204. const n = TRANSLATION_REFINEMENT_SAMPLES;
  22205. const n3 = TRANSLATION_REFINEMENT_SAMPLES_3X;
  22206. Utils.assert(B.x.length === n);
  22207. const h = normalizedHomography.read();
  22208. const h11 = h[0], h12 = h[3], h13 = h[6];
  22209. const h21 = h[1], h22 = h[4], h23 = h[7];
  22210. const h31 = h[2], h32 = h[5], h33 = h[8];
  22211. const r11 = rot[0], r12 = rot[3];
  22212. const r21 = rot[1], r22 = rot[4];
  22213. const r31 = rot[2], r32 = rot[5];
  22214. // get sample points (xi, yi), 0 <= i < n
  22215. const x = B.x, y = B.y;
  22216. // set auxiliary values: ai = H [ xi yi 1 ]'
  22217. const a1 = B.a1, a2 = B.a2, a3 = B.a3;
  22218. for (let i = 0; i < n; i++) {
  22219. a1[i] = x[i] * h11 + y[i] * h12 + h13;
  22220. a2[i] = x[i] * h21 + y[i] * h22 + h23;
  22221. a3[i] = x[i] * h31 + y[i] * h32 + h33;
  22222. }
  22223. // solve M t = v for t; M: 3n x 3, v: 3n x 1, t: 3 x 1 (linear least squares)
  22224. const m = B.m, v = B.v;
  22225. for (let i = 0, k = 0; k < n; i += 3, k++) {
  22226. m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
  22227. m[i + n3] = -(m[i + 1] = a3[k]);
  22228. m[i + 2] = -(m[i + n3 + n3] = a2[k]);
  22229. m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
  22230. v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
  22231. v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
  22232. v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
  22233. }
  22234. /*
  22235. // this works, but I want more lightweight
  22236. const M = Speedy.Matrix(n3, 3, m);
  22237. const v_ = Speedy.Matrix(n3, 1, v);
  22238. return Speedy.Matrix(M.ldiv(v_)).read();
  22239. */
  22240. /*
  22241. Gradient descent with optimal step size / learning rate
  22242. -------------------------------------------------------
  22243. Let's find the column-vector x that minimizes the error function
  22244. E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
  22245. least squares. We want to find x easily, QUICKLY and iteratively.
  22246. The update rule of gradient descent is set to:
  22247. x := x - w * grad(E)
  22248. where w is the learning rate and grad(E) is the gradient of E(x):
  22249. grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
  22250. Let's adjust w to make x "converge quickly". Define function S(w) as:
  22251. S(w) = x - w * grad(E) (step)
  22252. and another function F(w) as:
  22253. F(w) = E(S(w))
  22254. which is the error of the step. We minimize F by setting its derivative
  22255. to zero:
22256. 0 = dF/dw = (dF/dS)(dS/dw)
22258. What follows is a fair amount of algebra. Do the math and you'll find
22259. the following optimal update rule:
22260. x := x - [ (c'c) / ((Ac)'(Ac)) ] c
  22263. where c = A'r = A'(Ax - b)
  22264. */
  22265. // initial guess
  22266. const t = B.t;
  22267. t[0] = t0[0];
  22268. t[1] = t0[1];
  22269. t[2] = t0[2];
  22270. // gradient descent: super lightweight implementation
  22271. const r = B.r, c = B.c, Mc = B.Mc;
  22272. for (let it = 0; it < TRANSLATION_REFINEMENT_ITERATIONS; it++) {
  22273. // compute residual r = Mt - v
  22274. for (let i = 0; i < n3; i++) {
  22275. r[i] = 0;
  22276. for (let j = 0; j < 3; j++)
  22277. r[i] += m[j * n3 + i] * t[j];
  22278. r[i] -= v[i];
  22279. }
  22280. // compute c = M'r
  22281. for (let i = 0; i < 3; i++) {
  22282. c[i] = 0;
  22283. for (let j = 0; j < n3; j++)
  22284. c[i] += m[i * n3 + j] * r[j];
  22285. }
  22286. // compute Mc
  22287. for (let i = 0; i < n3; i++) {
  22288. Mc[i] = 0;
  22289. for (let j = 0; j < 3; j++)
  22290. Mc[i] += m[j * n3 + i] * c[j];
  22291. }
  22292. // compute num = c'c and den = (Mc)'(Mc)
  22293. let num = 0, den = 0;
  22294. for (let i = 0; i < 3; i++)
  22295. num += c[i] * c[i];
  22296. for (let i = 0; i < n3; i++)
  22297. den += Mc[i] * Mc[i];
  22298. // compute num / den
  22299. const frc = num / den;
  22300. if (Number.isNaN(frc))
  22301. break;
  22302. // iterate: t = t - (num / den) * c
  22303. for (let i = 0; i < 3; i++)
  22304. t[i] -= frc * c[i];
  22305. }
  22306. //console.log("OLD t:\n\n",t0.join('\n'));
  22307. //console.log("new t:\n\n",t.join('\n'));
  22308. // done!
  22309. return t;
  22310. }
  22311. /**
  22312. * Apply a smoothing filter to the partial pose
  22313. * @param partialPose 3x3 [ r1 | r2 | t ]
  22314. * @returns filtered partial pose
  22315. */
  22316. _filterPartialPose(partialPose) {
  22317. const avg = new Array(9).fill(0);
  22318. const entries = partialPose.read();
  22319. const rotationBlock = entries.slice(0, 6);
  22320. const translationBlock = entries.slice(6, 9);
  22321. // how many samples should we store, at most?
  22322. const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
  22323. const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
  22324. const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
  22325. // is it a valid partial pose?
  22326. if (!Number.isNaN(entries[0])) {
  22327. // store samples
  22328. this._partialRotationBuffer.unshift(rotationBlock);
  22329. if (this._partialRotationBuffer.length > N)
  22330. this._partialRotationBuffer.length = N;
  22331. this._translationBuffer.unshift(translationBlock);
  22332. if (this._translationBuffer.length > M)
  22333. this._translationBuffer.length = M;
  22334. }
  22335. else if (this._partialRotationBuffer.length == 0) {
  22336. // invalid pose, no samples
  22337. return speedy_vision_default().Matrix.Eye(3);
  22338. }
  22339. // average *nearby* rotations
  22340. const n = this._partialRotationBuffer.length;
  22341. for (let i = 0; i < n; i++) {
  22342. const r = this._partialRotationBuffer[i];
  22343. for (let j = 0; j < 6; j++)
  22344. avg[j] += r[j] / n;
  22345. }
  22346. const r = this._refineRotation(avg[0], avg[1], avg[2], avg[3], avg[4], avg[5]);
  22347. // average translations
  22348. const m = this._translationBuffer.length;
  22349. for (let i = 0; i < m; i++) {
  22350. const t = this._translationBuffer[i];
  22351. for (let j = 0; j < 3; j++)
  22352. avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
  22353. //avg[6 + j] += t[j] / m;
  22354. }
  22355. const t = [avg[6], avg[7], avg[8]];
  22356. // done!
  22357. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  22358. }
  22359. /**
  22360. * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
  22361. * @param partialPose
  22362. * @returns 3x4 matrix
  22363. */
  22364. _estimateFullPose(partialPose) {
  22365. const p = partialPose.read();
  22366. const r11 = p[0], r12 = p[3], t1 = p[6];
  22367. const r21 = p[1], r22 = p[4], t2 = p[7];
  22368. const r31 = p[2], r32 = p[5], t3 = p[8];
  22369. // r3 = +- ( r1 x r2 )
  22370. let r13 = r21 * r32 - r31 * r22;
  22371. let r23 = r31 * r12 - r11 * r32;
  22372. let r33 = r11 * r22 - r21 * r12;
  22373. // let's make sure that det R = +1 (keep the orientation)
  22374. const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
  22375. if (det < 0) {
  22376. r13 = -r13;
  22377. r23 = -r23;
  22378. r33 = -r33;
  22379. }
  22380. // done!
  22381. return speedy_vision_default().Matrix(3, 4, [
  22382. r11, r21, r31,
  22383. r12, r22, r32,
  22384. r13, r23, r33,
  22385. t1, t2, t3,
  22386. ]);
  22387. }
  22388. /**
  22389. * Estimate the pose [ R | t ] given a homography in AR screen space
  22390. * @param homography must be valid
  22391. * @param f focal length
  22392. * @returns 3x4 matrix
  22393. */
  22394. _estimatePose(homography, f = this._intrinsics[FY]) {
  22395. const normalizedHomography = this._normalizeHomography(homography, f);
  22396. const partialPose = speedy_vision_default().Matrix.Eye(3);
  22397. // we want the estimated partial pose [ r1 | r2 | t ] to be as close
  22398. // as possible to the normalized homography, up to a scale factor;
  22399. // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
  22400. // it won't be a perfect equality due to noise in the homography
  22401. const residual = speedy_vision_default().Matrix(normalizedHomography);
  22402. for (let k = 0; k < POSE_ITERATIONS; k++) {
  22403. // incrementally improve the partial pose
  22404. const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
  22405. partialPose.setToSync(rt.times(partialPose));
  22406. residual.setToSync(residual.times(rt.inverse()));
  22407. //console.log("residual",residual.toString());
  22408. }
  22409. //console.log('-----------');
  22410. /*
  22411. // test
  22412. const result = Speedy.Matrix.Zeros(3);
  22413. result.setToSync(partialPose.times(normalizedHomography.inverse()));
  22414. const m11 = result.at(0,0);
  22415. result.setToSync(result.times(1/m11));
  22416. console.log("Pose * NORMALIZED HOM^-1", result.toString());
  22417. */
  22418. /*
  22419. const rt = partialPose.read();
  22420. const r = rt.slice(0, 6);
  22421. const t = this._refineTranslation(normalizedHomography, r, rt.slice(6, 9));
  22422. const refinedPartialPose = Speedy.Matrix(3, 3, r.concat(t));
  22423. const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
  22424. */
  22425. // filter the partial pose
  22426. const filteredPartialPose = this._filterPartialPose(partialPose);
  22427. // estimate the full pose
  22428. return this._estimateFullPose(filteredPartialPose);
  22429. }
  22430. /**
  22431. * Store an estimated pose
  22432. * @param pose 3x4 matrix
  22433. */
  22434. _storePose(pose) {
  22435. this._extrinsics = pose.read();
  22436. }
  22437. }
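/*
 * A standalone sketch of the "gradient descent with optimal step size"
 * technique described in CameraModel._refineTranslation() above, written as a
 * generic solver for the linear least squares problem min ||Ax - b||.
 * A is a plain row-major array of size rows x cols; the function name, the
 * default iteration count and the example below are illustrative assumptions,
 * not part of the MARTINS.js API.
 */
function leastSquaresGradientDescentSketch(A, b, rows, cols, x0, iterations = 10)
{
    const x = x0.slice();
    const r = new Array(rows), c = new Array(cols), Ac = new Array(rows);
    for (let it = 0; it < iterations; it++) {
        // residual r = Ax - b
        for (let i = 0; i < rows; i++) {
            r[i] = -b[i];
            for (let j = 0; j < cols; j++)
                r[i] += A[i * cols + j] * x[j];
        }
        // c = A'r (the gradient of ||Ax - b||^2, up to a factor of 2)
        for (let j = 0; j < cols; j++) {
            c[j] = 0;
            for (let i = 0; i < rows; i++)
                c[j] += A[i * cols + j] * r[i];
        }
        // Ac, and the optimal step size (c'c) / ((Ac)'(Ac))
        let num = 0, den = 0;
        for (let i = 0; i < rows; i++) {
            Ac[i] = 0;
            for (let j = 0; j < cols; j++)
                Ac[i] += A[i * cols + j] * c[j];
            den += Ac[i] * Ac[i];
        }
        for (let j = 0; j < cols; j++)
            num += c[j] * c[j];
        const w = num / den;
        if (!Number.isFinite(w))
            break;
        // x := x - [ (c'c) / ((Ac)'(Ac)) ] c
        for (let j = 0; j < cols; j++)
            x[j] -= w * c[j];
    }
    return x;
}
// e.g., leastSquaresGradientDescentSketch([1, 0, 0, 1, 1, 1], [1, 2, 3.1], 3, 2, [0, 0])
// converges to roughly [1.03, 2.03], the least squares solution of that 3x2 system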
  22438. ;// CONCATENATED MODULE: ./src/geometry/pose.ts
  22439. /*
  22440. * MARTINS.js
  22441. * GPU-accelerated Augmented Reality for the web
  22442. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22443. *
  22444. * This program is free software: you can redistribute it and/or modify
  22445. * it under the terms of the GNU Lesser General Public License as published
  22446. * by the Free Software Foundation, either version 3 of the License, or
  22447. * (at your option) any later version.
  22448. *
  22449. * This program is distributed in the hope that it will be useful,
  22450. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22451. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22452. * GNU Lesser General Public License for more details.
  22453. *
  22454. * You should have received a copy of the GNU Lesser General Public License
  22455. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22456. *
  22457. * pose.ts
  22458. * A pose represents a position and an orientation in a 3D space
  22459. */
  22460. /**
  22461. * A pose represents a position and an orientation in a 3D space
  22462. * (and sometimes a scale, too...)
  22463. */
  22464. class Pose {
  22465. /**
  22466. * Constructor
  22467. * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
  22468. */
  22469. constructor(transform) {
  22470. this._transform = transform;
  22471. }
  22472. /**
  22473. * A transform describing the position and the orientation
  22474. * of the pose relative to the 3D space to which it belongs
  22475. */
  22476. get transform() {
  22477. return this._transform;
  22478. }
  22479. }
  22480. ;// CONCATENATED MODULE: ./src/geometry/transform.ts
  22481. /*
  22482. * MARTINS.js
  22483. * GPU-accelerated Augmented Reality for the web
  22484. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22485. *
  22486. * This program is free software: you can redistribute it and/or modify
  22487. * it under the terms of the GNU Lesser General Public License as published
  22488. * by the Free Software Foundation, either version 3 of the License, or
  22489. * (at your option) any later version.
  22490. *
  22491. * This program is distributed in the hope that it will be useful,
  22492. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22493. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22494. * GNU Lesser General Public License for more details.
  22495. *
  22496. * You should have received a copy of the GNU Lesser General Public License
  22497. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22498. *
  22499. * transform.ts
  22500. * 3D geometrical transforms
  22501. */
  22502. /**
  22503. * A 3D transformation
  22504. */
  22505. class BaseTransform {
  22506. /**
  22507. * Constructor
  22508. * @param matrix a 4x4 matrix
  22509. */
  22510. constructor(matrix) {
  22511. if (matrix.rows != 4 || matrix.columns != 4)
  22512. throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
  22513. this._matrix = matrix;
  22514. }
  22515. /**
  22516. * The 4x4 transformation matrix (read-only)
  22517. */
  22518. get matrix() {
  22519. return this._matrix;
  22520. }
  22521. }
  22522. /**
  22523. * An invertible 3D transformation
  22524. */
  22525. class InvertibleTransform extends BaseTransform {
  22526. /**
  22527. * Constructor
  22528. * @param matrix a 4x4 matrix
  22529. */
  22530. constructor(matrix) {
  22531. // WARNING: we do not check if the matrix actually encodes an invertible transform!
  22532. super(matrix);
  22533. }
  22534. /**
  22535. * The inverse of the transform
  22536. */
  22537. get inverse() {
  22538. const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
  22539. return new InvertibleTransform(inverseMatrix);
  22540. }
  22541. }
  22542. /**
  22543. * A 3D transformation described by translation, rotation and scale
  22544. */
  22545. class StandardTransform extends InvertibleTransform {
  22546. // TODO: position, rotation and scale attributes
  22547. /**
  22548. * Constructor
  22549. * @param matrix a 4x4 matrix
  22550. */
  22551. constructor(matrix) {
  22552. // WARNING: we do not check if the matrix actually encodes a standard transform!
  22553. super(matrix);
  22554. }
  22555. /**
  22556. * The inverse of the transform
  22557. */
  22558. get inverse() {
  22559. /*
  22560. The inverse of a 4x4 standard transform T * R * S...
22561. [ RS t ; 0' 1 ]^(-1)  =  [ ZR' -ZR't ; 0' 1 ]
  22563. where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
  22564. */
  22565. return super.inverse;
  22566. }
  22567. }
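/*
 * A sketch of the inverse formula quoted in StandardTransform above, for the
 * common case of a T * R * S transform whose scale S is diagonal and positive:
 * with A = R*S, the inverse of [ A t ; 0' 1 ] is [ ZR' -ZR't ; 0' 1 ], where
 * Z = S^(-1). Input and output are plain 16-entry column-major arrays. The
 * function name and the diagonal-scale assumption are illustrative; this is
 * not part of the MARTINS.js API.
 */
function invertTRSSketch(m)
{
    // scale factors = column norms of the upper-left 3x3 block A = R * S
    const sx = Math.hypot(m[0], m[1], m[2]);
    const sy = Math.hypot(m[4], m[5], m[6]);
    const sz = Math.hypot(m[8], m[9], m[10]);
    // recover the columns of R by removing the scale
    const r11 = m[0] / sx, r21 = m[1] / sx, r31 = m[2] / sx;
    const r12 = m[4] / sy, r22 = m[5] / sy, r32 = m[6] / sy;
    const r13 = m[8] / sz, r23 = m[9] / sz, r33 = m[10] / sz;
    const t1 = m[12], t2 = m[13], t3 = m[14];
    // Z R' = diag(1/sx, 1/sy, 1/sz) * R'
    const z11 = r11 / sx, z12 = r21 / sx, z13 = r31 / sx;
    const z21 = r12 / sy, z22 = r22 / sy, z23 = r32 / sy;
    const z31 = r13 / sz, z32 = r23 / sz, z33 = r33 / sz;
    // -Z R' t
    const u1 = -(z11 * t1 + z12 * t2 + z13 * t3);
    const u2 = -(z21 * t1 + z22 * t2 + z23 * t3);
    const u3 = -(z31 * t1 + z32 * t2 + z33 * t3);
    // [ ZR' -ZR't ; 0' 1 ] in column-major format
    return [
        z11, z21, z31, 0,
        z12, z22, z32, 0,
        z13, z23, z33, 0,
        u1, u2, u3, 1
    ];
}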
  22568. /**
  22569. * A 3D transformation described by position and orientation
  22570. */
  22571. class RigidTransform extends StandardTransform {
  22572. // TODO: position and rotation attributes (need to decompose the matrix)
  22573. /**
  22574. * Constructor
  22575. * @param matrix a 4x4 matrix
  22576. */
  22577. constructor(matrix) {
  22578. // WARNING: we do not check if the matrix actually encodes a rigid transform!
  22579. super(matrix);
  22580. }
  22581. /**
  22582. * The inverse of the transform
  22583. */
  22584. get inverse() {
  22585. /*
  22586. The inverse of a 4x4 rigid transform
22587. [ R t ; 0' 1 ]^(-1)  =  [ R' -R't ; 0' 1 ]
  22589. where R is 3x3, t is 3x1 and 0' is 1x3
  22590. */
  22591. const m = this._matrix.read();
  22592. if (m[15] == 0) // error? abs()??
  22593. throw new IllegalOperationError('Not a rigid transform');
  22594. const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
  22595. const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
  22596. const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
  22597. const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
  22598. const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
  22599. const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
  22600. const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
  22601. const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
  22602. const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
  22603. r11, r12, r13, 0,
  22604. r21, r22, r23, 0,
  22605. r31, r32, r33, 0,
  22606. -rt1, -rt2, -rt3, 1
  22607. ]);
  22608. return new RigidTransform(inverseMatrix);
  22609. }
  22610. }
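/*
 * The closed-form inverse used by RigidTransform above, as a standalone sketch
 * operating on a plain 16-entry column-major array [ R t ; 0' 1 ] whose last
 * entry is already 1 (no homogeneous normalization is performed here). The
 * function name is illustrative; this is not part of the MARTINS.js API.
 */
function invertRigidTransformSketch(m)
{
    // rotation block R (column-major) and translation t
    const r11 = m[0], r21 = m[1], r31 = m[2];
    const r12 = m[4], r22 = m[5], r32 = m[6];
    const r13 = m[8], r23 = m[9], r33 = m[10];
    const t1 = m[12], t2 = m[13], t3 = m[14];
    // R't
    const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
    const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
    const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
    // [ R' -R't ; 0' 1 ] in column-major format
    return [
        r11, r12, r13, 0,
        r21, r22, r23, 0,
        r31, r32, r33, 0,
        -rt1, -rt2, -rt3, 1
    ];
}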
  22611. ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
  22612. /*
  22613. * MARTINS.js
  22614. * GPU-accelerated Augmented Reality for the web
  22615. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22616. *
  22617. * This program is free software: you can redistribute it and/or modify
  22618. * it under the terms of the GNU Lesser General Public License as published
  22619. * by the Free Software Foundation, either version 3 of the License, or
  22620. * (at your option) any later version.
  22621. *
  22622. * This program is distributed in the hope that it will be useful,
  22623. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22624. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22625. * GNU Lesser General Public License for more details.
  22626. *
  22627. * You should have received a copy of the GNU Lesser General Public License
  22628. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22629. *
  22630. * viewer-pose.ts
  22631. * The pose of a virtual camera in 3D world space at a moment in time
  22632. */
  22633. /**
  22634. * The pose of a virtual camera in 3D world space at a moment in time
  22635. */
  22636. class ViewerPose extends Pose {
  22637. /**
  22638. * Constructor
  22639. * @param camera camera model
  22640. */
  22641. constructor(camera) {
  22642. // compute the view matrix and its inverse in AR screen space
  22643. const viewMatrix = ViewerPose._computeViewMatrix(camera);
  22644. const inverseTransform = new RigidTransform(viewMatrix);
  22645. super(inverseTransform.inverse);
  22646. this._viewMatrix = viewMatrix;
  22647. }
  22648. /**
  22649. * This 4x4 matrix moves 3D points from world space to viewer space. We
  22650. * assume that the camera is looking in the direction of the negative
  22651. * z-axis (WebGL-friendly)
  22652. */
  22653. get viewMatrix() {
  22654. return this._viewMatrix;
  22655. }
  22656. /**
  22657. * Compute the view matrix in AR screen space, measured in pixels
  22658. * @param camera
  22659. * @returns a 4x4 matrix describing a rotation and a translation
  22660. */
  22661. static _computeViewMatrix(camera) {
  22662. /*
  22663. // this is the view matrix in AR screen space, measured in pixels
  22664. // we augment the extrinsics matrix, making it 4x4 by adding a
  22665. // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
  22666. const V = Speedy.Matrix(4, 4, [
  22667. E[0], E[1], E[2], 0,
  22668. E[3], E[4], E[5], 0,
  22669. E[6], E[7], E[8], 0,
  22670. E[9], E[10], E[11], 1
  22671. ]);
  22672. // we premultiply V by F, which performs a rotation around the
  22673. // x-axis by 180 degrees, so that we get the 3D objects in front
  22674. // of the camera pointing in the direction of the negative z-axis
  22675. const F = Speedy.Matrix(4, 4, [
  22676. 1, 0, 0, 0,
  22677. 0,-1, 0, 0,
  22678. 0, 0,-1, 0,
  22679. 0, 0, 0, 1
  22680. ]);
  22681. Matrix F * V is matrix V with the second and third rows negated
  22682. */
  22683. const E = camera.extrinsics;
  22684. return speedy_vision_default().Matrix(4, 4, [
  22685. E[0], -E[1], -E[2], 0,
  22686. E[3], -E[4], -E[5], 0,
  22687. E[6], -E[7], -E[8], 0,
  22688. E[9], -E[10], -E[11], 1
  22689. ]);
  22690. }
  22691. }
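/*
 * A standalone sketch of the view matrix construction performed by
 * ViewerPose._computeViewMatrix() above: augment a column-major 3x4
 * extrinsics array [ R | t ] to 4x4 and negate its second and third rows,
 * which is the same as premultiplying by the 180-degree rotation F around
 * the x-axis shown in the comment. The function name is illustrative; this
 * is not part of the MARTINS.js API.
 */
function viewMatrixFromExtrinsicsSketch(E)
{
    // column-major 4x4: each column of E becomes a column of V, with the
    // y and z components flipped so the camera looks down the negative z-axis
    return [
        E[0], -E[1], -E[2], 0,
        E[3], -E[4], -E[5], 0,
        E[6], -E[7], -E[8], 0,
        E[9], -E[10], -E[11], 1
    ];
}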
  22692. ;// CONCATENATED MODULE: ./src/geometry/view.ts
  22693. /*
  22694. * MARTINS.js
  22695. * GPU-accelerated Augmented Reality for the web
  22696. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22697. *
  22698. * This program is free software: you can redistribute it and/or modify
  22699. * it under the terms of the GNU Lesser General Public License as published
  22700. * by the Free Software Foundation, either version 3 of the License, or
  22701. * (at your option) any later version.
  22702. *
  22703. * This program is distributed in the hope that it will be useful,
  22704. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22705. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22706. * GNU Lesser General Public License for more details.
  22707. *
  22708. * You should have received a copy of the GNU Lesser General Public License
  22709. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22710. *
  22711. * view.ts
  22712. * A view of the 3D world at a moment in time,
  22713. * featuring the means to project points into clip space
  22714. */
  22715. /** Default distance in pixels of the near plane to the optical center of the camera */
  22716. const DEFAULT_NEAR = 1;
  22717. /** Default distance in pixels of the far plane to the optical center of the camera */
  22718. const DEFAULT_FAR = 20000;
  22719. /**
  22720. * A PerspectiveView is a View defining a symmetric frustum around the z-axis
  22721. * (perspective projection)
  22722. */
  22723. class PerspectiveView {
  22724. /**
  22725. * Constructor
  22726. * @param camera camera model
  22727. * @param near distance of the near plane
  22728. * @param far distance of the far plane
  22729. */
  22730. constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  22731. const intrinsics = camera.intrinsics;
  22732. const screenSize = camera.screenSize;
  22733. this._near = Math.max(0, +near);
  22734. this._far = Math.max(0, +far);
  22735. if (this._near >= this._far)
  22736. throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
  22737. this._aspect = screenSize.width / screenSize.height;
  22738. this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
  22739. this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
  22740. }
  22741. /**
  22742. * A 4x4 projection matrix for WebGL
  22743. */
  22744. get projectionMatrix() {
  22745. return this._projectionMatrix;
  22746. }
  22747. /**
  22748. * Aspect ratio of the frustum
  22749. */
  22750. get aspect() {
  22751. return this._aspect;
  22752. }
  22753. /**
  22754. * Vertical field-of-view of the frustum, measured in radians
  22755. */
  22756. get fovy() {
  22757. return 2 * Math.atan(this._tanOfHalfFovy);
  22758. }
  22759. /**
  22760. * Distance of the near plane
  22761. */
  22762. get near() {
  22763. return this._near;
  22764. }
  22765. /**
  22766. * Distance of the far plane
  22767. */
  22768. get far() {
  22769. return this._far;
  22770. }
  22771. /**
  22772. * Compute a perspective projection matrix for WebGL
  22773. * @param K camera intrinsics
  22774. * @param near distance of the near plane
  22775. * @param far distance of the far plane
  22776. */
  22777. static _computeProjectionMatrix(K, near, far) {
  22778. // we assume that the principal point is at the center of the image
  22779. const top = near * (K[V0] / K[FY]);
  22780. const right = near * (K[U0] / K[FX]);
  22781. const bottom = -top, left = -right; // symmetric frustum
  22782. // a derivation of this projection matrix can be found at
  22783. // https://www.songho.ca/opengl/gl_projectionmatrix.html
  22784. // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
  22785. return speedy_vision_default().Matrix(4, 4, [
  22786. 2 * near / (right - left), 0, 0, 0,
  22787. 0, 2 * near / (top - bottom), 0, 0,
  22788. (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
  22789. 0, 0, -2 * far * near / (far - near), 0
  22790. ]);
  22791. }
  22792. }
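/*
 * A standalone sketch of the symmetric WebGL projection matrix built by
 * PerspectiveView._computeProjectionMatrix() above, written in terms of the
 * vertical field-of-view and aspect ratio. It assumes the principal point at
 * the image center and fx = fy, so that tan(fovy/2) = v0/fy and right/top is
 * the screen aspect ratio. The function name is illustrative; this is not
 * part of the MARTINS.js API.
 */
function perspectiveProjectionSketch(fovy, aspect, near, far)
{
    const top = near * Math.tan(fovy / 2); // = near * (v0 / fy)
    const right = top * aspect;            // = near * (u0 / fx)
    // column-major 4x4 for a symmetric frustum (left = -right, bottom = -top)
    return [
        near / right, 0, 0, 0,
        0, near / top, 0, 0,
        0, 0, -(far + near) / (far - near), -1,
        0, 0, -2 * far * near / (far - near), 0
    ];
}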
  22793. ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
  22794. /*
  22795. * MARTINS.js
  22796. * GPU-accelerated Augmented Reality for the web
  22797. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22798. *
  22799. * This program is free software: you can redistribute it and/or modify
  22800. * it under the terms of the GNU Lesser General Public License as published
  22801. * by the Free Software Foundation, either version 3 of the License, or
  22802. * (at your option) any later version.
  22803. *
  22804. * This program is distributed in the hope that it will be useful,
  22805. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22806. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22807. * GNU Lesser General Public License for more details.
  22808. *
  22809. * You should have received a copy of the GNU Lesser General Public License
  22810. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22811. *
* viewer.ts
  22813. * A viewer represents a virtual camera in 3D world space
  22814. */
  22815. /**
  22816. * A viewer represents a virtual camera in 3D world space
  22817. */
  22818. class Viewer {
  22819. /**
  22820. * Constructor
  22821. * @param camera camera model
  22822. */
  22823. constructor(camera) {
  22824. this._pose = new ViewerPose(camera);
  22825. this._views = [new PerspectiveView(camera)];
  22826. }
  22827. /**
  22828. * The pose of this viewer
  22829. */
  22830. get pose() {
  22831. return this._pose;
  22832. }
  22833. /**
  22834. * The view of this viewer (only for monoscopic rendering)
  22835. */
  22836. get view() {
  22837. /*
  22838. if(this._views.length > 1)
  22839. throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
  22840. */
  22841. return this._views[0];
  22842. }
  22843. /**
  22844. * The views of this viewer
  22845. */
  22846. /*
  22847. get views(): View[]
  22848. {
  22849. return this._views.concat([]);
  22850. }
  22851. */
  22852. /**
  22853. * Convert a pose from world space to viewer space
  22854. * @param pose a pose in world space
  22855. * @returns a pose in viewer space
  22856. */
  22857. convertToViewerSpace(pose) {
  22858. const modelMatrix = pose.transform.matrix;
  22859. const viewMatrix = this._pose.viewMatrix;
  22860. const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
  22861. const transform = new StandardTransform(modelViewMatrix);
  22862. return new Pose(transform);
  22863. }
  22864. }
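/*
 * Usage sketch (illustrative; not part of the bundle): converting a pose from world
 * space to viewer space. `camera` and `worldPose` are assumptions — an initialized
 * CameraModel and a Pose obtained from the tracker, respectively.
 *
 *   const viewer = new Viewer(camera);
 *   const viewerSpacePose = viewer.convertToViewerSpace(worldPose);
 *   const viewMatrix = viewer.pose.viewMatrix; // 4x4: world space -> viewer space
 */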
  22865. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
  22866. /*
  22867. * MARTINS.js
  22868. * GPU-accelerated Augmented Reality for the web
  22869. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22870. *
  22871. * This program is free software: you can redistribute it and/or modify
  22872. * it under the terms of the GNU Lesser General Public License as published
  22873. * by the Free Software Foundation, either version 3 of the License, or
  22874. * (at your option) any later version.
  22875. *
  22876. * This program is distributed in the hope that it will be useful,
  22877. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22878. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22879. * GNU Lesser General Public License for more details.
  22880. *
  22881. * You should have received a copy of the GNU Lesser General Public License
  22882. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22883. *
  22884. * tracking.ts
  22885. * Tracking state of the Image Tracker
  22886. */
/** Whether or not we want to accelerate GPU-CPU transfers. Using turbo introduces a slight delay in the tracking */
  22888. const USE_TURBO = true;
  22889. /** Number of PBOs; meaningful only when using turbo */
  22890. const NUMBER_OF_PBOS = 2;
  22891. /** Frame skipping; meaningful only when using turbo */
  22892. const TURBO_SKIP = 2;
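// Illustration of the constants above, assuming the defaults shown: with TURBO_SKIP = 2,
// the pipeline only runs on every other frame; skipped frames reuse the last output with
// predicted keypoints (see _gpuUpdate() below). NUMBER_OF_PBOS = 2 means the keypoint
// transfer is double-buffered, so the keypoints seen on the CPU may lag the current frame
// by roughly NUMBER_OF_PBOS * TURBO_SKIP frames (this is the delay used in _afterUpdate())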
  22893. /**
  22894. * The tracking state of the Image Tracker tracks
  22895. * keypoints of the image target and updates the
  22896. * rectification matrix
  22897. */
  22898. class ImageTrackerTrackingState extends ImageTrackerState {
  22899. /**
  22900. * Constructor
  22901. * @param imageTracker
  22902. */
  22903. constructor(imageTracker) {
  22904. super('tracking', imageTracker);
  22905. this._referenceImage = null;
  22906. this._warpHomography = speedy_vision_default().Matrix.Eye(3);
  22907. this._poseHomography = speedy_vision_default().Matrix.Eye(3);
  22908. this._initialHomography = speedy_vision_default().Matrix.Eye(3);
  22909. this._initialKeypoints = [];
  22910. this._counter = 0;
  22911. this._camera = new CameraModel();
  22912. this._predictedKeypoints = [];
  22913. this._lastPipelineOutput = { keypoints: [] };
  22914. this._pipelineCounter = 0;
  22915. this._lastOutput = {};
  22916. this._lostCounter = 0;
  22917. // we need at least 4 correspondences of points to compute a homography matrix
  22918. Utils.assert(TRACK_MIN_MATCHES >= 4);
  22919. }
  22920. /**
  22921. * Called as soon as this becomes the active state, just before update() runs for the first time
  22922. * @param settings
  22923. */
  22924. onEnterState(settings) {
  22925. const homography = settings.homography;
  22926. const referenceImage = settings.referenceImage;
  22927. const templateKeypoints = settings.templateKeypoints;
  22928. const keypointPortalSink = settings.keypointPortalSink;
  22929. const screenSize = settings.screenSize; // this.screenSize is not yet set
  22930. const keypointPortalSource = this._pipeline.node('keypointPortalSource');
  22931. // this shouldn't happen
  22932. if (!referenceImage)
  22933. throw new IllegalOperationError(`Can't track a null reference image`);
  22934. // set attributes
  22935. this._referenceImage = referenceImage;
  22936. this._warpHomography = speedy_vision_default().Matrix(homography);
  22937. this._poseHomography = speedy_vision_default().Matrix(homography);
  22938. this._initialHomography = speedy_vision_default().Matrix(homography);
  22939. this._initialKeypoints = templateKeypoints;
  22940. this._counter = 0;
  22941. this._predictedKeypoints = [];
  22942. this._lastPipelineOutput = { keypoints: [] };
  22943. this._pipelineCounter = 0;
  22944. this._lastOutput = {};
  22945. this._lostCounter = 0;
  22946. // setup portals
  22947. keypointPortalSource.source = keypointPortalSink;
  22948. // setup camera
  22949. this._camera.init(screenSize);
  22950. // emit event
  22951. const ev = new ImageTrackerEvent('targetfound', referenceImage);
  22952. this._imageTracker.dispatchEvent(ev);
  22953. // log
  22954. Utils.log(`Tracking image "${referenceImage.name}"...`);
  22955. }
  22956. /**
  22957. * Called when leaving the state
  22958. */
  22959. onLeaveState() {
  22960. const referenceImage = this._referenceImage;
  22961. // release the camera
  22962. this._camera.release();
  22963. // emit event
  22964. const ev = new ImageTrackerEvent('targetlost', referenceImage);
  22965. this._imageTracker.dispatchEvent(ev);
  22966. }
  22967. /**
  22968. * Called just before the GPU processing
  22969. * @returns promise
  22970. */
  22971. _beforeUpdate() {
  22972. const imageRectifier = this._pipeline.node('imageRectifier');
  22973. const borderClipper = this._pipeline.node('borderClipper');
  22974. const keypointRectifier = this._pipeline.node('keypointRectifier');
  22975. const screenSize = this.screenSize;
  22976. /*
  22977. // pause media (test)
  22978. const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
  22979. const media = source.media as SpeedyMedia;
  22980. (media.source as HTMLVideoElement).pause();
  22981. */
  22982. // clip keypoints from the borders of the target image
  22983. borderClipper.imageSize = screenSize;
  22984. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  22985. // rectify the image
  22986. return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
  22987. imageRectifier.transform = warp;
  22988. });
  22989. }
  22990. /**
  22991. * GPU processing
  22992. * @returns promise with the pipeline results
  22993. */
  22994. _gpuUpdate() {
  22995. //return super._gpuUpdate();
  22996. // No turbo?
  22997. if (!USE_TURBO || Settings.powerPreference == 'low-power')
  22998. return super._gpuUpdate();
  22999. // When using turbo, we reduce the GPU usage by skipping every other frame
  23000. const counter = this._pipelineCounter;
  23001. this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
  23002. // Skip frame
  23003. if (counter != 0) {
  23004. if (this._lastPipelineOutput.keypoints !== undefined) {
  23005. this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
  23006. }
  23007. else
  23008. this._predictedKeypoints.length = 0;
  23009. this._lastPipelineOutput.keypoints = this._predictedKeypoints;
  23010. return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
  23011. }
  23012. // Run the pipeline and store the results
  23013. return super._gpuUpdate().then(results => {
  23014. this._lastPipelineOutput = results;
  23015. return results;
  23016. });
  23017. }
  23018. /**
  23019. * Post processing that takes place just after the GPU processing
  23020. * @param result pipeline results
  23021. * @returns state output
  23022. */
  23023. _afterUpdate(result) {
  23024. const imageRectifier = this._pipeline.node('imageRectifier');
  23025. const keypoints = result.keypoints;
  23026. const image = result.image;
  23027. const referenceImage = this._referenceImage;
  23028. // find the best keypoint matches
  23029. return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
  23030. // find motion models
  23031. return speedy_vision_default().Promise.all([
  23032. this._findAffineMotion(matches),
  23033. this._findPerspectiveMotion(matches)
  23034. ]);
  23035. }).then(([affineMotion, perspectiveMotion]) => {
  23036. const lowPower = (Settings.powerPreference == 'low-power');
  23037. const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
  23038. // update warp homography
  23039. const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
  23040. const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
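// e.g., with NUMBER_OF_PBOS = 2 and TURBO_SKIP = 2 (and not in low-power mode),
// delay = 4 and remainder = 2, so the warp homography is refreshed whenever
// this._counter % 4 == 2, i.e., once every 4 frames and never on the very first frame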
  23041. if (!USE_TURBO || this._counter % delay == remainder)
  23042. this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
  23043. // update pose homography
  23044. if (!frozen)
  23045. this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
  23046. // update counter
  23047. this._counter = (this._counter + 1) % delay;
  23048. // update the camera
  23049. if (!frozen)
  23050. return this._camera.update(this._poseHomography, this.screenSize);
  23051. else
  23052. return this._camera.matrix;
  23053. }).then(_ => {
  23054. // find the inverse of the rectification matrix
  23055. const rectificationMatrix = imageRectifier.transform;
  23056. const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
  23057. // move keypoints from rectified space back to image space
  23058. const n = keypoints.length;
  23059. const coords = new Array(2 * n);
  23060. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23061. coords[j] = keypoints[i].position.x;
  23062. coords[j + 1] = keypoints[i].position.y;
  23063. }
  23064. return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
  23065. /*
  23066. // test image center
  23067. const coords2: number[] = new Array(2 * n);
  23068. for(let i = 0, j = 0; i < n; i++, j += 2) {
  23069. coords2[j] = this._imageTracker.screenSize.width / 2;
  23070. coords2[j+1] = this._imageTracker.screenSize.height / 2;
  23071. if(i % 2 == 0) {
  23072. coords2[j] = this._imageTracker.screenSize.width / 4;
  23073. coords2[j+1] = this._imageTracker.screenSize.height / 4;
  23074. }
  23075. }
  23076. return Speedy.Matrix.applyPerspectiveTransform(
  23077. Speedy.Matrix.Zeros(2, n),
  23078. Speedy.Matrix(2, n, coords2),
  23079. this._poseHomography
  23080. //this._warpHomography
  23081. );
  23082. */
  23083. }).then(mat => {
  23084. /*
  23085. const n = keypoints.length;
  23086. const coords = mat.read();
  23087. // ** this will interfere with the calculations when frame skipping is on **
  23088. // get keypoints in image space
  23089. for(let i = 0, j = 0; i < n; i++, j += 2) {
  23090. keypoints[i].position.x = coords[j];
  23091. keypoints[i].position.y = coords[j+1];
  23092. }
  23093. */
  23094. // find a polyline surrounding the target
  23095. return this._findPolyline(this._poseHomography, this.screenSize);
  23096. //return this._findPolyline(this._warpHomography, this.screenSize);
  23097. }).then(polyline => {
  23098. // we let the target object be at the origin of the world space
  23099. // (identity transform). We also perform a change of coordinates,
  23100. // so that we move out from pixel space and into normalized space
  23101. const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
  23102. const transform = new StandardTransform(modelMatrix);
  23103. const pose = new Pose(transform);
  23104. // given the current state of the camera model, we get a viewer
  23105. // compatible with the pose of the target
  23106. const viewer = new Viewer(this._camera);
  23107. // the trackable object
  23108. const trackable = {
  23109. pose: pose,
  23110. referenceImage: referenceImage
  23111. };
  23112. // the result generated by the image tracker
  23113. const result = {
  23114. tracker: this._imageTracker,
  23115. trackables: [trackable],
  23116. viewer: viewer
  23117. };
  23118. // build and save the output
  23119. this._lastOutput = {
  23120. exports: result,
  23121. cameraMatrix: this._camera.matrix,
  23122. homography: this._warpHomography,
  23123. //keypoints: keypoints,
  23124. screenSize: this.screenSize,
  23125. image: image,
  23126. polyline: polyline,
  23127. };
  23128. // we have successfully tracked the target in this frame
  23129. this._lostCounter = 0;
  23130. // done!
  23131. return {
  23132. nextState: 'tracking',
  23133. trackerOutput: this._lastOutput
  23134. };
  23135. }).catch(err => {
  23136. // give some tolerance to tracking errors
  23137. if (err instanceof TrackingError) {
  23138. if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
  23139. //console.log("ABSORB",this._lostCounter,err.toString())
  23140. // absorb the error
  23141. return {
  23142. nextState: 'tracking',
  23143. trackerOutput: this._lastOutput
  23144. };
  23145. }
  23146. }
  23147. // lost tracking
  23148. Utils.warning(`The target has been lost! ${err.toString()}`);
  23149. this._camera.reset();
  23150. // go back to the scanning state
  23151. return {
  23152. nextState: 'scanning',
  23153. trackerOutput: {
  23154. image: image,
  23155. screenSize: this.screenSize,
  23156. },
  23157. };
  23158. });
  23159. }
  23160. /**
  23161. * Find quality matches between two sets of keypoints
  23162. * @param currKeypoints keypoints of the current frame
  23163. * @param prevKeypoints keypoints of the previous frame
  23164. * @returns quality matches
  23165. */
  23166. _findQualityMatches(currKeypoints, prevKeypoints) {
  23167. const result = [[], []];
  23168. const n = currKeypoints.length;
  23169. for (let i = 0; i < n; i++) {
  23170. const currKeypoint = currKeypoints[i];
  23171. if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
  23172. const d1 = currKeypoint.matches[0].distance;
  23173. const d2 = currKeypoint.matches[1].distance;
  23174. if (d1 <= TRACK_MATCH_RATIO * d2) {
  23175. const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
  23176. result[0].push(currKeypoint);
  23177. result[1].push(prevKeypoint);
  23178. }
  23179. }
  23180. }
  23181. return result;
  23182. }
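/*
 * Worked example of the ratio test above (TRACK_MATCH_RATIO is defined elsewhere in
 * this bundle; 0.7 is used here only as an illustrative value): a keypoint whose two
 * nearest matches have descriptor distances d1 = 30 and d2 = 60 passes the test
 * (30 <= 0.7 * 60 = 42), whereas d1 = 50, d2 = 60 is rejected as ambiguous (50 > 42).
 */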
  23183. /**
  23184. * Find a better spatial distribution of the input matches
  23185. * @param matches quality matches
  23186. * @returns refined quality matches
  23187. */
  23188. _refineQualityMatches(matches) {
  23189. const currKeypoints = matches[0];
  23190. const prevKeypoints = matches[1];
  23191. // find a better spatial distribution of the keypoints
  23192. const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
  23193. const n = indices.length; // number of refined matches
  23194. // assemble output
  23195. const result = [new Array(n), new Array(n)];
  23196. for (let i = 0; i < n; i++) {
  23197. result[0][i] = currKeypoints[indices[i]];
  23198. result[1][i] = prevKeypoints[indices[i]];
  23199. }
  23200. // done!
  23201. return result;
  23202. }
  23203. /**
  23204. * Spatially distribute keypoints over a grid
  23205. * @param keypoints keypoints to be distributed
  23206. * @param gridCells number of grid elements in each axis
  23207. * @returns a list of indices of keypoints[]
  23208. */
  23209. _distributeKeypoints(keypoints, gridCells) {
  23210. // get the coordinates of the keypoints
  23211. const n = keypoints.length;
  23212. const points = new Array(2 * n);
  23213. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23214. points[j] = keypoints[i].x;
  23215. points[j + 1] = keypoints[i].y;
  23216. }
// normalize the coordinates to [0,1) x [0,1)
  23218. this._normalizePoints(points);
  23219. // distribute the keypoints over a grid
  23220. const numberOfCells = gridCells * gridCells;
  23221. const grid = (new Array(numberOfCells)).fill(-1);
  23222. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23223. // find the grid location of the i-th point
  23224. const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
  23225. const yg = Math.floor(points[j + 1] * gridCells);
  23226. // store the index of the i-th point in the grid
  23227. grid[yg * gridCells + xg] = i;
  23228. }
  23229. // retrieve points of the grid
  23230. const indices = [];
  23231. for (let g = 0; g < numberOfCells; g++) {
  23232. if (grid[g] >= 0) {
  23233. const i = grid[g];
  23234. indices.push(i);
  23235. }
  23236. }
  23237. // done!
  23238. return indices;
  23239. }
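/*
 * Example of the grid distribution above (TRACK_GRID_GRANULARITY is defined elsewhere
 * in this bundle; 10 is used here only as an illustrative value): with gridCells = 10,
 * the normalized keypoints fall into a 10x10 grid of 100 cells and at most one keypoint
 * per cell is kept (the last one written to it), so the output has at most 100 indices.
 */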
  23240. /**
  23241. * Normalize points to [0,1)^2
  23242. * @param points 2 x n matrix of points in column-major format
  23243. * @returns points
  23244. */
  23245. _normalizePoints(points) {
  23246. Utils.assert(points.length % 2 == 0);
  23247. const n = points.length / 2;
  23248. if (n == 0)
  23249. return points;
  23250. let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
  23251. let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
  23252. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23253. const x = points[j], y = points[j + 1];
  23254. xmin = x < xmin ? x : xmin;
  23255. ymin = y < ymin ? y : ymin;
  23256. xmax = x > xmax ? x : xmax;
  23257. ymax = y > ymax ? y : ymax;
  23258. }
  23259. const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
  23260. const ylen = ymax - ymin + 1;
  23261. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23262. points[j] = (points[j] - xmin) / xlen;
  23263. points[j + 1] = (points[j + 1] - ymin) / ylen;
  23264. }
  23265. return points;
  23266. }
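/*
 * Worked example of the normalization above: for points = [10, 20, 30, 60] we get
 * xmin = 10, xmax = 30, ymin = 20, ymax = 60, hence xlen = 21 and ylen = 41
 * (the +1 keeps the result strictly below 1). The output is
 * [0, 0, 20/21, 40/41] ~= [0, 0, 0.952, 0.976], i.e., all coordinates lie in [0, 1).
 */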
  23267. /**
  23268. * Find a matrix with the coordinates of quality matches
  23269. * @param matches n quality matches
  23270. * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
  23271. */
  23272. _findMatrixOfMatches(matches) {
  23273. const n = matches[0].length;
  23274. Utils.assert(n > 0);
  23275. // sets of keypoints
  23276. const currKeypoints = matches[0];
  23277. const prevKeypoints = matches[1];
  23278. // get the coordinates of the keypoints of the set of refined matches
  23279. const src = new Array(2 * n);
  23280. const dst = new Array(2 * n);
  23281. for (let i = 0, j = 0; i < n; i++, j += 2) {
  23282. src[j] = prevKeypoints[i].x;
  23283. src[j + 1] = prevKeypoints[i].y;
  23284. dst[j] = currKeypoints[i].x;
  23285. dst[j + 1] = currKeypoints[i].y;
  23286. }
  23287. // assemble the matrix
  23288. return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
  23289. }
  23290. /**
  23291. * Preprocess keypoint matches
  23292. * @param currKeypoints keypoints of the current frame
  23293. * @param prevKeypoints keypoints of the previous frame
  23294. * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
  23295. * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
  23296. */
  23297. _preprocessMatches(currKeypoints, prevKeypoints) {
  23298. // find and refine quality matches
  23299. const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
  23300. const refinedMatches = this._refineQualityMatches(qualityMatches);
  23301. // not enough matches?
  23302. const n = refinedMatches[0].length;
  23303. if (n < TRACK_MIN_MATCHES)
  23304. return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
  23305. // find matrix of matches
  23306. const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
  23307. // warp matrix of matches
  23308. const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
  23309. return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
  23310. }
  23311. /**
  23312. * Find an affine motion model of the target image
  23313. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  23314. * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
  23315. */
  23316. _findAffineMotion(preprocessedMatches) {
  23317. const model = speedy_vision_default().Matrix.Eye(3);
  23318. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  23319. // find motion model
  23320. return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  23321. method: 'pransac',
  23322. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  23323. numberOfHypotheses: 512,
  23324. bundleSize: 128,
  23325. }).then(_ => {
  23326. // validate the model
  23327. const a00 = model.at(0, 0);
  23328. if (Number.isNaN(a00))
  23329. throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
  23330. // done!
  23331. return model;
  23332. });
  23333. }
  23334. /**
  23335. * Find a perspective motion model of the target image
  23336. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  23337. * @returns a promise that resolves to a 3x3 perspective motion model
  23338. */
  23339. _findPerspectiveMotion(preprocessedMatches) {
  23340. /*
  23341. We can probably get more accurate motion estimates if we
  23342. work in 3D rather than in 2D. We're currently estimating
  23343. an affine transform in image space. What if we projected
  23344. the keypoints into world space, estimated the camera motion
(rotation and translation) that best describes the observed
motion of the keypoints, and then projected things
  23347. back to image space? Need to figure this out; we'll get a
  23348. homography matrix.
  23349. Note: keypoints are in rectified image space.
  23350. Note: work with a 6 DoF perspective transform instead of 8.
  23351. */
  23352. const model = speedy_vision_default().Matrix.Zeros(3);
  23353. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  23354. // find motion model
  23355. return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  23356. method: 'pransac',
  23357. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  23358. numberOfHypotheses: 512 * 2,
  23359. bundleSize: 128 * 4, //*4
  23360. }).then(_ => {
  23361. // validate the model
  23362. const a00 = model.at(0, 0);
  23363. if (Number.isNaN(a00))
  23364. throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
  23365. // done!
  23366. return model;
  23367. });
  23368. }
  23369. /**
  23370. * Find a rectification matrix to be applied to the target image
  23371. * @param homography maps a reference image to the AR screen
* @param screenSize AR screen
  23374. * @returns promise that resolves to a rectification matrix
  23375. */
  23376. _findImageWarp(homography, screenSize) {
  23377. const referenceImage = this._referenceImage;
  23378. const media = this._imageTracker.database._findMedia(referenceImage.name);
  23379. const mat = speedy_vision_default().Matrix.Zeros(3);
  23380. return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
  23381. }
  23382. /**
  23383. * Find a warp to be applied to the keypoints
  23384. * @returns affine transform
  23385. */
  23386. _findKeypointWarp() {
  23387. const referenceImage = this._referenceImage;
  23388. const media = this._imageTracker.database._findMedia(referenceImage.name);
  23389. const screenSize = this.screenSize;
  23390. const sw = screenSize.width, sh = screenSize.height;
  23391. const mat = speedy_vision_default().Matrix.Eye(3, 3);
  23392. // no rotation is needed
  23393. if (!this._mustRotateWarpedImage(media, screenSize))
  23394. return speedy_vision_default().Promise.resolve(mat);
  23395. // rotate by 90 degrees clockwise and scale
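// the affine map below is defined by three point correspondences in screen space:
// (0, sh) -> (0, 0), (0, 0) -> (sw, 0) and (sw, 0) -> (sw, sh),
// i.e., the rotation-and-scale described above, in image coordinates (y pointing down)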
  23396. return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
  23397. }
  23398. /**
  23399. * Predict the keypoints without actually looking at the image
  23400. * @param curr keypoints at time t (will modify the contents)
  23401. * @param initial keypoints at time t-1 (not just t = 0)
  23402. * @returns keypoints at time t+1
  23403. */
  23404. _predictKeypoints(curr, initial) {
  23405. // the target image is likely to be moving roughly in
  23406. // the same manner as it was in the previous frame
  23407. const next = [];
  23408. const n = curr.length;
  23409. for (let i = 0; i < n; i++) {
  23410. const cur = curr[i];
  23411. if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
  23412. continue;
  23413. /*
  23414. else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
  23415. continue;
  23416. */
  23417. const ini = initial[cur.matches[0].index];
  23418. const dx = cur.position.x - ini.position.x;
  23419. const dy = cur.position.y - ini.position.y;
  23420. // a better mathematical model is needed
  23421. const alpha = 0.8; //0.2;
  23422. cur.position.x = ini.position.x + alpha * dx;
  23423. cur.position.y = ini.position.y + alpha * dy;
  23424. next.push(cur);
  23425. }
  23426. // done!
  23427. return next;
  23428. }
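// Example of the damping above: with alpha = 0.8, a keypoint that moved by
// (dx, dy) = (10, -5) pixels relative to its matched reference keypoint is predicted
// at ini + (8, -4), i.e., 80% of the observed displacement is carried over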
  23429. /**
  23430. * Create & setup the pipeline
  23431. * @returns pipeline
  23432. */
  23433. _createPipeline() {
  23434. const pipeline = speedy_vision_default().Pipeline();
  23435. const source = speedy_vision_default().Image.Source('source');
  23436. const screen = speedy_vision_default().Transform.Resize('screen');
  23437. const greyscale = speedy_vision_default().Filter.Greyscale();
  23438. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  23439. const nightvision = speedy_vision_default().Filter.Nightvision();
  23440. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  23441. const blur = speedy_vision_default().Filter.GaussianBlur();
  23442. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  23443. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  23444. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  23445. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  23446. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  23447. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  23448. const clipper = speedy_vision_default().Keypoint.Clipper();
  23449. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  23450. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  23451. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  23452. const imageSink = speedy_vision_default().Image.Sink('image');
  23453. source.media = null;
  23454. screen.size = speedy_vision_default().Size(0, 0);
  23455. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  23456. nightvision.gain = NIGHTVISION_GAIN;
  23457. nightvision.offset = NIGHTVISION_OFFSET;
  23458. nightvision.decay = NIGHTVISION_DECAY;
  23459. nightvision.quality = NIGHTVISION_QUALITY;
  23460. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  23461. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  23462. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  23463. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  23464. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  23465. detector.quality = TRACK_HARRIS_QUALITY;
  23466. detector.capacity = TRACK_DETECTOR_CAPACITY;
  23467. subpixel.method = SUBPIXEL_METHOD;
  23468. clipper.size = TRACK_MAX_KEYPOINTS;
  23469. borderClipper.imageSize = screen.size;
  23470. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  23471. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  23472. matcher.k = 2;
  23473. keypointPortalSource.source = null;
  23474. keypointSink.turbo = USE_TURBO;
  23475. // prepare input
  23476. source.output().connectTo(screen.input());
  23477. screen.output().connectTo(greyscale.input());
  23478. // preprocess images
  23479. greyscale.output().connectTo(imageRectifier.input());
  23480. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  23481. imageRectifier.output().connectTo(nightvision.input());
  23482. nightvision.output().connectTo(nightvisionMux.input('in1'));
  23483. // keypoint detection & clipping
  23484. nightvisionMux.output().connectTo(detector.input());
  23485. detector.output().connectTo(borderClipper.input());
  23486. borderClipper.output().connectTo(clipper.input());
  23487. // keypoint refinement
  23488. imageRectifier.output().connectTo(denoiser.input());
  23489. denoiser.output().connectTo(subpixel.input('image'));
  23490. clipper.output().connectTo(subpixel.input('keypoints'));
  23491. // keypoint description
  23492. imageRectifier.output().connectTo(blur.input());
  23493. blur.output().connectTo(descriptor.input('image'));
  23494. subpixel.output().connectTo(descriptor.input('keypoints'));
  23495. // keypoint matching
  23496. keypointPortalSource.output().connectTo(matcher.input('database'));
  23497. descriptor.output().connectTo(matcher.input('keypoints'));
  23498. // prepare output
  23499. descriptor.output().connectTo(keypointRectifier.input());
  23500. //preMatcher.output().connectTo(keypointRectifier.input());
  23501. keypointRectifier.output().connectTo(keypointSink.input());
  23502. matcher.output().connectTo(keypointSink.input('matches'));
  23503. //imageRectifier.output().connectTo(imageSink.input());
  23504. // done!
  23505. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
  23506. return pipeline;
  23507. }
  23508. }
  23509. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
  23510. /*
  23511. * MARTINS.js
  23512. * GPU-accelerated Augmented Reality for the web
  23513. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23514. *
  23515. * This program is free software: you can redistribute it and/or modify
  23516. * it under the terms of the GNU Lesser General Public License as published
  23517. * by the Free Software Foundation, either version 3 of the License, or
  23518. * (at your option) any later version.
  23519. *
  23520. * This program is distributed in the hope that it will be useful,
  23521. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23522. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23523. * GNU Lesser General Public License for more details.
  23524. *
  23525. * You should have received a copy of the GNU Lesser General Public License
  23526. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23527. *
  23528. * image-tracker.ts
  23529. * Image Tracker
  23530. */
/** A helper that formats a size for logging */
  23532. const formatSize = (size) => `${size.width}x${size.height}`;
  23533. /**
  23534. * The ImageTracker tracks an image (one at a time)
  23535. */
  23536. class ImageTracker extends AREventTarget {
  23537. /**
  23538. * Constructor
  23539. */
  23540. constructor() {
  23541. super();
  23542. // the states
  23543. this._state = {
  23544. 'initial': new ImageTrackerInitialState(this),
  23545. 'training': new ImageTrackerTrainingState(this),
  23546. 'scanning': new ImageTrackerScanningState(this),
  23547. 'pre-tracking': new ImageTrackerPreTrackingState(this),
  23548. 'tracking': new ImageTrackerTrackingState(this),
  23549. };
  23550. // initial setup
  23551. this._session = null;
  23552. this._activeStateName = 'initial';
  23553. this._lastOutput = {};
  23554. this._database = new ReferenceImageDatabase();
  23555. // user settings
  23556. this._resolution = DEFAULT_TRACKING_RESOLUTION;
  23557. }
  23558. /**
  23559. * The type of the tracker
  23560. */
  23561. get type() {
  23562. return 'image-tracker';
  23563. }
  23564. /**
  23565. * Current state name
  23566. */
  23567. get state() {
  23568. return this._activeStateName;
  23569. }
  23570. /**
  23571. * Reference Image Database
  23572. * Must be configured before training the tracker
  23573. */
  23574. get database() {
  23575. return this._database;
  23576. }
  23577. /**
  23578. * Resolution of the AR screen space
  23579. */
  23580. get resolution() {
  23581. return this._resolution;
  23582. }
  23583. /**
  23584. * Resolution of the AR screen space
  23585. */
  23586. set resolution(resolution) {
  23587. this._resolution = resolution;
  23588. }
  23589. /**
  23590. * Size of the AR screen space, in pixels
  23591. * @internal
  23592. */
  23593. get screenSize() {
  23594. return this._state[this._activeStateName].screenSize;
  23595. }
  23596. /**
  23597. * Last emitted output
  23598. * @internal
  23599. */
  23600. get _output() {
  23601. return this._lastOutput;
  23602. }
  23603. /**
  23604. * Stats related to this tracker
  23605. * @internal
  23606. */
  23607. get _stats() {
  23608. return `${formatSize(this.screenSize)} ${this.state}`;
  23609. }
  23610. /**
  23611. * Initialize this tracker
  23612. * @param session
  23613. * @returns promise that resolves after the tracker has been initialized
  23614. * @internal
  23615. */
  23616. _init(session) {
  23617. // store the session
  23618. this._session = session;
  23619. // initialize states
  23620. for (const state of Object.values(this._state))
  23621. state.init();
  23622. // done!
  23623. return speedy_vision_default().Promise.resolve();
  23624. }
  23625. /**
  23626. * Release this tracker
  23627. * @returns promise that resolves after the tracker has been released
  23628. * @internal
  23629. */
  23630. _release() {
  23631. // release states
  23632. for (const state of Object.values(this._state))
  23633. state.release();
  23634. // unlink session
  23635. this._session = null;
  23636. // done!
  23637. return speedy_vision_default().Promise.resolve();
  23638. }
  23639. /**
  23640. * Update the tracker
  23641. * @returns promise
  23642. * @internal
  23643. */
  23644. _update() {
  23645. // validate
  23646. if (this._session == null)
  23647. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
  23648. // compute the screen size for image processing purposes
  23649. // note: this may change over time...!
  23650. const media = this._session.media;
  23651. const aspectRatio = media.width / media.height;
  23652. const screenSize = Utils.resolution(this._resolution, aspectRatio);
  23653. // run the active state
  23654. const activeState = this._state[this._activeStateName];
  23655. return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
  23656. // update the output of the tracker
  23657. this._lastOutput = trackerOutput;
  23658. // need to change the state?
  23659. if (this._activeStateName != nextState) {
  23660. activeState.onLeaveState();
  23661. this._activeStateName = nextState;
  23662. this._state[nextState].onEnterState(nextStateSettings || {});
  23663. }
  23664. });
  23665. }
  23666. /**
  23667. * Get reference image
* @param keypointIndex index of a keypoint of the trained set
* @returns the reference image associated with the keypoint
  23670. * @internal
  23671. */
  23672. _referenceImageOfKeypoint(keypointIndex) {
  23673. const training = this._state.training;
  23674. return training.referenceImageOfKeypoint(keypointIndex);
  23675. }
  23676. /**
  23677. * Get reference image index
* @param keypointIndex index of a keypoint of the trained set
  23679. * @returns reference image index, or -1 if not found
  23680. * @internal
  23681. */
  23682. _referenceImageIndexOfKeypoint(keypointIndex) {
  23683. const training = this._state.training;
  23684. return training.referenceImageIndexOfKeypoint(keypointIndex);
  23685. }
  23686. /**
  23687. * Get a keypoint of the trained set
  23688. * @param keypointIndex
  23689. * @returns a keypoint
  23690. * @internal
  23691. */
  23692. _referenceKeypoint(keypointIndex) {
  23693. const training = this._state.training;
  23694. return training.referenceKeypoint(keypointIndex);
  23695. }
  23696. }
  23697. ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
  23698. /*
  23699. * MARTINS.js
  23700. * GPU-accelerated Augmented Reality for the web
  23701. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23702. *
  23703. * This program is free software: you can redistribute it and/or modify
  23704. * it under the terms of the GNU Lesser General Public License as published
  23705. * by the Free Software Foundation, either version 3 of the License, or
  23706. * (at your option) any later version.
  23707. *
  23708. * This program is distributed in the hope that it will be useful,
  23709. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23710. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23711. * GNU Lesser General Public License for more details.
  23712. *
  23713. * You should have received a copy of the GNU Lesser General Public License
  23714. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23715. *
  23716. * tracker-factory.ts
  23717. * Tracker factory
  23718. */
  23719. /**
  23720. * Tracker factory
  23721. */
  23722. class TrackerFactory {
  23723. /**
  23724. * Create an Image Tracker
  23725. */
  23726. static ImageTracker() {
  23727. return new ImageTracker();
  23728. }
  23729. }
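/*
 * Usage sketch (illustrative; not part of the bundle): creating an Image Tracker via
 * the factory. Setting the resolution to 'md' is an assumption — any token accepted by
 * Utils.resolution() works; reference images still have to be added to tracker.database
 * before a session is started.
 *
 *   const tracker = TrackerFactory.ImageTracker(); // also exposed as Martins.Tracker.ImageTracker()
 *   tracker.resolution = 'md';
 *   console.log(tracker.type);  // "image-tracker"
 *   console.log(tracker.state); // "initial"
 */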
  23730. ;// CONCATENATED MODULE: ./src/sources/video-source.ts
  23731. /*
  23732. * MARTINS.js
  23733. * GPU-accelerated Augmented Reality for the web
  23734. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23735. *
  23736. * This program is free software: you can redistribute it and/or modify
  23737. * it under the terms of the GNU Lesser General Public License as published
  23738. * by the Free Software Foundation, either version 3 of the License, or
  23739. * (at your option) any later version.
  23740. *
  23741. * This program is distributed in the hope that it will be useful,
  23742. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23743. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23744. * GNU Lesser General Public License for more details.
  23745. *
  23746. * You should have received a copy of the GNU Lesser General Public License
  23747. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23748. *
  23749. * video-source.ts
  23750. * HTMLVideoElement-based source of data
  23751. */
  23752. /** A message to be displayed if a video can't autoplay and user interaction is required */
  23753. const ALERT_MESSAGE = 'Tap on the screen to start';
  23754. /** Whether or not we have displayed the ALERT_MESSAGE */
  23755. let displayedAlertMessage = false;
  23756. /**
  23757. * HTMLVideoElement-based source of data
  23758. */
  23759. class VideoSource {
  23760. /**
  23761. * Constructor
  23762. */
  23763. constructor(video) {
  23764. Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
  23765. this._video = video;
  23766. this._media = null;
  23767. }
  23768. /**
  23769. * A type-identifier of the source of data
  23770. * @internal
  23771. */
  23772. get _type() {
  23773. return 'video';
  23774. }
  23775. /**
  23776. * Get media
  23777. * @internal
  23778. */
  23779. get _data() {
  23780. if (this._media == null)
  23781. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  23782. return this._media;
  23783. }
  23784. /**
  23785. * Stats related to this source of data
  23786. * @internal
  23787. */
  23788. get _stats() {
  23789. const media = this._media;
  23790. if (media != null)
  23791. return `${media.width}x${media.height} video`;
  23792. else
  23793. return 'uninitialized video';
  23794. }
  23795. /**
  23796. * Initialize this source of data
  23797. * @returns a promise that resolves as soon as this source of data is initialized
  23798. * @internal
  23799. */
  23800. _init() {
  23801. Utils.log(`Initializing ${this._type} source...`);
  23802. // prepare the video before loading the SpeedyMedia!
  23803. return this._prepareVideo(this._video).then(video => {
  23804. Utils.log('The video is prepared');
  23805. return speedy_vision_default().load(video).then(media => {
  23806. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  23807. this._media = media;
  23808. });
  23809. });
  23810. }
  23811. /**
  23812. * Release this source of data
  23813. * @returns a promise that resolves as soon as this source of data is released
  23814. * @internal
  23815. */
  23816. _release() {
  23817. if (this._media)
  23818. this._media.release();
  23819. this._media = null;
  23820. return speedy_vision_default().Promise.resolve();
  23821. }
  23822. /**
  23823. * Handle browser-specific quirks for <video> elements
  23824. * @param video a video element
  23825. * @returns a promise that resolves to the input video
  23826. */
  23827. _prepareVideo(video) {
  23828. // WebKit <video> policies for iOS:
  23829. // https://webkit.org/blog/6784/new-video-policies-for-ios/
  23830. // required on iOS; nice to have in all browsers
  23831. video.setAttribute('playsinline', '');
  23832. // handle autoplay
  23833. return this._handleAutoPlay(video).then(video => {
  23834. // handle WebKit quirks
  23835. if (Utils.isWebKit()) {
  23836. // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
  23837. // on iOS 15.2-17.3, this hack doesn't seem necessary, but works okay
  23838. if (video.hidden) {
  23839. video.hidden = false;
  23840. video.style.setProperty('opacity', '0');
  23841. video.style.setProperty('position', 'absolute');
  23842. //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
  23843. //video.style.setProperty('visibility', 'hidden'); // doesn't work either
  23844. }
  23845. }
  23846. // done
  23847. return video;
  23848. });
  23849. }
  23850. /**
  23851. * Handle browser-specific quirks for videos marked with autoplay
  23852. * @param video a <video> marked with autoplay
  23853. * @returns a promise that resolves to the input video
  23854. */
  23855. _handleAutoPlay(video) {
  23856. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  23857. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  23858. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  23859. // nothing to do?
  23860. if (!video.autoplay)
  23861. return speedy_vision_default().Promise.resolve(video);
  23862. // videos marked with autoplay should be muted
  23863. if (!video.muted) {
  23864. Utils.warning('Videos marked with autoplay should be muted', video);
  23865. video.muted = true;
  23866. }
  23867. // the browser may not honor the autoplay attribute if the video is not
  23868. // visible on-screen. So, let's try to play the video in any case.
  23869. return this._waitUntilPlayable(video).then(video => {
  23870. // try to play the video
  23871. const promise = video.play();
  23872. // handle older browsers
  23873. if (promise === undefined)
  23874. return video;
  23875. // resolve if successful
  23876. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23877. promise.then(() => resolve(video), error => {
  23878. // can't play the video
  23879. Utils.error(`Can't autoplay video!`, error, video);
  23880. // autoplay is blocked for some reason
  23881. if (error.name == 'NotAllowedError') {
  23882. Utils.warning('Tip: allow manual playback');
  23883. if (Utils.isIOS())
  23884. Utils.warning('Is low power mode on?');
  23885. // User interaction is required to play the video. We can
  23886. // solve this here (easy and convenient to do) or at the
  23887. // application layer (for a better user experience). If the
  23888. // latter is preferred, just disable autoplay and play the
video programmatically.
  23890. if (video.hidden || !video.controls || video.parentNode === null) {
  23891. // this is added for convenience
  23892. document.body.addEventListener('pointerdown', () => video.play());
  23893. // ask only once for user interaction
  23894. if (!displayedAlertMessage) {
  23895. alert(ALERT_MESSAGE);
  23896. displayedAlertMessage = true;
  23897. }
  23898. // XXX what if the Session mode is inline? In this
  23899. // case, this convenience code may be undesirable.
  23900. // A workaround is to disable autoplay.
  23901. }
  23902. /*else {
  23903. // play the video after the first interaction with the page
  23904. const polling = setInterval(() => {
  23905. video.play().then(() => clearInterval(polling));
  23906. }, 500);
  23907. }*/
  23908. }
  23909. // unsupported media source
  23910. else if (error.name == 'NotSupportedError') {
  23911. reject(new NotSupportedError('Unsupported video format', error));
  23912. return;
  23913. }
  23914. // done
  23915. resolve(video);
  23916. });
  23917. });
  23918. });
  23919. }
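/*
 * Sketch of an autoplay-friendly setup for a <video> passed to a VideoSource
 * (illustrative; the element id and the file name are assumptions):
 *
 *   const video = document.getElementById('my-video');
 *   video.muted = true;       // required for autoplay in most browsers (see warning above)
 *   video.autoplay = true;
 *   video.playsInline = true; // required on iOS
 *   video.src = 'target-scene.webm';
 */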
  23920. /**
  23921. * Wait for the input video to be playable
  23922. * @param video
  23923. * @returns a promise that resolves to the input video when it can be played through to the end
  23924. */
  23925. _waitUntilPlayable(video) {
  23926. const TIMEOUT = 15000, INTERVAL = 500;
  23927. if (video.readyState >= 4)
  23928. return speedy_vision_default().Promise.resolve(video);
  23929. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23930. let ms = 0, t = setInterval(() => {
  23931. if (video.readyState >= 4) { // canplaythrough
  23932. clearInterval(t);
  23933. resolve(video);
  23934. }
  23935. else if ((ms += INTERVAL) >= TIMEOUT) {
  23936. clearInterval(t);
  23937. reject(new TimeoutError('The video took too long to load'));
  23938. }
  23939. }, INTERVAL);
  23940. });
  23941. }
  23942. }
  23943. ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
  23944. /*
  23945. * MARTINS.js
  23946. * GPU-accelerated Augmented Reality for the web
  23947. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  23948. *
  23949. * This program is free software: you can redistribute it and/or modify
  23950. * it under the terms of the GNU Lesser General Public License as published
  23951. * by the Free Software Foundation, either version 3 of the License, or
  23952. * (at your option) any later version.
  23953. *
  23954. * This program is distributed in the hope that it will be useful,
  23955. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23956. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23957. * GNU Lesser General Public License for more details.
  23958. *
  23959. * You should have received a copy of the GNU Lesser General Public License
  23960. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23961. *
  23962. * canvas-source.ts
  23963. * HTMLCanvasElement-based source of data
  23964. */
  23965. /**
  23966. * HTMLCanvasElement-based source of data
  23967. */
  23968. class CanvasSource {
  23969. /**
  23970. * Constructor
  23971. */
  23972. constructor(canvas) {
  23973. Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
  23974. this._canvas = canvas;
  23975. this._media = null;
  23976. }
  23977. /**
  23978. * A type-identifier of the source of data
  23979. * @internal
  23980. */
  23981. get _type() {
  23982. return 'canvas';
  23983. }
  23984. /**
  23985. * Get media
  23986. * @internal
  23987. */
  23988. get _data() {
  23989. if (this._media == null)
  23990. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  23991. return this._media;
  23992. }
  23993. /**
  23994. * Stats related to this source of data
  23995. * @internal
  23996. */
  23997. get _stats() {
  23998. const media = this._media;
  23999. if (media != null)
  24000. return `${media.width}x${media.height} canvas`;
  24001. else
  24002. return 'uninitialized canvas';
  24003. }
  24004. /**
  24005. * Initialize this source of data
  24006. * @returns a promise that resolves as soon as this source of data is initialized
  24007. * @internal
  24008. */
  24009. _init() {
  24010. return speedy_vision_default().load(this._canvas).then(media => {
  24011. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  24012. this._media = media;
  24013. });
  24014. }
  24015. /**
  24016. * Release this source of data
  24017. * @returns a promise that resolves as soon as this source of data is released
  24018. * @internal
  24019. */
  24020. _release() {
  24021. if (this._media)
  24022. this._media.release();
  24023. this._media = null;
  24024. return speedy_vision_default().Promise.resolve();
  24025. }
  24026. }
  24027. ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
  24028. /*
  24029. * MARTINS.js
  24030. * GPU-accelerated Augmented Reality for the web
  24031. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24032. *
  24033. * This program is free software: you can redistribute it and/or modify
  24034. * it under the terms of the GNU Lesser General Public License as published
  24035. * by the Free Software Foundation, either version 3 of the License, or
  24036. * (at your option) any later version.
  24037. *
  24038. * This program is distributed in the hope that it will be useful,
  24039. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24040. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24041. * GNU Lesser General Public License for more details.
  24042. *
  24043. * You should have received a copy of the GNU Lesser General Public License
  24044. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24045. *
  24046. * camera-source.ts
  24047. * Webcam-based source of data
  24048. */
  24049. /** Default options for camera sources */
  24050. const DEFAULT_CAMERA_OPTIONS = {
  24051. resolution: 'md',
  24052. aspectRatio: 16 / 9,
  24053. constraints: { facingMode: 'environment' },
  24054. };
  24055. /**
  24056. * Webcam-based source of data
  24057. */
  24058. class CameraSource extends VideoSource {
  24059. /**
  24060. * Constructor
  24061. */
  24062. constructor(options) {
  24063. const video = document.createElement('video');
  24064. super(video);
  24065. this._cameraVideo = video;
  24066. this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
  24067. }
  24068. /**
  24069. * Camera resolution
  24070. */
  24071. get resolution() {
  24072. return this._options.resolution;
  24073. }
  24074. /**
  24075. * Initialize this source of data
  24076. * @returns a promise that resolves as soon as this source of data is initialized
  24077. * @internal
  24078. */
  24079. _init() {
  24080. Utils.log('Accessing the webcam...');
  24081. // validate
  24082. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  24083. throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
  24084. // set up media constraints
  24085. const options = this._options;
  24086. const size = Utils.resolution(options.resolution, options.aspectRatio);
  24087. const constraints = {
  24088. audio: false,
  24089. video: Object.assign({ width: size.width, height: size.height }, options.constraints)
  24090. };
  24091. // load camera stream
  24092. return new (speedy_vision_default()).Promise((resolve, reject) => {
  24093. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  24094. const video = this._cameraVideo;
  24095. video.onloadedmetadata = () => {
  24096. const promise = video.play();
  24097. const success = 'Access to the webcam has been granted.';
  24098. // handle older browsers
  24099. if (promise === undefined) {
  24100. Utils.log(success);
  24101. resolve(video);
  24102. return;
  24103. }
  24104. // handle promise
  24105. promise.then(() => {
  24106. Utils.log(success);
  24107. resolve(video);
  24108. }).catch(error => {
  24109. reject(new IllegalOperationError('Webcam error!', error));
  24110. });
  24111. };
  24112. video.setAttribute('playsinline', '');
  24113. video.setAttribute('autoplay', '');
  24114. video.setAttribute('muted', '');
  24115. video.srcObject = stream;
  24116. }).catch(error => {
  24117. reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
  24118. });
}).then(_ => super._init()); // this will call VideoSource._prepareVideo()
  24120. }
  24121. /**
  24122. * Release this source of data
  24123. * @returns a promise that resolves as soon as this source of data is released
  24124. * @internal
  24125. */
  24126. _release() {
  24127. const stream = this._cameraVideo.srcObject;
  24128. const tracks = stream.getTracks();
  24129. // stop camera feed
  24130. tracks.forEach(track => track.stop());
  24131. this._cameraVideo.onloadedmetadata = null;
  24132. this._cameraVideo.srcObject = null;
  24133. // release the media
  24134. return super._release();
  24135. }
  24136. }
  24137. ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
  24138. /*
  24139. * MARTINS.js
  24140. * GPU-accelerated Augmented Reality for the web
  24141. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24142. *
  24143. * This program is free software: you can redistribute it and/or modify
  24144. * it under the terms of the GNU Lesser General Public License as published
  24145. * by the Free Software Foundation, either version 3 of the License, or
  24146. * (at your option) any later version.
  24147. *
  24148. * This program is distributed in the hope that it will be useful,
  24149. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24150. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24151. * GNU Lesser General Public License for more details.
  24152. *
  24153. * You should have received a copy of the GNU Lesser General Public License
  24154. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24155. *
  24156. * source-factory.ts
  24157. * Factory of sources of data
  24158. */
  24159. /**
  24160. * Factory of sources of data
  24161. */
  24162. class SourceFactory {
  24163. /**
  24164. * Create a <video>-based source of data
  24165. * @param video video element
  24166. */
  24167. static Video(video) {
  24168. return new VideoSource(video);
  24169. }
  24170. /**
  24171. * Create a <canvas>-based source of data
  24172. * @param canvas canvas element
  24173. */
  24174. static Canvas(canvas) {
  24175. return new CanvasSource(canvas);
  24176. }
  24177. /**
  24178. * Create a Webcam-based source of data
  24179. * @param options optional options object
  24180. */
  24181. static Camera(options = {}) {
  24182. return new CameraSource(options);
  24183. }
  24184. }
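/*
 * Usage sketch (illustrative; not part of the bundle): the three kinds of sources
 * produced by this factory. The element ids are assumptions.
 *
 *   const webcam = SourceFactory.Camera({ resolution: 'md', constraints: { facingMode: 'user' } });
 *   const video  = SourceFactory.Video(document.getElementById('my-video'));
 *   const canvas = SourceFactory.Canvas(document.getElementById('my-canvas'));
 */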
  24185. ;// CONCATENATED MODULE: ./src/main.ts
  24186. /*
  24187. * MARTINS.js
  24188. * GPU-accelerated Augmented Reality for the web
  24189. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  24190. *
  24191. * This program is free software: you can redistribute it and/or modify
  24192. * it under the terms of the GNU Lesser General Public License as published
  24193. * by the Free Software Foundation, either version 3 of the License, or
  24194. * (at your option) any later version.
  24195. *
  24196. * This program is distributed in the hope that it will be useful,
  24197. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  24198. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  24199. * GNU Lesser General Public License for more details.
  24200. *
  24201. * You should have received a copy of the GNU Lesser General Public License
  24202. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  24203. *
  24204. * main.ts
  24205. * Entry point
  24206. */
  24207. /**
  24208. * GPU-accelerated Augmented Reality for the web
  24209. */
  24210. class Martins {
  24211. /**
  24212. * Start a new session
  24213. * @param options
  24214. * @returns a promise that resolves to a new session
  24215. */
  24216. static startSession(options) {
  24217. return Session.instantiate(options);
  24218. }
  24219. /**
  24220. * Trackers
  24221. */
  24222. static get Tracker() {
  24223. return TrackerFactory;
  24224. }
  24225. /**
  24226. * Sources of data
  24227. */
  24228. static get Source() {
  24229. return SourceFactory;
  24230. }
  24231. /**
  24232. * Create a viewport
  24233. * @param settings
  24234. * @returns a new viewport with the specified settings
  24235. */
  24236. static Viewport(settings) {
  24237. return new BaseViewport(settings);
  24238. }
  24239. /**
  24240. * Global Settings
  24241. */
  24242. static get Settings() {
  24243. return Settings;
  24244. }
  24245. /**
  24246. * Engine version
  24247. */
  24248. static get version() {
return "0.2.0";
  24253. }
  24254. /**
  24255. * Speedy Vision
  24256. */
  24257. static get Speedy() {
  24258. return (speedy_vision_default());
  24259. }
  24260. /**
* Checks if the engine can run in the client's browser
  24262. * @returns true if the engine is compatible with the browser
  24263. */
  24264. static isSupported() {
  24265. return Session.isSupported();
  24266. }
  24267. }
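/*
 * End-to-end usage sketch (illustrative; not part of the bundle). The option names
 * passed to Viewport() and startSession() follow the public documentation of MARTINS.js
 * and are assumptions as far as this excerpt is concerned.
 *
 *   if(Martins.isSupported()) {
 *       const tracker = Martins.Tracker.ImageTracker();
 *       const source = Martins.Source.Camera();
 *       const viewport = Martins.Viewport({ container: document.getElementById('ar-viewport') });
 *       Martins.startSession({ mode: 'immersive', trackers: [tracker], sources: [source], viewport })
 *           .then(session => console.log('AR session started', session));
 *   }
 */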
  24268. // Freeze the namespace
  24269. Object.freeze(Martins);
  24270. // Add Speedy Vision to global scope
  24271. ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
  24272. // Display a notice
  24273. Utils.log(`MARTINS.js version ${Martins.version}. ` +
  24274. `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
  24275. "https://github.com/alemart/martins-js");
  24276. })();
  24277. __webpack_exports__ = __webpack_exports__["default"];
  24278. /******/ return __webpack_exports__;
  24279. /******/ })()
  24280. ;
  24281. });