
martins.js (1016 KB)

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
  1. /*!
  2. * MARTINS.js Free Edition version 0.1.2-wip
  3. * GPU-accelerated Augmented Reality for the web
  4. * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  5. * https://github.com/alemart/martins-js
  6. *
  7. * @license AGPL-3.0-only
  8. * Date: 2024-01-10T13:47:03.478Z
  9. */
  10. (function webpackUniversalModuleDefinition(root, factory) {
  11. if(typeof exports === 'object' && typeof module === 'object')
  12. module.exports = factory();
  13. else if(typeof define === 'function' && define.amd)
  14. define([], factory);
  15. else if(typeof exports === 'object')
  16. exports["Martins"] = factory();
  17. else
  18. root["Martins"] = factory();
  19. })(self, function() {
  20. return /******/ (() => { // webpackBootstrap
  21. /******/ var __webpack_modules__ = ({
  22. /***/ 528:
  23. /***/ ((module) => {
  24. /*!
  25. * Speedy Vision version 0.9.1-wip
  26. * GPU-accelerated Computer Vision for JavaScript
  27. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  28. * https://github.com/alemart/speedy-vision
  29. *
  30. * @license Apache-2.0
  31. * Date: 2024-01-10T13:44:25.122Z
  32. */
  33. (function webpackUniversalModuleDefinition(root, factory) {
  34. if(true)
  35. module.exports = factory();
  36. else {}
  37. })(self, function() {
  38. return /******/ (() => { // webpackBootstrap
  39. /******/ var __webpack_modules__ = ({
  40. /***/ 3135:
  41. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_800__) => {
  42. "use strict";
  43. /* harmony export */ __nested_webpack_require_800__.d(__webpack_exports__, {
  44. /* harmony export */ "Z": () => (/* binding */ Settings)
  45. /* harmony export */ });
  46. /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_800__(2411);
  47. /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_800__(7905);
  48. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_800__(5484);
  49. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_800__(3841);
  50. /*
  51. * speedy-vision.js
  52. * GPU-accelerated Computer Vision for JavaScript
  53. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  54. *
  55. * Licensed under the Apache License, Version 2.0 (the "License");
  56. * you may not use this file except in compliance with the License.
  57. * You may obtain a copy of the License at
  58. *
  59. * http://www.apache.org/licenses/LICENSE-2.0
  60. *
  61. * Unless required by applicable law or agreed to in writing, software
  62. * distributed under the License is distributed on an "AS IS" BASIS,
  63. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  64. * See the License for the specific language governing permissions and
  65. * limitations under the License.
  66. *
  67. * settings.js
  68. * Global settings
  69. */
  70. /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
  71. /** @typedef {"raf" | "asap"} GPUPollingMode */
  72. /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
  73. /** @type {GPUPollingMode} Default GPU polling mode */
  74. const DEFAULT_GPU_POLLING_MODE = 'raf';
  75. /** @type {GPUPollingMode} GPU polling mode */
  76. let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
  77. /** @type {LoggingMode} logging mode */
  78. let loggingMode = 'default';
  79. /**
  80. * Global settings
  81. */
  82. class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .R
  83. {
  84. /**
  85. * Power preference of the WebGL context
  86. * @returns {PowerPreference}
  87. */
  88. static get powerPreference()
  89. {
  90. return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL.powerPreference */ .$.powerPreference;
  91. }
  92. /**
  93. * Power preference of the WebGL context
  94. * @param {PowerPreference} value
  95. */
  96. static set powerPreference(value)
  97. {
  98. _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL.powerPreference */ .$.powerPreference = value;
  99. }
  100. /**
  101. * GPU polling mode
  102. * @returns {GPUPollingMode}
  103. */
  104. static get gpuPollingMode()
  105. {
  106. return gpuPollingMode;
  107. }
  108. /**
  109. * GPU polling mode
  110. * @param {GPUPollingMode} value
  111. */
  112. static set gpuPollingMode(value)
  113. {
  114. if(value !== 'raf' && value !== 'asap')
  115. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .mG(`Invalid GPU polling mode: "${value}"`);
  116. gpuPollingMode = value;
  117. }
  118. /**
  119. * Logging mode
  120. * @returns {LoggingMode}
  121. */
  122. static get logging()
  123. {
  124. return loggingMode;
  125. }
  126. /**
  127. * Logging mode
  128. * @param {LoggingMode} mode
  129. */
  130. static set logging(mode)
  131. {
  132. if(mode !== 'default' && mode !== 'none' && mode !== 'diagnostic')
  133. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .mG(`Invalid logging mode: "${mode}"`);
  134. else if(mode === 'diagnostic')
  135. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils.log */ .c.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
  136. loggingMode = mode;
  137. }
  138. }
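/*
 * Usage sketch (not invoked by the bundle): how application code might drive
 * the Settings class above. The 'asap' and 'none' values come from the setters
 * in this file; the 'high-performance' string is an assumption based on the
 * WebGL powerPreference context attribute.
 */
function _exampleConfigureSettings()
{
    Settings.gpuPollingMode = 'asap';              // poll the GPU as soon as possible, instead of once per animation frame
    Settings.logging = 'none';                     // silence library logging
    Settings.powerPreference = 'high-performance'; // assumed WebGL-style value; forwarded to SpeedyGL
    return Settings.gpuPollingMode;                // 'asap'
}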
  139. /***/ }),
  140. /***/ 5137:
  141. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_4472__) => {
  142. "use strict";
  143. /* harmony export */ __nested_webpack_require_4472__.d(__webpack_exports__, {
  144. /* harmony export */ "N": () => (/* binding */ SpeedyMatrixExpr)
  145. /* harmony export */ });
  146. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4472__(4368);
  147. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4472__(5484);
  148. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4472__(3841);
  149. /*
  150. * speedy-vision.js
  151. * GPU-accelerated Computer Vision for JavaScript
  152. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  153. *
  154. * Licensed under the Apache License, Version 2.0 (the "License");
  155. * you may not use this file except in compliance with the License.
  156. * You may obtain a copy of the License at
  157. *
  158. * http://www.apache.org/licenses/LICENSE-2.0
  159. *
  160. * Unless required by applicable law or agreed to in writing, software
  161. * distributed under the License is distributed on an "AS IS" BASIS,
  162. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  163. * See the License for the specific language governing permissions and
  164. * limitations under the License.
  165. *
  166. * speedy-matrix-expr.js
  167. * Symbolic matrix expressions
  168. */
  169. /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
  170. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
  171. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
  172. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  173. /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
  174. /** @const {Dtype2BufferType} */
  175. const DTYPE_TO_BUFFER_TYPE = Object.freeze({
  176. 'float32': Float32Array
  177. });
  178. /**
  179. * @abstract Matrix expression
  180. * It's an opaque object representing an algebraic
  181. * expression. It has no data attached to it.
  182. */
  183. class SpeedyMatrixExpr
  184. {
  185. /**
  186. * Constructor
  187. * @param {number} rows
  188. * @param {number} columns
  189. * @param {SpeedyMatrixDtype} dtype
  190. */
  191. constructor(rows, columns, dtype)
  192. {
  193. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(rows > 0 && columns > 0);
  194. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
  195. /** @type {number} number of rows */
  196. this._rows = rows | 0;
  197. /** @type {number} number of columns */
  198. this._columns = columns | 0;
  199. /** @type {SpeedyMatrixDtype} data type */
  200. this._dtype = dtype;
  201. }
  202. /**
  203. * Number of rows
  204. * @returns {number}
  205. */
  206. get rows()
  207. {
  208. return this._rows;
  209. }
  210. /**
  211. * Number of columns
  212. * @returns {number}
  213. */
  214. get columns()
  215. {
  216. return this._columns;
  217. }
  218. /**
  219. * Data type
  220. * @returns {SpeedyMatrixDtype}
  221. */
  222. get dtype()
  223. {
  224. return this._dtype;
  225. }
  226. /**
  227. * Default data type
  228. * @returns {SpeedyMatrixDtype}
  229. */
  230. static get DEFAULT_DTYPE()
  231. {
  232. return 'float32';
  233. }
  234. /**
  235. * Buffer types
  236. * @returns {Dtype2BufferType}
  237. */
  238. static get BUFFER_TYPE()
  239. {
  240. return DTYPE_TO_BUFFER_TYPE;
  241. }
  242. /**
  243. * Matrix addition
  244. * @param {SpeedyMatrixExpr} expr
  245. * @returns {SpeedyMatrixExpr}
  246. */
  247. plus(expr)
  248. {
  249. return new SpeedyMatrixAddExpr(this, expr);
  250. }
  251. /**
  252. * Matrix subtraction
  253. * @param {SpeedyMatrixExpr} expr
  254. * @returns {SpeedyMatrixExpr}
  255. */
  256. minus(expr)
  257. {
  258. return new SpeedyMatrixSubtractExpr(this, expr);
  259. }
  260. /**
  261. * Matrix multiplication
  262. * @param {SpeedyMatrixExpr|number} expr
  263. * @returns {SpeedyMatrixExpr}
  264. */
  265. times(expr)
  266. {
  267. if(typeof expr === 'number')
  268. return new SpeedyMatrixScaleExpr(this, expr);
  269. else
  270. return new SpeedyMatrixMultiplyExpr(this, expr);
  271. }
  272. /**
  273. * Matrix transposition
  274. * @returns {SpeedyMatrixExpr}
  275. */
  276. transpose()
  277. {
  278. return new SpeedyMatrixTransposeExpr(this);
  279. }
  280. /**
  281. * Matrix inversion
  282. * @returns {SpeedyMatrixExpr}
  283. */
  284. inverse()
  285. {
  286. return new SpeedyMatrixInvertExpr(this);
  287. }
  288. /**
  289. * Component-wise multiplication
  290. * @param {SpeedyMatrixExpr} expr
  291. * @returns {SpeedyMatrixExpr}
  292. */
  293. compMult(expr)
  294. {
  295. return new SpeedyMatrixCompMultExpr(this, expr);
  296. }
  297. /**
  298. * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
  299. * @param {SpeedyMatrixExpr} expr
  300. * @returns {SpeedyMatrixExpr}
  301. */
  302. ldiv(expr)
  303. {
  304. return new SpeedyMatrixLdivExpr(this, expr);
  305. }
  306. /**
  307. * Returns a human-readable string representation of the matrix expression
  308. * @returns {string}
  309. */
  310. toString()
  311. {
  312. return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
  313. }
  314. /**
  315. * Evaluate this expression
  316. * @abstract
  317. * @param {WebAssembly.Instance} wasm
  318. * @param {SpeedyMatrixWASMMemory} memory
  319. * @returns {SpeedyMatrix}
  320. */
  321. _evaluate(wasm, memory)
  322. {
  323. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .Mi();
  324. }
  325. }
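/*
 * Usage sketch (not invoked by the bundle): the methods above only build an
 * expression tree; nothing is computed until the tree is evaluated against
 * concrete matrices. A and B are assumed to be compatible SpeedyMatrix
 * instances supplied by the caller.
 */
function _exampleBuildExpression(A, B)
{
    // (A^T A)^(-1) A^T B as nested SpeedyMatrixExpr nodes
    const At = A.transpose();
    return At.times(A).inverse().times(At).times(B); // still a SpeedyMatrixExpr, not yet evaluated
}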
  326. const { SpeedyMatrix } = __nested_webpack_require_4472__(8007);
  327. /**
  328. * @abstract operation storing a temporary matrix
  329. */
  330. class SpeedyMatrixTempExpr extends SpeedyMatrixExpr
  331. {
  332. /**
  333. * Constructor
  334. * @param {number} rows
  335. * @param {number} columns
  336. * @param {SpeedyMatrixDtype} dtype
  337. */
  338. constructor(rows, columns, dtype)
  339. {
  340. super(rows, columns, dtype);
  341. /** @type {SpeedyMatrix} holds the results of a computation */
  342. this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
  343. }
  344. }
  345. /**
  346. * @abstract unary operation
  347. */
  348. class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr
  349. {
  350. /**
  351. * Constructor
  352. * @param {number} rows rows of the output matrix
  353. * @param {number} columns columns of the output matrix
  354. * @param {SpeedyMatrixExpr} operand
  355. */
  356. constructor(rows, columns, operand)
  357. {
  358. super(rows, columns, operand.dtype);
  359. /** @type {SpeedyMatrixExpr} operand */
  360. this._operand = operand;
  361. }
  362. /**
  363. * Evaluate this expression
  364. * @param {WebAssembly.Instance} wasm
  365. * @param {SpeedyMatrixWASMMemory} memory
  366. * @returns {SpeedyMatrix}
  367. */
  368. _evaluate(wasm, memory)
  369. {
  370. const operand = this._operand._evaluate(wasm, memory);
  371. const result = this._tempMatrix;
  372. // allocate matrices
  373. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.allocateMat32 */ .r.allocateMat32(wasm, memory, result);
  374. const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.allocateMat32 */ .r.allocateMat32(wasm, memory, operand);
  375. // copy operand to WASM memory
  376. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.copyToMat32 */ .r.copyToMat32(wasm, memory, operandptr, operand);
  377. // run the WASM routine
  378. this._compute(wasm, memory, resultptr, operandptr);
  379. // copy result from WASM memory
  380. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.copyFromMat32 */ .r.copyFromMat32(wasm, memory, resultptr, result);
  381. // deallocate matrices
  382. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.deallocateMat32 */ .r.deallocateMat32(wasm, memory, operandptr);
  383. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.deallocateMat32 */ .r.deallocateMat32(wasm, memory, resultptr);
  384. // done!
  385. return result;
  386. }
  387. /**
  388. * Compute the result of this operation
  389. * @abstract
  390. * @param {WebAssembly.Instance} wasm
  391. * @param {SpeedyMatrixWASMMemory} memory
  392. * @param {number} resultptr pointer to Mat32
  393. * @param {number} operandptr pointer to Mat32
  394. */
  395. _compute(wasm, memory, resultptr, operandptr)
  396. {
  397. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .Mi();
  398. }
  399. }
  400. /**
  401. * @abstract binary operation
  402. */
  403. class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr
  404. {
  405. /**
  406. * Constructor
  407. * @param {number} rows rows of the output matrix
  408. * @param {number} columns columns of the output matrix
  409. * @param {SpeedyMatrixExpr} left left operand
  410. * @param {SpeedyMatrixExpr} right right operand
  411. */
  412. constructor(rows, columns, left, right)
  413. {
  414. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(left.dtype === right.dtype);
  415. super(rows, columns, left.dtype);
  416. /** @type {SpeedyMatrixExpr} left operand */
  417. this._left = left;
  418. /** @type {SpeedyMatrixExpr} right operand */
  419. this._right = right;
  420. }
  421. /**
  422. * Evaluate this expression
  423. * @param {WebAssembly.Instance} wasm
  424. * @param {SpeedyMatrixWASMMemory} memory
  425. * @returns {SpeedyMatrix}
  426. */
  427. _evaluate(wasm, memory)
  428. {
  429. const left = this._left._evaluate(wasm, memory);
  430. const right = this._right._evaluate(wasm, memory);
  431. const result = this._tempMatrix;
  432. // allocate matrices
  433. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.allocateMat32 */ .r.allocateMat32(wasm, memory, result);
  434. const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.allocateMat32 */ .r.allocateMat32(wasm, memory, left);
  435. const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.allocateMat32 */ .r.allocateMat32(wasm, memory, right);
  436. // copy input matrices to WASM memory
  437. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.copyToMat32 */ .r.copyToMat32(wasm, memory, leftptr, left);
  438. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.copyToMat32 */ .r.copyToMat32(wasm, memory, rightptr, right);
  439. // run the WASM routine
  440. this._compute(wasm, memory, resultptr, leftptr, rightptr);
  441. // copy output matrix from WASM memory
  442. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.copyFromMat32 */ .r.copyFromMat32(wasm, memory, resultptr, result);
  443. // deallocate matrices
  444. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.deallocateMat32 */ .r.deallocateMat32(wasm, memory, rightptr);
  445. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.deallocateMat32 */ .r.deallocateMat32(wasm, memory, leftptr);
  446. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM.deallocateMat32 */ .r.deallocateMat32(wasm, memory, resultptr);
  447. // done!
  448. return result;
  449. }
  450. /**
  451. * Compute the result of this operation
  452. * @abstract
  453. * @param {WebAssembly.Instance} wasm
  454. * @param {SpeedyMatrixWASMMemory} memory
  455. * @param {number} resultptr pointer to Mat32
  456. * @param {number} leftptr pointer to Mat32
  457. * @param {number} rightptr pointer to Mat32
  458. */
  459. _compute(wasm, memory, resultptr, leftptr, rightptr)
  460. {
  461. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .Mi();
  462. }
  463. }
  464. /**
  465. * Transpose matrix
  466. */
  467. class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr
  468. {
  469. /**
  470. * Constructor
  471. * @param {SpeedyMatrixExpr} operand
  472. */
  473. constructor(operand)
  474. {
  475. super(operand.columns, operand.rows, operand);
  476. }
  477. /**
  478. * Compute result = operand^T
  479. * @param {WebAssembly.Instance} wasm
  480. * @param {SpeedyMatrixWASMMemory} memory
  481. * @param {number} resultptr pointer to Mat32
  482. * @param {number} operandptr pointer to Mat32
  483. */
  484. _compute(wasm, memory, resultptr, operandptr)
  485. {
  486. wasm.exports.Mat32_transpose(resultptr, operandptr);
  487. }
  488. }
  489. /**
  490. * Invert square matrix
  491. */
  492. class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr
  493. {
  494. /**
  495. * Constructor
  496. * @param {SpeedyMatrixExpr} operand
  497. */
  498. constructor(operand)
  499. {
  500. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(operand.rows === operand.columns);
  501. super(operand.rows, operand.columns, operand);
  502. /** @type {number} size of the matrix */
  503. this._size = operand.rows;
  504. }
  505. /**
  506. * Compute result = operand ^ (-1)
  507. * @param {WebAssembly.Instance} wasm
  508. * @param {SpeedyMatrixWASMMemory} memory
  509. * @param {number} resultptr pointer to Mat32
  510. * @param {number} operandptr pointer to Mat32
  511. */
  512. _compute(wasm, memory, resultptr, operandptr)
  513. {
  514. switch(this._size) {
  515. case 0: break;
  516. case 1:
  517. wasm.exports.Mat32_inverse1(resultptr, operandptr);
  518. break;
  519. case 2:
  520. wasm.exports.Mat32_inverse2(resultptr, operandptr);
  521. break;
  522. case 3:
  523. wasm.exports.Mat32_inverse3(resultptr, operandptr);
  524. break;
  525. default:
  526. wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
  527. break;
  528. }
  529. }
  530. }
  531. /**
  532. * Multiply matrix by a scalar value
  533. */
  534. class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr
  535. {
  536. /**
  537. * Constructor
  538. * @param {SpeedyMatrixExpr} operand
  539. * @param {number} scalar
  540. */
  541. constructor(operand, scalar)
  542. {
  543. super(operand.rows, operand.columns, operand);
  544. /** @type {number} scalar value */
  545. this._scalar = +scalar;
  546. }
  547. /**
  548. * Compute result = scalar * operand
  549. * @param {WebAssembly.Instance} wasm
  550. * @param {SpeedyMatrixWASMMemory} memory
  551. * @param {number} resultptr pointer to Mat32
  552. * @param {number} operandptr pointer to Mat32
  553. */
  554. _compute(wasm, memory, resultptr, operandptr)
  555. {
  556. wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
  557. }
  558. }
  559. /**
  560. * Matrix addition
  561. */
  562. class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr
  563. {
  564. /**
  565. * Constructor
  566. * @param {SpeedyMatrixExpr} left left operand
  567. * @param {SpeedyMatrixExpr} right right operand
  568. */
  569. constructor(left, right)
  570. {
  571. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(left.rows === right.rows && left.columns === right.columns);
  572. super(left.rows, left.columns, left, right);
  573. }
  574. /**
  575. * Compute result = left + right
  576. * @param {WebAssembly.Instance} wasm
  577. * @param {SpeedyMatrixWASMMemory} memory
  578. * @param {number} resultptr pointer to Mat32
  579. * @param {number} leftptr pointer to Mat32
  580. * @param {number} rightptr pointer to Mat32
  581. */
  582. _compute(wasm, memory, resultptr, leftptr, rightptr)
  583. {
  584. wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
  585. }
  586. }
  587. /**
  588. * Matrix subtraction
  589. */
  590. class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr
  591. {
  592. /**
  593. * Constructor
  594. * @param {SpeedyMatrixExpr} left left operand
  595. * @param {SpeedyMatrixExpr} right right operand
  596. */
  597. constructor(left, right)
  598. {
  599. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(left.rows === right.rows && left.columns === right.columns);
  600. super(left.rows, left.columns, left, right);
  601. }
  602. /**
  603. * Compute result = left - right
  604. * @param {WebAssembly.Instance} wasm
  605. * @param {SpeedyMatrixWASMMemory} memory
  606. * @param {number} resultptr pointer to Mat32
  607. * @param {number} leftptr pointer to Mat32
  608. * @param {number} rightptr pointer to Mat32
  609. */
  610. _compute(wasm, memory, resultptr, leftptr, rightptr)
  611. {
  612. wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
  613. }
  614. }
  615. /**
  616. * Matrix multiplication
  617. */
  618. class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr
  619. {
  620. /**
  621. * Constructor
  622. * @param {SpeedyMatrixExpr} left left operand
  623. * @param {SpeedyMatrixExpr} right right operand
  624. */
  625. constructor(left, right)
  626. {
  627. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(left.columns === right.rows);
  628. super(left.rows, right.columns, left, right);
  629. }
  630. /**
  631. * Compute result = left * right
  632. * @param {WebAssembly.Instance} wasm
  633. * @param {SpeedyMatrixWASMMemory} memory
  634. * @param {number} resultptr pointer to Mat32
  635. * @param {number} leftptr pointer to Mat32
  636. * @param {number} rightptr pointer to Mat32
  637. */
  638. _compute(wasm, memory, resultptr, leftptr, rightptr)
  639. {
  640. wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
  641. }
  642. }
  643. /**
  644. * Component-wise multiplication
  645. */
  646. class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr
  647. {
  648. /**
  649. * Constructor
  650. * @param {SpeedyMatrixExpr} left left operand
  651. * @param {SpeedyMatrixExpr} right right operand
  652. */
  653. constructor(left, right)
  654. {
  655. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(left.rows === right.rows && left.columns === right.columns);
  656. super(right.rows, right.columns, left, right);
  657. }
  658. /**
  659. * Compute result = left <compMult> right
  660. * @param {WebAssembly.Instance} wasm
  661. * @param {SpeedyMatrixWASMMemory} memory
  662. * @param {number} resultptr pointer to Mat32
  663. * @param {number} leftptr pointer to Mat32
  664. * @param {number} rightptr pointer to Mat32
  665. */
  666. _compute(wasm, memory, resultptr, leftptr, rightptr)
  667. {
  668. wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
  669. }
  670. }
  671. /**
  672. * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
  673. */
  674. class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr
  675. {
  676. /**
  677. * Constructor
  678. * @param {SpeedyMatrixExpr} left left operand
  679. * @param {SpeedyMatrixExpr} right right operand
  680. */
  681. constructor(left, right)
  682. {
  683. const m = left.rows, n = left.columns;
  684. // TODO right doesn't need to be a column vector
  685. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.assert */ .c.assert(m >= n && right.rows === m && right.columns === 1);
  686. super(n, 1, left, right);
  687. }
  688. /**
  689. * Compute result = left \ right
  690. * @param {WebAssembly.Instance} wasm
  691. * @param {SpeedyMatrixWASMMemory} memory
  692. * @param {number} resultptr pointer to Mat32
  693. * @param {number} leftptr pointer to Mat32
  694. * @param {number} rightptr pointer to Mat32
  695. */
  696. _compute(wasm, memory, resultptr, leftptr, rightptr)
  697. {
  698. wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
  699. }
  700. }
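/*
 * Usage sketch (not invoked by the bundle): ldiv() expresses an ordinary
 * least-squares solve backed by Mat32_qr_ols. A is assumed to be an m x n
 * SpeedyMatrix with m >= n, and b an m x 1 column vector.
 */
function _exampleLeastSquares(A, b)
{
    // x minimizes |A x - b|; the resulting expression is n x 1
    return A.ldiv(b);
}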
  701. /***/ }),
  702. /***/ 4368:
  703. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_23353__) => {
  704. "use strict";
  705. /* harmony export */ __nested_webpack_require_23353__.d(__webpack_exports__, {
  706. /* harmony export */ "r": () => (/* binding */ SpeedyMatrixWASM)
  707. /* harmony export */ });
  708. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_23353__(4500);
  709. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_23353__(3841);
  710. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_23353__(5484);
  711. /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_23353__(3020);
  712. /*
  713. * speedy-vision.js
  714. * GPU-accelerated Computer Vision for JavaScript
  715. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  716. *
  717. * Licensed under the Apache License, Version 2.0 (the "License");
  718. * you may not use this file except in compliance with the License.
  719. * You may obtain a copy of the License at
  720. *
  721. * http://www.apache.org/licenses/LICENSE-2.0
  722. *
  723. * Unless required by applicable law or agreed to in writing, software
  724. * distributed under the License is distributed on an "AS IS" BASIS,
  725. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  726. * See the License for the specific language governing permissions and
  727. * limitations under the License.
  728. *
  729. * speedy-matrix-wasm.js
  730. * WebAssembly bridge
  731. */
  732. /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  733. /**
  734. * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
  735. * @property {object} as
  736. * @property {WebAssembly.Memory} as.object
  737. * @property {Uint8Array} as.uint8
  738. * @property {Int32Array} as.int32
  739. * @property {Uint32Array} as.uint32
  740. * @property {Float32Array} as.float32
  741. * @property {Float64Array} as.float64
  742. */
  743. /**
  744. * @typedef {object} SpeedyMatrixWASMHandle
  745. * @property {WebAssembly.Instance} wasm
  746. * @property {SpeedyMatrixWASMMemory} memory
  747. * @property {WebAssembly.Module} module
  748. */
  749. /** @type {Uint8Array} WebAssembly binary */
  750. const WASM_BINARY = __nested_webpack_require_23353__(4209);
  751. /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
  752. let _instance = null;
  753. /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
  754. let _module = null;
  755. /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
  756. const _memory = (mem => ({
  757. as: {
  758. object: mem,
  759. uint8: new Uint8Array(mem.buffer),
  760. int32: new Int32Array(mem.buffer),
  761. uint32: new Uint32Array(mem.buffer),
  762. float32: new Float32Array(mem.buffer),
  763. float64: new Float64Array(mem.buffer),
  764. },
  765. }))(new WebAssembly.Memory({
  766. initial: 16, // 1 MB
  767. maximum: 256
  768. }));
  769. /**
  770. * WebAssembly utilities
  771. */
  772. class SpeedyMatrixWASM
  773. {
  774. /**
  775. * Gets you the WASM instance, augmented memory & module
  776. * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
  777. */
  778. static ready()
  779. {
  780. return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .s((resolve, reject) => {
  781. SpeedyMatrixWASM._ready(resolve, reject);
  782. });
  783. }
  784. /**
  785. * Synchronously gets you the WASM instance, augmented memory & module
  786. * @returns {SpeedyMatrixWASMHandle}
  787. */
  788. static get handle()
  789. {
  790. if(!_instance || !_module)
  791. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .IT(`Can't get WASM handle: routines not yet loaded`);
  792. return {
  793. wasm: _instance,
  794. memory: _memory,
  795. module: _module,
  796. };
  797. }
  798. /**
  799. * Gets you the WASM imports bound to a memory object
  800. * @param {SpeedyMatrixWASMMemory} memory
  801. * @returns {Object<string,Function>}
  802. */
  803. static imports(memory)
  804. {
  805. const obj = new SpeedyMatrixWASMImports(memory);
  806. return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype)
  807. .filter(property => typeof obj[property] === 'function' && property !== 'constructor')
  808. .reduce(
  809. (imports, methodName) => ((imports[methodName] = obj[methodName]), imports),
  810. Object.create(null)
  811. );
  812. }
  813. /**
  814. * Allocate a Mat32 in WebAssembly memory without copying any data
  815. * @param {WebAssembly.Instance} wasm
  816. * @param {SpeedyMatrixWASMMemory} memory
  817. * @param {SpeedyMatrix} matrix
  818. * @returns {number} pointer to the new Mat32
  819. */
  820. static allocateMat32(wasm, memory, matrix)
  821. {
  822. const dataptr = wasm.exports.malloc(matrix.data.byteLength);
  823. const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
  824. return matptr;
  825. }
  826. /**
  827. * Deallocate a Mat32 in WebAssembly
  828. * @param {WebAssembly.Instance} wasm
  829. * @param {SpeedyMatrixWASMMemory} memory
  830. * @param {number} matptr pointer to the allocated Mat32
  831. * @returns {number} NULL
  832. */
  833. static deallocateMat32(wasm, memory, matptr)
  834. {
  835. const dataptr = wasm.exports.Mat32_data(matptr);
  836. wasm.exports.free(matptr);
  837. wasm.exports.free(dataptr);
  838. return 0;
  839. }
  840. /**
  841. * Copy the data of a matrix to a WebAssembly Mat32
  842. * @param {WebAssembly.Instance} wasm
  843. * @param {SpeedyMatrixWASMMemory} memory
  844. * @param {number} matptr pointer to a Mat32
  845. * @param {SpeedyMatrix} matrix
  846. * @returns {number} matptr
  847. */
  848. static copyToMat32(wasm, memory, matptr, matrix)
  849. {
  850. // We assume the following:
  851. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  852. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  853. // 3. the data type is float32
  854. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils.assert */ .c.assert(
  855. //matrix.dtype === 'float32' &&
  856. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr)
  857. );
  858. const dataptr = wasm.exports.Mat32_data(matptr);
  859. memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
  860. return matptr;
  861. }
  862. /**
  863. * Copy the data of a WebAssembly Mat32 to a matrix
  864. * @param {WebAssembly.Instance} wasm
  865. * @param {SpeedyMatrixWASMMemory} memory
  866. * @param {number} matptr pointer to a Mat32
  867. * @param {SpeedyMatrix} matrix
  868. * @returns {number} matptr
  869. */
  870. static copyFromMat32(wasm, memory, matptr, matrix)
  871. {
  872. // We assume the following:
  873. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  874. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  875. // 3. the data type is float32
  876. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils.assert */ .c.assert(
  877. //matrix.dtype === 'float32' &&
  878. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr)
  879. );
  880. const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
  881. for(let offset = matrix.data.length - 1; offset >= 0; offset--)
  882. matrix.data[offset] = memory.as.float32[base + offset];
  883. return matptr;
  884. }
  885. /**
  886. * Polls the WebAssembly instance until it's ready
  887. * @param {function(SpeedyMatrixWASMHandle): void} resolve
  888. * @param {function(Error): void} reject
  889. * @param {number} [counter]
  890. */
  891. static _ready(resolve, reject, counter = 1000)
  892. {
  893. if(_instance !== null && _module !== null)
  894. resolve({ wasm: _instance, memory: _memory, module: _module });
  895. else if(counter <= 0)
  896. reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .W5(`Can't load WASM routines`));
  897. else
  898. setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
  899. }
  900. }
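/*
 * Usage sketch (not invoked by the bundle): the allocate / copy / compute /
 * copy-back / deallocate cycle performed by the expression classes, written
 * out by hand. mat and result are assumed to be SpeedyMatrix instances of
 * transposed shapes.
 */
function _exampleTransposeViaWASM(mat, result)
{
    return SpeedyMatrixWASM.ready().then(({ wasm, memory }) => {
        const matptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, mat);
        const resultptr = SpeedyMatrixWASM.allocateMat32(wasm, memory, result);
        SpeedyMatrixWASM.copyToMat32(wasm, memory, matptr, mat);          // upload the input
        wasm.exports.Mat32_transpose(resultptr, matptr);                  // run the WASM routine
        SpeedyMatrixWASM.copyFromMat32(wasm, memory, resultptr, result);  // download the output
        SpeedyMatrixWASM.deallocateMat32(wasm, memory, resultptr);
        SpeedyMatrixWASM.deallocateMat32(wasm, memory, matptr);
        return result;
    });
}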
  901. /**
  902. * Methods called from WASM
  903. */
  904. class SpeedyMatrixWASMImports
  905. {
  906. /**
  907. * Constructor
  908. * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
  909. */
  910. constructor(memory)
  911. {
  912. // find all methods of this object
  913. const methodNames = Object.getOwnPropertyNames(this.constructor.prototype)
  914. .filter(property => typeof this[property] === 'function')
  915. .filter(property => property !== 'constructor');
  916. // bind all methods to this object
  917. methodNames.forEach(methodName => {
  918. this[methodName] = this[methodName].bind(this);
  919. });
  920. /** @type {SpeedyMatrixWASMMemory} WASM memory */
  921. this.memory = memory;
  922. /** @type {CStringUtils} utilities related to C strings */
  923. this.cstring = new CStringUtils(memory);
  924. // done!
  925. return Object.freeze(this);
  926. }
  927. /**
  928. * Prints a message
  929. * @param {number} ptr pointer to char
  930. */
  931. print(ptr)
  932. {
  933. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils.log */ .c.log(this.cstring.get(ptr));
  934. }
  935. /**
  936. * Throws an error
  937. * @param {number} ptr pointer to char
  938. */
  939. fatal(ptr)
  940. {
  941. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .IT(this.cstring.get(ptr));
  942. }
  943. /**
  944. * Fills a memory segment with a byte
  945. * @param {number} value byte
  946. * @param {number} start memory address, inclusive
  947. * @param {number} end memory address greater than start, exclusive
  948. */
  949. bytefill(value, start, end)
  950. {
  951. this.memory.as.uint8.fill(value, start, end);
  952. }
  953. /**
  954. * Copy a memory segment to another segment
  955. * @param {number} target memory address, where we'll start writing
  956. * @param {number} start memory address, where we'll start copying (inclusive)
  957. * @param {number} end memory address, where we'll end the copy (exclusive)
  958. */
  959. copyWithin(target, start, end)
  960. {
  961. this.memory.as.uint8.copyWithin(target, start, end);
  962. }
  963. }
  964. /**
  965. * Utilities related to C strings
  966. */
  967. class CStringUtils
  968. {
  969. /**
  970. * Constructor
  971. * @param {SpeedyMatrixWASMMemory} memory
  972. */
  973. constructor(memory)
  974. {
  975. /** @type {TextDecoder} */
  976. this._decoder = new TextDecoder('utf-8');
  977. /** @type {SpeedyMatrixWASMMemory} */
  978. this._memory = memory;
  979. }
  980. /**
  981. * Convert a C string to a JavaScript string
  982. * @param {number} ptr pointer to char
  983. * @returns {string}
  984. */
  985. get(ptr)
  986. {
  987. const byte = this._memory.as.uint8;
  988. const size = this._memory.as.uint8.byteLength;
  989. let p = ptr;
  990. while(p < size && 0 !== byte[p])
  991. ++p;
  992. return this._decoder.decode(byte.subarray(ptr, p));
  993. }
  994. }
  995. /**
  996. * WebAssembly loader
  997. * @param {SpeedyMatrixWASMMemory} memory
  998. */
  999. (function loadWASM(memory) {
  1000. const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
  1001. // Endianness check
  1002. if(!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN)
  1003. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .B8(`Can't run WebAssembly code: not in a little-endian machine!`);
  1004. // Load the WASM binary
  1005. _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise.resolve */ .s.resolve(WASM_BINARY)
  1006. .then(data => base64decode(data))
  1007. .then(bytes => WebAssembly.instantiate(bytes, {
  1008. env: {
  1009. memory: memory.as.object,
  1010. ...SpeedyMatrixWASM.imports(memory),
  1011. }
  1012. }))
  1013. .then(wasm => {
  1014. _instance = wasm.instance;
  1015. _module = wasm.module;
  1016. wasm.instance.exports.srand((Date.now() * 0.001) & 0xffffffff); // srand(time(NULL))
  1017. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils.log */ .c.log(`The WebAssembly routines have been loaded!`);
  1018. })
  1019. .catch(err => {
  1020. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .IT(`Can't load the WebAssembly routines: ${err}`, err);
  1021. });
  1022. })(_memory);
  1023. /***/ }),
  1024. /***/ 8007:
  1025. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_35355__) => {
  1026. "use strict";
  1027. __nested_webpack_require_35355__.r(__webpack_exports__);
  1028. /* harmony export */ __nested_webpack_require_35355__.d(__webpack_exports__, {
  1029. /* harmony export */ "SpeedyMatrix": () => (/* binding */ SpeedyMatrix)
  1030. /* harmony export */ });
  1031. /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_35355__(5137);
  1032. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_35355__(4368);
  1033. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_35355__(4500);
  1034. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_35355__(5484);
  1035. /*
  1036. * speedy-vision.js
  1037. * GPU-accelerated Computer Vision for JavaScript
  1038. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  1039. *
  1040. * Licensed under the Apache License, Version 2.0 (the "License");
  1041. * you may not use this file except in compliance with the License.
  1042. * You may obtain a copy of the License at
  1043. *
  1044. * http://www.apache.org/licenses/LICENSE-2.0
  1045. *
  1046. * Unless required by applicable law or agreed to in writing, software
  1047. * distributed under the License is distributed on an "AS IS" BASIS,
  1048. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1049. * See the License for the specific language governing permissions and
  1050. * limitations under the License.
  1051. *
  1052. * speedy-matrix.js
  1053. * Matrix class
  1054. */
  1055. /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
  1056. /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
  1057. /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
  1058. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  1059. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
  1060. /**
  1061. * Matrix class
  1062. */
  1063. class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .N
  1064. {
  1065. /**
  1066. * @private
  1067. *
  1068. * Low-level constructor
  1069. * @param {number} rows number of rows
  1070. * @param {number} columns number of columns
  1071. * @param {number} step0 step size between two consecutive elements (e.g., 1)
  1072. * @param {number} step1 step size between two consecutive columns (e.g., rows)
  1073. * @param {SpeedyMatrixBufferType} data entries in column-major format
  1074. */
  1075. constructor(rows, columns, step0, step1, data)
  1076. {
  1077. super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.DEFAULT_DTYPE */ .N.DEFAULT_DTYPE);
  1078. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE[this.dtype]);
  1079. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(step0 > 0 && step1 >= step0);
  1080. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(
  1081. data.length + rows * columns === 0 || // empty matrix and empty buffer, or
  1082. data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
  1083. );
  1084. /** @type {number} step size between two consecutive elements */
  1085. this._step0 = step0 | 0;
  1086. /** @type {number} step size between two consecutive columns */
  1087. this._step1 = step1 | 0;
  1088. /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
  1089. this._data = data;
  1090. }
  1091. /**
  1092. * Create a new matrix with the specified size and entries
  1093. * @param {number} rows number of rows
  1094. * @param {number} columns number of columns
  1095. * @param {number[]} entries in column-major format
  1096. * @param {SpeedyMatrixDtype} [dtype] data type
  1097. * @returns {SpeedyMatrix}
  1098. */
  1099. static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.DEFAULT_DTYPE */ .N.DEFAULT_DTYPE)
  1100. {
  1101. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1102. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
  1103. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1104. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE[dtype], [entries]));
  1105. }
  1106. /**
  1107. * Create a new matrix filled with zeros with the specified size
  1108. * @param {number} rows number of rows
  1109. * @param {number} [columns] number of columns
  1110. * @param {SpeedyMatrixDtype} [dtype] data type
  1111. * @returns {SpeedyMatrix}
  1112. */
  1113. static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.DEFAULT_DTYPE */ .N.DEFAULT_DTYPE)
  1114. {
  1115. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1116. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1117. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE[dtype], [rows * columns]));
  1118. }
  1119. /**
  1120. * Create a new matrix filled with ones with the specified size
  1121. * @param {number} rows number of rows
  1122. * @param {number} [columns] number of columns
  1123. * @param {SpeedyMatrixDtype} [dtype] data type
  1124. * @returns {SpeedyMatrix}
  1125. */
  1126. static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.DEFAULT_DTYPE */ .N.DEFAULT_DTYPE)
  1127. {
  1128. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1129. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1130. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
  1131. }
  1132. /**
  1133. * Create a new identity matrix with the specified size
  1134. * @param {number} rows number of rows
  1135. * @param {number} [columns] number of columns
  1136. * @param {SpeedyMatrixDtype} [dtype] data type
  1137. * @returns {SpeedyMatrix}
  1138. */
  1139. static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.DEFAULT_DTYPE */ .N.DEFAULT_DTYPE)
  1140. {
  1141. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1142. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1143. const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr.BUFFER_TYPE */ .N.BUFFER_TYPE[dtype], [rows * columns]);
  1144. for(let j = Math.min(rows, columns) - 1; j >= 0; j--)
  1145. data[j * rows + j] = 1;
  1146. return new SpeedyMatrix(rows, columns, 1, rows, data);
  1147. }
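/*
 * Usage sketch (illustrative): the factories above default to square
 * matrices when `columns` is omitted.
 *
 * const I = SpeedyMatrix.Eye(3);      // 3x3 identity matrix
 * const Z = SpeedyMatrix.Zeros(2, 3); // 2x3 matrix of zeros
 */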
  1148. /**
  1149. * Evaluate an expression synchronously and store the result in a new matrix
  1150. * @param {SpeedyMatrixExpr} expr matrix expression
  1151. * @returns {SpeedyMatrix}
  1152. */
  1153. static From(expr)
  1154. {
  1155. return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
  1156. }
  1157. /**
  1158. * Returns a promise that resolves immediately if the WebAssembly routines
1159. * are ready to be used, or as soon as they become ready
  1160. * @returns {SpeedyPromise<void>}
  1161. */
  1162. static ready()
  1163. {
  1164. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM.ready */ .r.ready().then(_ => void(0));
  1165. }
  1166. /**
  1167. * Get the underlying buffer
  1168. * @returns {SpeedyMatrixBufferType}
  1169. */
  1170. get data()
  1171. {
  1172. return this._data;
  1173. }
  1174. /**
  1175. * Row-step
  1176. * @returns {number} defaults to 1
  1177. */
  1178. get step0()
  1179. {
  1180. return this._step0;
  1181. }
  1182. /**
  1183. * Column-step
  1184. * @returns {number} defaults to this.rows
  1185. */
  1186. get step1()
  1187. {
  1188. return this._step1;
  1189. }
  1190. /**
  1191. * Extract a block from this matrix. Use a shared underlying buffer
  1192. * @param {number} firstRow
  1193. * @param {number} lastRow
  1194. * @param {number} firstColumn
  1195. * @param {number} lastColumn
  1196. * @returns {SpeedyMatrix}
  1197. */
  1198. block(firstRow, lastRow, firstColumn, lastColumn)
  1199. {
  1200. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(
  1201. firstRow <= lastRow && firstColumn <= lastColumn,
  1202. `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`
  1203. );
  1204. // ensure that the indices are within bounds
  1205. firstRow = Math.max(firstRow, 0);
  1206. lastRow = Math.min(lastRow, this._rows - 1);
  1207. firstColumn = Math.max(firstColumn, 0);
  1208. lastColumn = Math.min(lastColumn, this._columns - 1);
  1209. // compute the dimensions of the new submatrix
  1210. const rows = lastRow - firstRow + 1;
  1211. const columns = lastColumn - firstColumn + 1;
  1212. // obtain the relevant portion of the data
  1213. const step0 = this._step0, step1 = this._step1;
  1214. const begin = firstRow * step0 + firstColumn * step1; // inclusive
  1215. const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
  1216. // create new matrix
  1217. return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
  1218. }
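/*
 * Usage sketch (illustrative, assuming M is a 3x3 SpeedyMatrix): block()
 * returns a view that shares the underlying buffer, so writing to the view
 * also writes to M.
 *
 * const top = M.block(0, 1, 0, 2); // rows 0-1, all three columns of M
 * top.fillSync(7);                 // the first two rows of M become 7
 */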
  1219. /**
  1220. * Extract a row from this matrix
  1221. * @param {number} index 0-based
  1222. * @returns {SpeedyMatrix}
  1223. */
  1224. row(index)
  1225. {
  1226. return this.block(index, index, 0, this._columns - 1);
  1227. }
  1228. /**
  1229. * Extract a column from this matrix
  1230. * @param {number} index 0-based
  1231. * @returns {SpeedyMatrix}
  1232. */
  1233. column(index)
  1234. {
  1235. return this.block(0, this._rows - 1, index, index);
  1236. }
  1237. /**
  1238. * Extract the main diagonal from this matrix
  1239. * @returns {SpeedyMatrix} as a column-vector
  1240. */
  1241. diagonal()
  1242. {
  1243. const diagsize = Math.min(this._rows, this._columns);
  1244. // compute the dimensions of the new submatrix
  1245. const rows = diagsize; // make it a column vector
  1246. const columns = 1;
  1247. // obtain the relevant portion of the data
  1248. const diagstep = this._step0 + this._step1; // jump a row and a column
  1249. const begin = 0; // inclusive
  1250. const end = 1 + (diagsize - 1) * diagstep; // exclusive
  1251. // create new matrix
  1252. return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
  1253. }
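/*
 * Illustrative note: the diagonal view uses step0 + step1 as both of its
 * steps, so each entry of the returned column-vector jumps one row and one
 * column at a time in the original buffer.
 *
 * SpeedyMatrix.Eye(3).diagonal().read(); // [1, 1, 1]
 */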
  1254. /**
  1255. * Read a single entry of this matrix
  1256. * @param {number} row 0-based index
  1257. * @param {number} column 0-based index
  1258. * @returns {number}
  1259. */
  1260. at(row, column)
  1261. {
  1262. if(row >= 0 && row < this._rows && column >= 0 && column < this._columns)
  1263. return this._data[this._step0 * row + this._step1 * column];
  1264. else
  1265. return Number.NaN;
  1266. }
  1267. /**
  1268. * Read the entries of the matrix in column-major format
  1269. * @returns {number[]}
  1270. */
  1271. read()
  1272. {
  1273. const entries = new Array(this._rows * this._columns);
  1274. const step0 = this._step0, step1 = this._step1;
  1275. let i = 0;
  1276. for(let column = 0; column < this._columns; column++) {
  1277. for(let row = 0; row < this._rows; row++)
  1278. entries[i++] = this._data[row * step0 + column * step1];
  1279. }
  1280. return entries;
  1281. }
  1282. /**
  1283. * Returns a human-readable string representation of the matrix
  1284. * @returns {string}
  1285. */
  1286. toString()
  1287. {
  1288. const DECIMALS = 5;
  1289. const rows = this.rows, columns = this.columns;
  1290. const entries = this.read();
  1291. const mat = /** @type {number[][]} */ ( new Array(rows) );
  1292. for(let i = 0; i < rows; i++) {
  1293. mat[i] = new Array(columns);
  1294. for(let j = 0; j < columns; j++)
  1295. mat[i][j] = entries[j * rows + i];
  1296. }
  1297. const fix = x => x.toFixed(DECIMALS);
  1298. const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
  1299. const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
  1300. return str;
  1301. }
  1302. /**
  1303. * Set the contents of this matrix to the result of an expression
  1304. * @param {SpeedyMatrixExpr} expr matrix expression
  1305. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1306. */
  1307. setTo(expr)
  1308. {
  1309. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM.ready */ .r.ready().then(_ => {
  1310. // TODO: add support for WebWorkers
  1311. return this.setToSync(expr);
  1312. });
  1313. }
  1314. /**
  1315. * Synchronously set the contents of this matrix to the result of an expression
  1316. * @param {SpeedyMatrixExpr} expr matrix expression
  1317. * @returns {SpeedyMatrix} this
  1318. */
  1319. setToSync(expr)
  1320. {
  1321. const { wasm, memory } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM.handle */ .r.handle;
  1322. // evaluate the expression
  1323. const result = expr._evaluate(wasm, memory);
  1324. /*
  1325. // shallow copy the results to this matrix
  1326. // limitation: can't handle blocks properly
  1327. // (a tree-like structure could be useful)
  1328. this._rows = result.rows;
  1329. this._columns = result.columns;
  1330. //this._dtype = result.dtype;
  1331. this._data = result.data;
  1332. this._step0 = result.step0;
  1333. this._step1 = result.step1;
  1334. */
  1335. // validate shape
  1336. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils.assert */ .c.assert(
  1337. this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype,
  1338. `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`
  1339. );
  1340. // deep copy
  1341. const step0 = this._step0, step1 = this._step1, rstep0 = result._step0, rstep1 = result._step1;
  1342. if(step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
  1343. // fast copy
  1344. this._data.set(result._data);
  1345. }
  1346. else {
  1347. // copy each element
  1348. for(let column = this._columns - 1; column >= 0; column--) {
  1349. for(let row = this._rows - 1; row >= 0; row--)
  1350. this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
  1351. }
  1352. }
  1353. // done!
  1354. return this;
  1355. }
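/*
 * Usage sketch (illustrative): a SpeedyMatrix evaluates to itself (see
 * _evaluate below), so copying one matrix into another of the same shape
 * and dtype works with setToSync() / setTo(). Here A is assumed to be a
 * SpeedyMatrix.
 *
 * const B = SpeedyMatrix.Zeros(A.rows, A.columns, A.dtype);
 * B.setToSync(A);                 // synchronous deep copy of A's entries
 * // B.setTo(A).then(m => ...);   // asynchronous variant; waits for the WASM routines
 */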
  1356. /**
  1357. * Fill this matrix with a scalar value
  1358. * @param {number} value
  1359. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1360. */
  1361. fill(value)
  1362. {
  1363. this.fillSync(value);
  1364. return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise.resolve */ .s.resolve(this);
  1365. }
  1366. /**
  1367. * Synchronously fill this matrix with a scalar value
  1368. * @param {number} value
  1369. * @returns {SpeedyMatrix} this
  1370. */
  1371. fillSync(value)
  1372. {
  1373. value = +value;
  1374. if(this._rows * this._columns === this._data.length) {
  1375. this._data.fill(value);
  1376. return this;
  1377. }
  1378. for(let column = 0; column < this._columns; column++) {
  1379. for(let row = 0; row < this._rows; row++) {
  1380. this._data[row * this._step0 + column * this._step1] = value;
  1381. }
  1382. }
  1383. return this;
  1384. }
  1385. /**
  1386. * Evaluate this expression
  1387. * @param {WebAssembly.Instance} wasm
  1388. * @param {SpeedyMatrixWASMMemory} memory
  1389. * @returns {SpeedyMatrix}
  1390. */
  1391. _evaluate(wasm, memory)
  1392. {
  1393. return this;
  1394. }
  1395. }
  1396. /***/ }),
  1397. /***/ 2411:
  1398. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_52046__) => {
  1399. "use strict";
  1400. /* harmony export */ __nested_webpack_require_52046__.d(__webpack_exports__, {
  1401. /* harmony export */ "R": () => (/* binding */ SpeedyNamespace)
  1402. /* harmony export */ });
  1403. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_52046__(3841);
  1404. /*
  1405. * speedy-vision.js
  1406. * GPU-accelerated Computer Vision for JavaScript
  1407. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  1408. *
  1409. * Licensed under the Apache License, Version 2.0 (the "License");
  1410. * you may not use this file except in compliance with the License.
  1411. * You may obtain a copy of the License at
  1412. *
  1413. * http://www.apache.org/licenses/LICENSE-2.0
  1414. *
  1415. * Unless required by applicable law or agreed to in writing, software
  1416. * distributed under the License is distributed on an "AS IS" BASIS,
  1417. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1418. * See the License for the specific language governing permissions and
  1419. * limitations under the License.
  1420. *
  1421. * speedy-namespace.js
  1422. * Symbolizes a namespace
  1423. */
  1424. /**
  1425. * An abstract namespace
  1426. * @abstract
  1427. */
  1428. class SpeedyNamespace
  1429. {
  1430. /**
  1431. * Namespaces can't be instantiated.
  1432. * Only static methods are allowed.
  1433. * @abstract
  1434. * @throws SpeedyError
  1435. */
  1436. constructor()
  1437. {
  1438. // only static methods are allowed
  1439. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .Mi(`Namespaces can't be instantiated`);
  1440. }
  1441. }
  1442. /***/ }),
  1443. /***/ 4500:
  1444. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_53588__) => {
  1445. "use strict";
  1446. /* harmony export */ __nested_webpack_require_53588__.d(__webpack_exports__, {
  1447. /* harmony export */ "s": () => (/* binding */ SpeedyPromise)
  1448. /* harmony export */ });
  1449. /*
  1450. * speedy-vision.js
  1451. * GPU-accelerated Computer Vision for JavaScript
  1452. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  1453. *
  1454. * Licensed under the Apache License, Version 2.0 (the "License");
  1455. * you may not use this file except in compliance with the License.
  1456. * You may obtain a copy of the License at
  1457. *
  1458. * http://www.apache.org/licenses/LICENSE-2.0
  1459. *
  1460. * Unless required by applicable law or agreed to in writing, software
  1461. * distributed under the License is distributed on an "AS IS" BASIS,
  1462. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1463. * See the License for the specific language governing permissions and
  1464. * limitations under the License.
  1465. *
  1466. * speedy-promise.js
  1467. * Speedy Promises: a fast implementation of Promises
  1468. */
  1469. const PENDING = 0;
  1470. const FULFILLED = 1;
  1471. const REJECTED = 2;
  1472. const SUSPEND_ASYNC = 1;
  1473. const asap = (typeof queueMicrotask !== 'undefined' && queueMicrotask) || // browsers
  1474. (typeof process !== 'undefined' && process.nextTick) || // node.js
  1475. (f => Promise.resolve().then(() => f())); // most compatible
  1476. /**
  1477. * SpeedyPromise: Super Fast Promises. SpeedyPromises can
  1478. * interoperate with ES6 Promises. This implementation is
  1479. * based on the Promises/A+ specification.
  1480. * @template T
  1481. */
  1482. class SpeedyPromise
  1483. {
  1484. /**
  1485. * Constructor
  1486. * @param {function(function(T=): void, function(Error): void): void} callback
  1487. */
  1488. constructor(callback)
  1489. {
  1490. this._state = PENDING;
  1491. this._value = undefined;
  1492. this._onFulfillment = null;
  1493. this._onRejection = null;
  1494. this._children = 0;
  1495. this[0] = this;
  1496. this._parent = undefined;
  1497. this._flags = 0;
  1498. this._fulfill = this._fulfill.bind(this);
  1499. this._reject = this._reject.bind(this);
  1500. this._resolve = this._resolve.bind(this);
  1501. this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
  1502. callback(this._fulfill, this._reject);
  1503. }
  1504. /**
1505. * Set up handlers
  1506. * @template U, V=never
  1507. * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
  1508. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1509. * @returns {SpeedyPromise<U>}
  1510. */
  1511. then(onFulfillment, onRejection = null)
  1512. {
  1513. const child = new SpeedyPromise(this._nop);
  1514. child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
  1515. child._onRejection = typeof onRejection === 'function' && onRejection;
  1516. child._parent = this;
  1517. this[this._children++] = child; // attach child
  1518. this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
  1519. this._notify();
  1520. return child;
  1521. }
  1522. /**
1523. * Set up a rejection handler
  1524. * @template U, V=never
  1525. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1526. * @returns {SpeedyPromise<V>}
  1527. */
  1528. catch(onRejection)
  1529. {
  1530. return this.then(null, onRejection);
  1531. }
  1532. /**
  1533. * Execute a callback when the promise is settled
  1534. * (i.e., fulfilled or rejected)
  1535. * @param {function(): void} onFinally
  1536. * @returns {SpeedyPromise<T>}
  1537. */
  1538. finally(onFinally)
  1539. {
  1540. const fn = val => { onFinally(); return val; };
  1541. return this.then(fn, fn);
  1542. }
  1543. /**
  1544. * Start the computation immediately, synchronously.
  1545. * Can't afford to spend any time at all waiting for micro-tasks, etc.
  1546. * @returns {SpeedyPromise<T>} this
  1547. */
  1548. turbocharge()
  1549. {
  1550. let my = this;
  1551. // suspend the async behavior
  1552. this._flags |= SUSPEND_ASYNC;
  1553. while(my._parent !== undefined) {
  1554. my = my._parent;
  1555. my._flags |= SUSPEND_ASYNC;
  1556. }
  1557. // notify the children of the root
  1558. my._notify(); // will be synchronous
  1559. // return this SpeedyPromise
  1560. return this;
  1561. }
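/*
 * Usage sketch (illustrative): SpeedyPromises chain like regular Promises;
 * turbocharge() makes a settled chain run its callbacks synchronously
 * instead of scheduling them as a micro-task.
 *
 * SpeedyPromise.resolve(21)
 *     .then(x => x * 2)
 *     .then(x => console.log(x)) // logs 42
 *     .turbocharge();            // run the chain right away, synchronously
 */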
  1562. /**
  1563. * Convert to string
  1564. * @returns {string}
  1565. */
  1566. toString()
  1567. {
  1568. switch(this._state) {
  1569. case PENDING:
  1570. return `SpeedyPromise { <pending> }`;
  1571. case FULFILLED:
  1572. return `SpeedyPromise { <fulfilled> ${this._value} }`;
  1573. case REJECTED:
  1574. return `SpeedyPromise { <rejected> ${this._value} }`;
  1575. default:
  1576. return '';
  1577. }
  1578. }
  1579. /**
  1580. * Symbol.toStringTag
  1581. * @returns {string}
  1582. */
  1583. get [Symbol.toStringTag]()
  1584. {
  1585. return 'SpeedyPromise';
  1586. }
  1587. /**
  1588. * Creates a resolved SpeedyPromise
  1589. * @template U
  1590. * @param {U} [value]
  1591. * @returns {SpeedyPromise<U>}
  1592. */
  1593. static resolve(value)
  1594. {
  1595. const promise = new SpeedyPromise(this._snop);
  1596. if((typeof value === 'object' && value !== null && 'then' in value) || (typeof value === 'function' && 'then' in value)) {
  1597. // resolve asynchronously
  1598. promise._resolve(value);
  1599. }
  1600. else {
  1601. // fulfill synchronously
  1602. promise._value = value;
  1603. promise._state = FULFILLED;
  1604. }
  1605. return promise;
  1606. }
  1607. /**
  1608. * Creates a rejected SpeedyPromise
  1609. * @template U
  1610. * @param {Error} reason
  1611. * @returns {SpeedyPromise<U>}
  1612. */
  1613. static reject(reason)
  1614. {
  1615. const promise = new SpeedyPromise(this._snop);
  1616. promise._value = reason;
  1617. promise._state = REJECTED;
  1618. return promise;
  1619. }
  1620. /**
  1621. * Returns a SpeedyPromise that resolves to an array
  1622. * containing the results of the input promises/values,
  1623. * in their given order. The returned SpeedyPromise will
  1624. * resolve if all input promises resolve, or reject if
  1625. * any input promise rejects.
  1626. * @template U
  1627. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1628. * @returns {SpeedyPromise<U[]>}
  1629. *
  1630. * FIXME iterables need not be all <U>
  1631. */
  1632. static all(iterable)
  1633. {
  1634. return new SpeedyPromise((resolve, reject) => {
  1635. const input = [];
  1636. // get elements
  1637. for(const element of iterable)
  1638. input.push(element);
  1639. // resolve synchronously if there are no elements
  1640. const length = input.length;
  1641. if(length == 0) {
  1642. resolve([]);
  1643. return;
  1644. }
  1645. // resolve asynchronously
  1646. let counter = length;
  1647. const output = new Array(length);
  1648. const partialResolve = i => (val => { output[i] = val; if(0 == --counter) resolve(output); });
  1649. for(let i = 0; i < length; i++) {
  1650. const element = input[i];
  1651. if(element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype)
  1652. element.then(partialResolve(i), reject);
  1653. else
  1654. SpeedyPromise.resolve(element).then(partialResolve(i), reject);
  1655. }
  1656. });
  1657. }
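/*
 * Usage sketch (illustrative): all() accepts a mix of SpeedyPromises, native
 * Promises and plain values, and resolves to their results in the given order.
 *
 * SpeedyPromise.all([SpeedyPromise.resolve(1), Promise.resolve(2), 3])
 *     .then(values => console.log(values)); // [1, 2, 3]
 */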
  1658. /**
  1659. * Returns a promise that gets fulfilled or rejected as soon
  1660. * as the first promise in the iterable gets fulfilled or
  1661. * rejected (with its value/reason).
  1662. * @template U
  1663. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1664. * @returns {SpeedyPromise<U>}
  1665. */
  1666. static race(iterable)
  1667. {
  1668. return new SpeedyPromise((resolve, reject) => {
  1669. const input = [];
  1670. // get elements
  1671. for(const element of iterable)
  1672. input.push(element);
  1673. // if the iterable is empty, the promise
  1674. // will be pending forever...
  1675. // resolve asynchronously
  1676. const length = input.length;
  1677. for(let i = 0; i < length; i++) {
  1678. const element = input[i];
  1679. if(element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype)
  1680. element.then(resolve, reject);
  1681. else
  1682. SpeedyPromise.resolve(element).then(resolve, reject);
  1683. }
  1684. });
  1685. }
  1686. /**
  1687. * Fulfill this promise with a value
  1688. * @param {T} value
  1689. */
  1690. _fulfill(value)
  1691. {
  1692. this._setState(FULFILLED, value);
  1693. }
  1694. /**
  1695. * Reject this promise with a reason
  1696. * @param {Error} reason
  1697. */
  1698. _reject(reason)
  1699. {
  1700. this._setState(REJECTED, reason);
  1701. }
  1702. /**
  1703. * Set the state and the value of this promise
  1704. * @param {number} state
  1705. * @param {T|Error} value
  1706. */
  1707. _setState(state, value)
  1708. {
  1709. // the promise is already fulfilled or rejected
  1710. if(this._state != PENDING)
  1711. return;
  1712. // set the new state
  1713. this._state = state;
  1714. this._value = value;
  1715. this._notify();
  1716. }
  1717. /**
  1718. * Notify my children that this promise is no
  1719. * longer pending. This is an async operation:
1720. my children will be notified "as soon
  1721. * as possible" (it will be scheduled).
  1722. * We may force this to be synchronous, though
  1723. */
  1724. _notify()
  1725. {
  1726. // nothing to do
  1727. if(this._state == PENDING)
  1728. return;
  1729. // have we turbocharged this promise?
  1730. if(this._flags & SUSPEND_ASYNC) {
  1731. this._broadcast(); // execute synchronously
  1732. return;
  1733. }
  1734. // install a timer (default behavior)
  1735. asap(this._broadcastIfAsync);
  1736. }
  1737. /**
  1738. * Helper method
  1739. */
  1740. _broadcastIfAsync()
  1741. {
  1742. // we may have installed a timer at some
  1743. // point, but turbocharged the promise later
  1744. if(!(this._flags & SUSPEND_ASYNC))
  1745. this._broadcast();
  1746. }
  1747. /**
  1748. * Tell my children that this promise
  1749. * is either fulfilled or rejected.
  1750. * This is a synchronous operation
  1751. */
  1752. _broadcast()
  1753. {
  1754. const children = this._children;
  1755. const state = this._state;
  1756. if(state === FULFILLED) {
  1757. for(let i = 0; i < children; i++) {
  1758. const child = this[i];
  1759. const callback = child._onFulfillment;
  1760. try {
  1761. if(callback) {
  1762. if(callback !== child._nop) {
  1763. child._resolve(callback(this._value)); // promise resolution procedure
  1764. child._onFulfillment = child._nop; // will not be called again
  1765. }
  1766. }
  1767. else
  1768. child._fulfill(this._value);
  1769. }
  1770. catch(e) {
  1771. child._reject(e);
  1772. }
  1773. }
  1774. }
  1775. else if(state === REJECTED) {
  1776. for(let i = 0; i < children; i++) {
  1777. const child = this[i];
  1778. const callback = child._onRejection;
  1779. try {
  1780. if(callback) {
  1781. if(callback !== child._nop) {
  1782. child._resolve(callback(this._value)); // promise resolution procedure
  1783. child._onRejection = child._nop; // will not be called again
  1784. }
  1785. }
  1786. else
  1787. child._reject(this._value);
  1788. }
  1789. catch(e) {
  1790. child._reject(e);
  1791. }
  1792. }
  1793. }
  1794. }
  1795. /**
  1796. * Promise Resolution Procedure
  1797. * based on the Promises/A+ spec
  1798. * @param {T} x
  1799. */
  1800. _resolve(x)
  1801. {
  1802. if((typeof x !== 'object' && typeof x !== 'function') || (x === null)) { // if(x !== Object(x))
  1803. this._fulfill(x);
  1804. return;
  1805. }
  1806. if(x === this)
  1807. throw new TypeError(); // Circular reference
  1808. if(x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
  1809. x.then(this._resolve, this._reject);
  1810. return;
  1811. }
  1812. try {
  1813. const then = x.then;
  1814. if(typeof then === 'function') {
  1815. let resolve = this._resolve, reject = this._reject;
  1816. try {
  1817. then.call(x,
  1818. y => { resolve(y); resolve = reject = this._nop; },
  1819. r => { reject(r); resolve = reject = this._nop; }
  1820. );
  1821. }
  1822. catch(e) {
  1823. if(resolve !== this._nop && reject !== this._nop)
  1824. this._reject(e);
  1825. }
  1826. }
  1827. else {
  1828. this._fulfill(x);
  1829. }
  1830. }
  1831. catch(e) {
  1832. this._reject(e);
  1833. }
  1834. }
  1835. /**
  1836. * No-operation
  1837. */
  1838. _nop()
  1839. {
  1840. }
  1841. /**
  1842. * Static no-operation
  1843. */
  1844. static _snop()
  1845. {
  1846. }
  1847. }
  1848. //module.exports = { SpeedyPromise };
  1849. /*
  1850. // Uncomment to test performance with regular Promises
  1851. module.exports = { SpeedyPromise: Promise };
  1852. Promise.prototype.turbocharge = function() { return this };
  1853. */
  1854. /***/ }),
  1855. /***/ 9759:
  1856. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_67055__) => {
  1857. "use strict";
  1858. // EXPORTS
  1859. __nested_webpack_require_67055__.d(__webpack_exports__, {
  1860. "ef": () => (/* binding */ createShader),
  1861. "Nt": () => (/* binding */ importShader)
  1862. });
  1863. // UNUSED EXPORTS: ShaderDeclaration
  1864. // EXTERNAL MODULE: ./src/utils/utils.js
  1865. var utils = __nested_webpack_require_67055__(5484);
  1866. // EXTERNAL MODULE: ./src/utils/types.js
  1867. var types = __nested_webpack_require_67055__(6731);
  1868. // EXTERNAL MODULE: ./src/utils/errors.js
  1869. var errors = __nested_webpack_require_67055__(3841);
  1870. ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
  1871. /*
  1872. * speedy-vision.js
  1873. * GPU-accelerated Computer Vision for JavaScript
  1874. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  1875. *
  1876. * Licensed under the Apache License, Version 2.0 (the "License");
  1877. * you may not use this file except in compliance with the License.
  1878. * You may obtain a copy of the License at
  1879. *
  1880. * http://www.apache.org/licenses/LICENSE-2.0
  1881. *
  1882. * Unless required by applicable law or agreed to in writing, software
  1883. * distributed under the License is distributed on an "AS IS" BASIS,
  1884. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1885. * See the License for the specific language governing permissions and
  1886. * limitations under the License.
  1887. *
  1888. * shader-preprocessor.js
  1889. * Custom preprocessor for shaders
  1890. */
  1891. // Import numeric globals
  1892. const globals = __nested_webpack_require_67055__(3020);
  1893. const numericGlobals = Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce(
  1894. (obj, key) => ((obj[key] = globals[key]), obj), {}
  1895. );
  1896. // Constants accessible by all shaders
  1897. const constants = Object.freeze({
  1898. // numeric globals
  1899. ...numericGlobals,
  1900. // fragment shader
  1901. 'FS_USE_CUSTOM_PRECISION': 0, // use default precision settings
  1902. 'FS_OUTPUT_TYPE': 0, // normalized RGBA
  1903. // colors
  1904. 'PIXELCOMPONENT_RED': types/* PixelComponent.RED */.hE.RED,
  1905. 'PIXELCOMPONENT_GREEN': types/* PixelComponent.GREEN */.hE.GREEN,
  1906. 'PIXELCOMPONENT_BLUE': types/* PixelComponent.BLUE */.hE.BLUE,
  1907. 'PIXELCOMPONENT_ALPHA': types/* PixelComponent.ALPHA */.hE.ALPHA,
  1908. });
  1909. // Regular Expressions
  1910. const commentsRegex = [ /\/\*(.|\s)*?\*\//g , /\/\/.*$/gm ];
  1911. const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
  1912. const constantRegex = /@(\w+)@/g;
  1913. const unrollRegex = [
  1914. /@\s*unroll\s+?for\s*\(\s*(int|)\s*(?<counter>\w+)\s*=\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*(<=?)\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g,
  1915. /@\s*unroll\s+?for\s*\(\s*(int|)\s*(?<counter>\w+)\s*=\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*(<=?)\s*(-?\d+|\w+)\s*;\s*\k<counter>\s*\+=\s*(-?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g,
  1916. ];
  1917. /** @typedef {Map<string,number>} ShaderDefines */
  1918. /**
  1919. * Custom preprocessor for the shaders
  1920. */
  1921. class ShaderPreprocessor
  1922. {
  1923. /**
  1924. * Runs the preprocessor
  1925. * @param {string} code
  1926. * @param {ShaderDefines} [defines]
  1927. * @returns {string} preprocessed code
  1928. */
  1929. static run(code, defines = new Map())
  1930. {
  1931. const errors = []; // compile-time errors
  1932. //
  1933. // The preprocessor will remove comments from GLSL code,
  1934. // include requested GLSL files and import global constants
  1935. // defined for all shaders (see above)
  1936. //
  1937. return unrollLoops(
  1938. String(code)
  1939. .replace(commentsRegex[0], '')
  1940. .replace(commentsRegex[1], '')
  1941. .replace(includeRegex, (_, filename) =>
  1942. // FIXME: no cycle detection for @include
  1943. ShaderPreprocessor.run(readfileSync(filename), defines)
  1944. )
  1945. .replace(constantRegex, (_, name) => String(
  1946. // Find a defined constant. If not possible, find a global constant
  1947. defines.has(name) ? Number(defines.get(name)) : (
  1948. constants[name] !== undefined ? Number(constants[name]) : (
  1949. errors.push(`Undefined constant: ${name}`), 0
  1950. )
  1951. )
  1952. )),
  1953. defines
  1954. ) + (errors.length > 0 ? errors.map(msg => `\n#error ${msg}\n`).join('') : '');
  1955. }
  1956. }
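/*
 * Usage sketch (illustrative): @name@ tokens are replaced by #defined or
 * global constants, and @include "file.glsl" pulls a file from the
 * shaders/include/ folder.
 *
 * ShaderPreprocessor.run('float r = float(@PIXELCOMPONENT_RED@);');
 * // -> 'float r = float(<numeric value of PixelComponent.RED>);'
 */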
  1957. /**
  1958. * Reads a shader from the shaders/include/ folder
  1959. * @param {string} filename
  1960. * @returns {string}
  1961. */
  1962. function readfileSync(filename)
  1963. {
  1964. if(String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/))
  1965. return __nested_webpack_require_67055__(524)("./" + filename);
  1966. throw new errors/* FileNotFoundError */.Xg(`Shader preprocessor: can't read file "${filename}"`);
  1967. }
  1968. /**
  1969. * Unroll for loops in our own preprocessor
  1970. * @param {string} code
  1971. * @param {ShaderDefines} defines
  1972. * @returns {string}
  1973. */
  1974. function unrollLoops(code, defines)
  1975. {
  1976. //
  1977. // Currently, only integer for loops with positive step values
  1978. // can be unrolled. (TODO: negative step values?)
  1979. //
  1980. // The current implementation does not support curly braces
  1981. // inside unrolled loops. You may define macros to get around
  1982. // this, but do you actually need to unroll such loops?
  1983. //
  1984. // Loops that don't fit the supported pattern will crash
  1985. // the preprocessor if you try to unroll them.
  1986. //
  1987. const fn = unroll.bind(defines); // CRAZY!
  1988. const n = unrollRegex.length;
  1989. for(let i = 0; i < n; i++)
  1990. code = code.replace(unrollRegex[i], fn);
  1991. return code;
  1992. }
  1993. /**
  1994. * Unroll a loop pattern (regexp)
  1995. * @param {string} match the matched for loop
  1996. * @param {string} type
  1997. * @param {string} counter
  1998. * @param {string} start
  1999. * @param {string} cmp
  2000. * @param {string} end
  2001. * @param {string} step
  2002. * @param {string} loopcode
  2003. * @returns {string} unrolled loop
  2004. */
  2005. function unroll(match, type, counter, start, cmp, end, step, loopcode)
  2006. {
  2007. const defines = /** @type {ShaderDefines} */ ( this );
  2008. // check if the loop limits are numeric constants or #defined numbers from the outside
  2009. const hasStart = Number.isFinite(+start) || defines.has(start);
  2010. const hasEnd = Number.isFinite(+end) || defines.has(end);
  2011. if(!hasStart || !hasEnd) {
  2012. if(defines.size > 0)
  2013. throw new errors/* ParseError */.D3(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);
  2014. else
  2015. return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
  2016. }
  2017. // parse and validate limits & step
  2018. let istart = defines.has(start) ? defines.get(start) : parseInt(start);
  2019. let iend = defines.has(end) ? defines.get(end) : parseInt(end);
  2020. let istep = (step.length == 0) ? 1 : parseInt(step);
  2021. utils/* Utils.assert */.c.assert(istart <= iend && istep > 0);
  2022. /*
  2023. // debug
2024. console.log(`Found "${match}"`);
  2025. console.log(`type="${type}"`);
  2026. console.log(`counter="${counter}"`);
  2027. console.log(`start="${start}"`);
  2028. console.log(`cmp="${cmp}"`);
  2029. console.log(`end="${end}"`);
  2030. console.log(`step="${step}"`);
  2031. console.log(`loopcode="${loopcode}"`)
  2032. console.log('Defines:', defines);
  2033. */
  2034. // continue statements are not supported inside unrolled loops
  2035. // and will generate a compiler error. Using break is ok.
  2036. const hasBreak = (loopcode.match(/\bbreak\s*;/) !== null);
  2037. // create a new scope
  2038. let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
  2039. // declare counter
  2040. unrolledCode += `${type} ${counter};\n`;
  2041. // unroll loop
  2042. iend += (cmp == '<=') ? 1 : 0;
  2043. for(let i = istart; i < iend; i += istep)
  2044. unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
  2045. // close scope
  2046. unrolledCode += '}\n';
  2047. //console.log('Unrolled code:\n\n' + unrolledCode);
  2048. // done!
  2049. return unrolledCode;
  2050. }
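/*
 * Illustrative sketch of the unrolling above: a directive such as
 *
 *   @unroll
 *   for(int i = 0; i < 3; i++) { sum += pixel[i]; }
 *
 * is rewritten (roughly) to
 *
 *   {
 *   int i;
 *   { i = 0; sum += pixel[i]; }
 *   { i = 1; sum += pixel[i]; }
 *   { i = 2; sum += pixel[i]; }
 *   }
 */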
  2051. ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
  2052. /*
  2053. * speedy-vision.js
  2054. * GPU-accelerated Computer Vision for JavaScript
  2055. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  2056. *
  2057. * Licensed under the Apache License, Version 2.0 (the "License");
  2058. * you may not use this file except in compliance with the License.
  2059. * You may obtain a copy of the License at
  2060. *
  2061. * http://www.apache.org/licenses/LICENSE-2.0
  2062. *
  2063. * Unless required by applicable law or agreed to in writing, software
  2064. * distributed under the License is distributed on an "AS IS" BASIS,
  2065. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2066. * See the License for the specific language governing permissions and
  2067. * limitations under the License.
  2068. *
  2069. * shader-declaration.js
  2070. * Encapsulates a shader declaration
  2071. */
  2072. const DEFAULT_ATTRIBUTES = Object.freeze({
  2073. position: 'a_position',
  2074. texCoord: 'a_texCoord'
  2075. });
  2076. const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
  2077. position: 0, // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  2078. texCoord: 1,
  2079. });
  2080. const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
  2081. precision highp float;
  2082. precision highp int;
  2083. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
  2084. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
  2085. out highp vec2 texCoord;
  2086. uniform highp vec2 texSize;
  2087. #define vsinit() \
  2088. gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
  2089. texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
  2090. \n\n`;
  2091. const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
  2092. const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
  2093. const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
  2094. #if @FS_USE_CUSTOM_PRECISION@ == 0
  2095. precision mediump float; // ~float16
  2096. precision mediump sampler2D;
  2097. precision highp int; // int32
  2098. #endif
  2099. #if @FS_OUTPUT_TYPE@ == 0
  2100. #define OUT_TYPE mediump vec4
  2101. #elif @FS_OUTPUT_TYPE@ == 1
  2102. #define OUT_TYPE mediump ivec4
  2103. #elif @FS_OUTPUT_TYPE@ == 2
  2104. #define OUT_TYPE mediump uvec4
  2105. #else
  2106. #error Unknown FS_OUTPUT_TYPE
  2107. #endif
  2108. out OUT_TYPE color;
  2109. in highp vec2 texCoord;
  2110. uniform highp vec2 texSize;
  2111. @include "global.glsl"\n\n`;
  2112. const PRIVATE_TOKEN = Symbol();
  2113. /**
  2114. * @typedef {object} ShaderDeclarationFilepathOptions
  2115. * @property {"filepath"} type
  2116. * @property {string} filepath
  2117. * @property {string} [vsfilepath]
  2118. *
  2119. * @typedef {object} ShaderDeclarationSourceOptions
  2120. * @property {"source"} type
  2121. * @property {string} source
  2122. * @property {string} [vssource]
  2123. *
  2124. * @typedef {ShaderDeclarationFilepathOptions | ShaderDeclarationSourceOptions} ShaderDeclarationOptions
  2125. */
  2126. /** @typedef {import('./shader-preprocessor').ShaderDefines} ShaderDefines */
  2127. /**
  2128. * Shader Declaration
  2129. */
  2130. class ShaderDeclaration
  2131. {
  2132. /**
  2133. * @private Constructor
  2134. * @param {ShaderDeclarationOptions} options
  2135. * @param {Symbol} privateToken
  2136. */
  2137. constructor(options, privateToken)
  2138. {
  2139. if(privateToken !== PRIVATE_TOKEN)
  2140. throw new errors/* IllegalOperationError */.js(); // private constructor!
  2141. /** @type {string} original source code provided by the user (fragment shader) */
  2142. this._source = (() => {
  2143. switch(options.type) {
  2144. case 'filepath': return __nested_webpack_require_67055__(2863)("./" + options.filepath);
  2145. case 'source': return options.source;
  2146. default: return /** @type {never} */ ( '' );
  2147. }
  2148. })();
  2149. /** @type {string} vertex shader source code (without preprocessing) */
  2150. this._vssource = (() => {
  2151. switch(options.type) {
  2152. case 'filepath': return options.vsfilepath ? __nested_webpack_require_67055__(2863)("./" + options.vsfilepath) : DEFAULT_VERTEX_SHADER;
  2153. case 'source': return options.vssource ? options.vssource : DEFAULT_VERTEX_SHADER;
  2154. default: return /** @type {never} */ ( '' );
  2155. }
  2156. })();
  2157. /** @type {string} preprocessed source code of the fragment shader */
  2158. this._fragmentSource = ShaderPreprocessor.run(DEFAULT_FRAGMENT_SHADER_PREFIX + this._source);
  2159. /** @type {string} preprocessed source code of the vertex shader */
  2160. this._vertexSource = ShaderPreprocessor.run(DEFAULT_VERTEX_SHADER_PREFIX + this._vssource + DEFAULT_VERTEX_SHADER_SUFFIX);
  2161. /** @type {string} filepath of the fragment shader */
  2162. this._filepath = options.type === 'filepath' ? options.filepath : '<in-memory>';
  2163. /** @type {string} filepath of the vertex shader */
  2164. this._vsfilepath = options.type === 'filepath' && options.vsfilepath ? options.vsfilepath : '<in-memory>';
  2165. /** @type {string[]} an ordered list of uniform names */
  2166. this._arguments = [];
  2167. /** @type {Map<string,string>} it maps uniform names to their types */
  2168. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2169. /** @type {ShaderDefines} it maps externally #defined constants to their values */
  2170. this._defines = new Map();
  2171. }
  2172. /**
  2173. * Creates a new Shader directly from a GLSL source
  2174. * @param {string} source fragment shader
  2175. * @param {string|null} [vssource] vertex shader
  2176. * @returns {ShaderDeclaration}
  2177. */
  2178. static create(source, vssource = null)
  2179. {
  2180. return new ShaderDeclaration({ type: 'source', source, vssource }, PRIVATE_TOKEN);
  2181. }
  2182. /**
  2183. * Import a Shader from a file containing a GLSL source
  2184. * @param {string} filepath path to .glsl file relative to the shaders/ folder
  2185. * @param {string} [vsfilepath] path to a .vs.glsl file relative to the shaders/ folder
  2186. * @returns {ShaderDeclaration}
  2187. */
  2188. static import(filepath, vsfilepath = null)
  2189. {
  2190. if(!String(filepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/))
  2191. throw new errors/* FileNotFoundError */.Xg(`Can't import fragment shader at "${filepath}"`);
  2192. else if(vsfilepath != null && !String(vsfilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/))
  2193. throw new errors/* FileNotFoundError */.Xg(`Can't import vertex shader at "${vsfilepath}"`);
  2194. return new ShaderDeclaration({ type: 'filepath', filepath, vsfilepath }, PRIVATE_TOKEN);
  2195. }
  2196. /**
  2197. * Specify the list & order of arguments to be
  2198. * passed to the shader
  2199. * @param {...string} args argument names
  2200. * @returns {this}
  2201. */
  2202. withArguments(...args)
  2203. {
  2204. // the list of arguments may be declared only once
  2205. if(this._arguments.length > 0)
  2206. throw new errors/* IllegalOperationError */.js(`Redefinition of shader arguments`);
  2207. // get arguments
  2208. this._arguments = args.map(arg => String(arg));
  2209. // validate
  2210. for(const argname of this._arguments) {
  2211. if(!this._uniforms.has(argname)) {
  2212. if(!this._uniforms.has(argname + '[0]'))
  2213. throw new errors/* IllegalArgumentError */.mG(`Argument "${argname}" has not been declared in the shader`);
  2214. }
  2215. }
  2216. // done!
  2217. return this;
  2218. }
  2219. /**
  2220. * Specify a set of #defines to be prepended to the fragment shader
  2221. * @param {Object<string,number>} defines key-value pairs (define-name: define-value)
  2222. * @returns {this}
  2223. */
  2224. withDefines(defines)
  2225. {
  2226. // the list of #defines may be defined only once
  2227. if(this._defines.size > 0)
  2228. throw new errors/* IllegalOperationError */.js(`Redefinition of externally defined constants of a shader`);
  2229. // store and write the #defines
  2230. const defs = [], keys = Object.keys(defines);
  2231. for(const key of keys) {
  2232. const value = Number(defines[key]); // force numeric values (just in case)
  2233. this._defines.set(key, value);
  2234. defs.push(`#define ${key} ${value}\n`);
  2235. }
  2236. // update the shaders & the uniforms
  2237. const source = DEFAULT_FRAGMENT_SHADER_PREFIX + defs.join('') + this._source;
  2238. const vssource = DEFAULT_VERTEX_SHADER_PREFIX + defs.join('') + this._vssource + DEFAULT_VERTEX_SHADER_SUFFIX;
  2239. this._fragmentSource = ShaderPreprocessor.run(source, this._defines);
  2240. this._vertexSource = ShaderPreprocessor.run(vssource, this._defines);
  2241. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2242. // done!
  2243. return this;
  2244. }
  2245. /**
  2246. * Return the GLSL source of the fragment shader
  2247. * @returns {string}
  2248. */
  2249. get fragmentSource()
  2250. {
  2251. return this._fragmentSource;
  2252. }
  2253. /**
  2254. * Return the GLSL source of the vertex shader
  2255. * @returns {string}
  2256. */
  2257. get vertexSource()
  2258. {
  2259. return this._vertexSource;
  2260. }
  2261. /**
  2262. * Get the names of the vertex shader attributes
  2263. * @returns {typeof DEFAULT_ATTRIBUTES}
  2264. */
  2265. get attributes()
  2266. {
  2267. return DEFAULT_ATTRIBUTES;
  2268. }
  2269. /**
  2270. * Get the pre-defined locations of the vertex shader attributes
  2271. * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
  2272. */
  2273. get locationOfAttributes()
  2274. {
  2275. return DEFAULT_ATTRIBUTES_LOCATION;
  2276. }
  2277. /**
  2278. * Names of the arguments that will be passed to the Shader,
  2279. * corresponding to GLSL uniforms, in the order they will be passed
  2280. * @returns {string[]}
  2281. */
  2282. get arguments()
  2283. {
  2284. return this._arguments;
  2285. }
  2286. /**
  2287. * Names of the uniforms declared in the shader
  2288. * @returns {string[]}
  2289. */
  2290. get uniforms()
  2291. {
  2292. return Array.from(this._uniforms.keys());
  2293. }
  2294. /**
  2295. * The GLSL type of a uniform variable declared in the shader
  2296. * @param {string} name
  2297. * @returns {string}
  2298. */
  2299. uniformType(name)
  2300. {
  2301. if(!this._uniforms.has(name))
  2302. throw new errors/* IllegalArgumentError */.mG(`Unrecognized uniform variable: "${name}"`);
  2303. return this._uniforms.get(name);
  2304. }
  2305. /**
  2306. * The value of an externally defined constant, i.e., via withDefines()
  2307. * @param {string} name
  2308. * @returns {number}
  2309. */
  2310. definedConstant(name)
  2311. {
  2312. if(!this._defines.has(name))
  2313. throw new errors/* IllegalArgumentError */.mG(`Unrecognized externally defined constant: "${name}"`);
  2314. return this._defines.get(name);
  2315. }
  2316. /**
  2317. * Parses a GLSL source and detects the uniform variables,
  2318. * as well as their types
  2319. * @param {string} preprocessedSource
  2320. * @returns {Map<string,string>} specifies the types of all uniforms
  2321. */
  2322. _autodetectUniforms(preprocessedSource)
  2323. {
  2324. const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
  2325. const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
  2326. const uniforms = new Map();
  2327. let match;
  2328. while((match = regex.exec(sourceWithoutComments)) !== null) {
  2329. const type = match[2];
  2330. const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
  2331. for(const name of names) {
  2332. if(name.endsWith(']')) {
  2333. // is it an array?
  2334. if(!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/)))
  2335. throw new errors/* ParseError */.D3(`Unspecified array length for uniform "${name}" in the shader`);
  2336. // read array name & size
  2337. const [ array, size ] = [ match[1], Number(match[2]) ];
  2338. // register uniforms
  2339. for(let i = 0; i < size; i++)
  2340. uniforms.set(`${array}[${i}]`, type);
  2341. }
  2342. else {
  2343. // register a regular uniform
  2344. if(!uniforms.has(name) || uniforms.get(name) === type)
  2345. uniforms.set(name, type);
  2346. else
  2347. throw new errors/* IllegalOperationError */.js(`Redefinition of uniform "${name}" in the shader`);
  2348. }
  2349. }
  2350. }
  2351. return uniforms;
  2352. }
  2353. }
  2354. /**
  2355. * Import a ShaderDeclaration from a GLSL file
  2356. * @param {string} filepath relative to the shaders/ folder (a .glsl file)
  2357. * @param {string|null} [vsfilepath] optional vertex shader (a .vs.glsl file)
  2358. * @returns {ShaderDeclaration}
  2359. */
  2360. function importShader(filepath, vsfilepath = null)
  2361. {
  2362. return ShaderDeclaration.import(filepath, vsfilepath);
  2363. }
  2364. /**
  2365. * Create a ShaderDeclaration from a GLSL source code
  2366. * @param {string} source fragment shader
  2367. * @param {string|null} [vssource] optional vertex shader
  2368. * @returns {ShaderDeclaration}
  2369. */
  2370. function createShader(source, vssource = null)
  2371. {
  2372. return ShaderDeclaration.create(source, vssource);
  2373. }
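/*
 * Usage sketch (illustrative; the GLSL body is hypothetical and assumes the
 * threadPixel() helper from the included GLSL files): a declaration is built
 * from a fragment shader source, followed by the names of its uniforms in
 * the order they will be passed.
 *
 * const shader = createShader(`
 *     uniform sampler2D image;
 *     uniform float scale;
 *     void main() { color = threadPixel(image) * scale; }
 * `).withArguments('image', 'scale');
 */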
  2374. /***/ }),
  2375. /***/ 6776:
  2376. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_87570__) => {
  2377. "use strict";
  2378. __nested_webpack_require_87570__.r(__webpack_exports__);
  2379. /* harmony export */ __nested_webpack_require_87570__.d(__webpack_exports__, {
  2380. /* harmony export */ "conv2D": () => (/* binding */ conv2D),
  2381. /* harmony export */ "convX": () => (/* binding */ convX),
  2382. /* harmony export */ "convY": () => (/* binding */ convY)
  2383. /* harmony export */ });
  2384. /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_87570__(9759);
  2385. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_87570__(5484);
  2386. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_87570__(3841);
  2387. /*
  2388. * speedy-vision.js
  2389. * GPU-accelerated Computer Vision for JavaScript
  2390. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  2391. *
  2392. * Licensed under the Apache License, Version 2.0 (the "License");
  2393. * you may not use this file except in compliance with the License.
  2394. * You may obtain a copy of the License at
  2395. *
  2396. * http://www.apache.org/licenses/LICENSE-2.0
  2397. *
  2398. * Unless required by applicable law or agreed to in writing, software
  2399. * distributed under the License is distributed on an "AS IS" BASIS,
  2400. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2401. * See the License for the specific language governing permissions and
  2402. * limitations under the License.
  2403. *
  2404. * convolution.js
  2405. * Convolution shader generators
  2406. */
  2407. /**
  2408. * Generate a 2D convolution with a square kernel
  2409. * @param {number[]} kernel convolution kernel
  2410. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2411. */
  2412. function conv2D(kernel, normalizationConstant = 1.0)
  2413. {
  2414. const kernel32 = new Float32Array(kernel.map(x => (+x) * (+normalizationConstant)));
  2415. const kSize = Math.sqrt(kernel32.length) | 0;
  2416. const N = kSize >> 1; // idiv 2
  2417. // validate input
  2418. if(kSize < 1 || kSize % 2 == 0)
  2419. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .mG(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);
  2420. else if(kSize * kSize != kernel32.length)
  2421. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .mG(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
  2422. // select the appropriate pixel function
  2423. const pixelAtOffset = (N <= 7) ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2424. // code generator
  2425. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.cartesian */ .c.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.symmetricRange */ .c.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.symmetricRange */ .c.symmetricRange(N)).map(
  2426. cur => fn(
  2427. kernel32[(cur[0] + N) * kSize + (cur[1] + N)],
  2428. cur[0], cur[1]
  2429. )
  2430. ).join('\n');
  2431. const generateCode = (k, dy, dx) => `
  2432. result += ${pixelAtOffset}(image, ivec2(${(-dx) | 0}, ${(-dy) | 0})) * float(${+k});
  2433. `;
  2434. // shader
  2435. const source = `
  2436. uniform sampler2D image;
  2437. void main()
  2438. {
  2439. float alpha = threadPixel(image).a;
  2440. vec4 result = vec4(0.0f);
  2441. ${foreachKernelElement(generateCode)}
  2442. color = vec4(result.rgb, alpha);
  2443. }
  2444. `;
  2445. // done!
  2446. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .ef)(source).withArguments('image');
  2447. }
  2448. /**
  2449. * Generate a 1D convolution function on the x-axis
  2450. * @param {number[]} kernel convolution kernel
  2451. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2452. */
  2453. function convX(kernel, normalizationConstant = 1.0)
  2454. {
  2455. return conv1D('x', kernel, normalizationConstant);
  2456. }
  2457. /**
  2458. * Generate a 1D convolution function on the y-axis
  2459. * @param {number[]} kernel convolution kernel
  2460. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2461. */
  2462. function convY(kernel, normalizationConstant = 1.0)
  2463. {
  2464. return conv1D('y', kernel, normalizationConstant);
  2465. }
  2466. /**
  2467. * 1D convolution function generator
  2468. * @param {string} axis either "x" or "y"
  2469. * @param {number[]} kernel convolution kernel
  2470. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2471. */
  2472. function conv1D(axis, kernel, normalizationConstant = 1.0)
  2473. {
  2474. const kernel32 = new Float32Array(kernel.map(x => (+x) * (+normalizationConstant)));
  2475. const kSize = kernel32.length;
  2476. const N = kSize >> 1; // idiv 2
  2477. // validate input
  2478. if(kSize < 1 || kSize % 2 == 0)
  2479. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .mG(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);
  2480. else if(axis != 'x' && axis != 'y')
  2481. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .mG(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
  2482. // select the appropriate pixel function
  2483. const pixelAtOffset = (N <= 7) ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2484. // code generator
  2485. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils.symmetricRange */ .c.symmetricRange(N).reduce(
  2486. (acc, cur) => acc + fn(kernel32[cur + N], cur),
  2487. '');
  2488. const generateCode = (k, i) => ((axis == 'x') ? `
  2489. pixel += ${pixelAtOffset}(image, ivec2(${(-i) | 0}, 0)) * float(${+k});
  2490. ` : `
  2491. pixel += ${pixelAtOffset}(image, ivec2(0, ${(-i) | 0})) * float(${+k});
  2492. `);
  2493. // shader
  2494. const source = `
  2495. uniform sampler2D image;
  2496. void main()
  2497. {
  2498. float alpha = threadPixel(image).a;
  2499. vec4 pixel = vec4(0.0f);
  2500. ${foreachKernelElement(generateCode)}
  2501. color = vec4(pixel.rgb, alpha);
  2502. }
  2503. `;
  2504. // done!
  2505. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .ef)(source).withArguments('image');
  2506. }
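/*
 * Usage sketch (illustrative): a 3x3 box blur and its separable variant,
 * normalized by the total kernel weight.
 *
 * const box3 = conv2D([1,1,1, 1,1,1, 1,1,1], 1/9); // 3x3 kernel (9 entries)
 * const boxX = convX([1,1,1], 1/3);                // 1D pass along the x-axis
 * const boxY = convY([1,1,1], 1/3);                // 1D pass along the y-axis
 */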
  2507. /***/ }),
  2508. /***/ 524:
  2509. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_93522__) => {
  2510. var map = {
  2511. "./colors.glsl": 2545,
  2512. "./filters.glsl": 7373,
  2513. "./fixed-point.glsl": 2229,
  2514. "./float16.glsl": 919,
  2515. "./global.glsl": 3815,
  2516. "./int32.glsl": 1830,
  2517. "./keypoint-descriptors.glsl": 1364,
  2518. "./keypoint-matches.glsl": 4004,
  2519. "./keypoints.glsl": 8714,
  2520. "./math.glsl": 9010,
  2521. "./pyramids.glsl": 6433,
  2522. "./subpixel.glsl": 4697
  2523. };
  2524. function webpackContext(req) {
  2525. var id = webpackContextResolve(req);
  2526. return __nested_webpack_require_93522__(id);
  2527. }
  2528. function webpackContextResolve(req) {
  2529. if(!__nested_webpack_require_93522__.o(map, req)) {
  2530. var e = new Error("Cannot find module '" + req + "'");
  2531. e.code = 'MODULE_NOT_FOUND';
  2532. throw e;
  2533. }
  2534. return map[req];
  2535. }
  2536. webpackContext.keys = function webpackContextKeys() {
  2537. return Object.keys(map);
  2538. };
  2539. webpackContext.resolve = webpackContextResolve;
  2540. module.exports = webpackContext;
  2541. webpackContext.id = 524;
  2542. /***/ }),
  2543. /***/ 2863:
  2544. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_94446__) => {
  2545. var map = {
  2546. "./filters/convolution": 6776,
  2547. "./filters/convolution.js": 6776,
  2548. "./filters/convolution1d.glsl": 4645,
  2549. "./filters/convolution2d.glsl": 6942,
  2550. "./filters/fast-median.glsl": 7054,
  2551. "./filters/nightvision.glsl": 8961,
  2552. "./filters/normalize-image.glsl": 9571,
  2553. "./filters/rgb2grey.glsl": 8466,
  2554. "./include/colors.glsl": 2545,
  2555. "./include/filters.glsl": 7373,
  2556. "./include/fixed-point.glsl": 2229,
  2557. "./include/float16.glsl": 919,
  2558. "./include/global.glsl": 3815,
  2559. "./include/int32.glsl": 1830,
  2560. "./include/keypoint-descriptors.glsl": 1364,
  2561. "./include/keypoint-matches.glsl": 4004,
  2562. "./include/keypoints.glsl": 8714,
  2563. "./include/math.glsl": 9010,
  2564. "./include/pyramids.glsl": 6433,
  2565. "./include/subpixel.glsl": 4697,
  2566. "./keypoints/allocate-descriptors.glsl": 2289,
  2567. "./keypoints/allocate-extra.glsl": 5725,
  2568. "./keypoints/apply-homography.glsl": 3801,
  2569. "./keypoints/bf-knn.glsl": 2346,
  2570. "./keypoints/clip-border.glsl": 4180,
  2571. "./keypoints/clip.glsl": 7771,
  2572. "./keypoints/distance-filter.glsl": 8938,
  2573. "./keypoints/encode-keypoint-long-offsets.glsl": 4802,
  2574. "./keypoints/encode-keypoint-offsets.glsl": 6253,
  2575. "./keypoints/encode-keypoint-positions.glsl": 384,
  2576. "./keypoints/encode-keypoint-properties.glsl": 500,
  2577. "./keypoints/encode-keypoints.glsl": 3673,
  2578. "./keypoints/encode-null-keypoints.glsl": 1703,
  2579. "./keypoints/fast.glsl": 2633,
  2580. "./keypoints/fast.vs.glsl": 535,
  2581. "./keypoints/hamming-distance-filter.glsl": 3232,
  2582. "./keypoints/harris-cutoff.glsl": 8356,
  2583. "./keypoints/harris.glsl": 7339,
  2584. "./keypoints/knn-init.glsl": 3177,
  2585. "./keypoints/knn-transfer.glsl": 2769,
  2586. "./keypoints/laplacian.glsl": 2006,
  2587. "./keypoints/lk.glsl": 3329,
  2588. "./keypoints/lookup-of-locations.glsl": 4251,
  2589. "./keypoints/lookup-of-locations.vs.glsl": 4747,
  2590. "./keypoints/lsh-knn.glsl": 7421,
  2591. "./keypoints/mix-keypoints.glsl": 4523,
  2592. "./keypoints/nonmax-scale.glsl": 2277,
  2593. "./keypoints/nonmax-space.glsl": 8430,
  2594. "./keypoints/nonmax-suppression.glsl": 9743,
  2595. "./keypoints/orb-descriptor.glsl": 3464,
  2596. "./keypoints/orb-orientation.glsl": 7184,
  2597. "./keypoints/refine-scale.glsl": 7220,
  2598. "./keypoints/score-findmax.glsl": 805,
  2599. "./keypoints/shuffle.glsl": 8736,
  2600. "./keypoints/sort-keypoints.glsl": 9311,
  2601. "./keypoints/subpixel-refinement.glsl": 9423,
  2602. "./keypoints/transfer-flow.glsl": 2060,
  2603. "./keypoints/transfer-orientation.glsl": 5463,
  2604. "./keypoints/transfer-to-extra.glsl": 6986,
  2605. "./keypoints/upload-keypoints.glsl": 3179,
  2606. "./pyramids/downsample2.glsl": 8680,
  2607. "./pyramids/upsample2.glsl": 3384,
  2608. "./transforms/additive-mix.glsl": 1976,
  2609. "./transforms/resize.glsl": 4543,
  2610. "./transforms/warp-perspective.glsl": 6296,
  2611. "./utils/copy-components.glsl": 747,
  2612. "./utils/copy-raster.glsl": 9176,
  2613. "./utils/copy.glsl": 8960,
  2614. "./utils/fill-components.glsl": 3294,
  2615. "./utils/fill.glsl": 1959,
  2616. "./utils/flip-y.vs.glsl": 7290,
  2617. "./utils/scan-minmax2d.glsl": 7270,
  2618. "./utils/sobel-derivatives.glsl": 48,
  2619. "./utils/sobel-derivatives.vs.glsl": 3713
  2620. };
  2621. function webpackContext(req) {
  2622. var id = webpackContextResolve(req);
  2623. return __nested_webpack_require_94446__(id);
  2624. }
  2625. function webpackContextResolve(req) {
  2626. if(!__nested_webpack_require_94446__.o(map, req)) {
  2627. var e = new Error("Cannot find module '" + req + "'");
  2628. e.code = 'MODULE_NOT_FOUND';
  2629. throw e;
  2630. }
  2631. return map[req];
  2632. }
  2633. webpackContext.keys = function webpackContextKeys() {
  2634. return Object.keys(map);
  2635. };
  2636. webpackContext.resolve = webpackContextResolve;
  2637. module.exports = webpackContext;
  2638. webpackContext.id = 2863;
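/*
 * Sketch of how this require.context map is used: webpackContext('./filters/rgb2grey.glsl')
 * resolves the request through `map` to module id 8466 and then require()s it;
 * webpackContext.keys() lists every mapped path, and unknown requests throw
 * an error with code MODULE_NOT_FOUND.
 */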
  2639. /***/ }),
  2640. /***/ 7905:
  2641. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_97977__) => {
  2642. "use strict";
  2643. /* harmony export */ __nested_webpack_require_97977__.d(__webpack_exports__, {
  2644. /* harmony export */ "$": () => (/* binding */ SpeedyGL)
  2645. /* harmony export */ });
  2646. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_97977__(5484);
  2647. /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_97977__(9845);
  2648. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_97977__(4500);
  2649. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_97977__(3841);
  2650. /*
  2651. * speedy-vision.js
  2652. * GPU-accelerated Computer Vision for JavaScript
  2653. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  2654. *
  2655. * Licensed under the Apache License, Version 2.0 (the "License");
  2656. * you may not use this file except in compliance with the License.
  2657. * You may obtain a copy of the License at
  2658. *
  2659. * http://www.apache.org/licenses/LICENSE-2.0
  2660. *
  2661. * Unless required by applicable law or agreed to in writing, software
  2662. * distributed under the License is distributed on an "AS IS" BASIS,
  2663. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2664. * See the License for the specific language governing permissions and
  2665. * limitations under the License.
  2666. *
  2667. * speedy-gl.js
  2668. * A wrapper around the WebGL Rendering Context
  2669. */
  2670. /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
  2671. // Constants
  2672. const SINGLETON_KEY = Symbol();
  2673. const DEFAULT_POWER_PREFERENCE = 'default';
  2674. //
  2675. // We use a small canvas to improve the performance
  2676. // of createImageBitmap() on Firefox.
  2677. //
2678. // A large canvas (2048x2048) causes an FPS drop, even
  2679. // if we only extract a small region of it (this is
  2680. // unlike Chrome, which is fast).
  2681. //
  2682. // Note: we automatically increase the size of the
  2683. // canvas (as needed) when rendering to it.
  2684. //
  2685. const CANVAS_WIDTH = 16, CANVAS_HEIGHT = 16;
  2686. /** @type {SpeedyGL} Singleton */
  2687. let instance = null;
  2688. /** @type {PowerPreference} power preference */
  2689. let powerPreference = DEFAULT_POWER_PREFERENCE;
  2690. /**
  2691. * A wrapper around the WebGL Rendering Context
  2692. */
  2693. class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_3__/* .Observable */ .y
  2694. {
  2695. /**
  2696. * Constructor
  2697. * @param {Symbol} key
  2698. * @private
  2699. */
  2700. constructor(key)
  2701. {
  2702. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils.assert */ .c.assert(key === SINGLETON_KEY);
  2703. super();
  2704. /** @type {boolean} internal flag */
  2705. this._reinitializeOnContextLoss = true;
  2706. /** @type {HTMLCanvasElement} canvas */
  2707. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2708. /** @type {WebGL2RenderingContext} WebGL rendering context */
  2709. this._gl = null;
  2710. // create WebGL2 rendering context
  2711. this._gl = this._createContext(this._canvas);
  2712. }
  2713. /**
  2714. * Get Singleton
  2715. * @returns {SpeedyGL}
  2716. */
  2717. static get instance()
  2718. {
  2719. return instance || (instance = new SpeedyGL(SINGLETON_KEY));
  2720. }
  2721. /**
  2722. * The WebGL Rendering Context
  2723. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  2724. * @returns {WebGL2RenderingContext}
  2725. */
  2726. get gl()
  2727. {
  2728. return this._gl;
  2729. }
  2730. /**
  2731. * The canvas
  2732. * @returns {HTMLCanvasElement}
  2733. */
  2734. get canvas()
  2735. {
  2736. return this._canvas;
  2737. }
  2738. /**
  2739. * Create a WebGL-capable canvas
  2740. * @param {Function} reinitialize to be called if we get a WebGL context loss event
  2741. * @returns {HTMLCanvasElement}
  2742. */
  2743. _createCanvas(reinitialize)
  2744. {
  2745. const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils.createCanvas */ .c.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
  2746. canvas.addEventListener('webglcontextlost', ev => {
  2747. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils.warning */ .c.warning(`Lost WebGL2 context`);
  2748. setTimeout(reinitialize, 0);
  2749. ev.preventDefault();
  2750. }, false);
  2751. /*canvas.addEventListener('webglcontextrestored', ev => {
  2752. Utils.warning(`Restored WebGL2 context`);
  2753. ev.preventDefault();
  2754. }, false);*/
  2755. return canvas;
  2756. }
  2757. /**
  2758. * Create a WebGL2 Rendering Context
  2759. * @param {HTMLCanvasElement} canvas
  2760. * @returns {WebGL2RenderingContext}
  2761. */
  2762. _createContext(canvas)
  2763. {
  2764. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils.log */ .c.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
  2765. // does the browser support WebGL2?
  2766. if(typeof WebGL2RenderingContext === 'undefined')
  2767. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .NotSupportedError */ .B8(`This application requires WebGL2. Please use a different browser.`);
  2768. const gl = canvas.getContext('webgl2', {
  2769. premultipliedAlpha: false,
  2770. preserveDrawingBuffer: false,
  2771. powerPreference: powerPreference,
  2772. alpha: true, // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  2773. antialias: false,
  2774. depth: false,
  2775. stencil: false,
  2776. desynchronized: true,
  2777. });
  2778. if(!gl)
  2779. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .NotSupportedError */ .B8(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
  2780. return gl;
  2781. }
  2782. /**
  2783. * Reinitialize WebGL
  2784. */
  2785. _reinitialize()
  2786. {
  2787. // disable reinitialization?
  2788. if(!this._reinitializeOnContextLoss)
  2789. return;
  2790. // warning
  2791. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils.warning */ .c.warning(`Reinitializing WebGL2...`);
  2792. // create new canvas
  2793. this._canvas.remove();
  2794. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2795. // create new context
  2796. this._gl = this._createContext(this._canvas);
  2797. // notify observers: we have a new context!
  2798. // we need to recreate all textures...
  2799. this._notify();
  2800. }
  2801. /**
  2802. * Lose the WebGL context. This is used to manually
  2803. * free resources, and also for purposes of testing
  2804. * @returns {WEBGL_lose_context}
  2805. */
  2806. loseContext()
  2807. {
  2808. const gl = this._gl;
  2809. // nothing to do?
  2810. if(gl.isContextLost())
  2811. return;
  2812. // find the appropriate extension
  2813. const ext = gl.getExtension('WEBGL_lose_context');
  2814. if(!ext)
  2815. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .NotSupportedError */ .B8('WEBGL_lose_context extension is unavailable');
  2816. // disable reinitialization
  2817. this._reinitializeOnContextLoss = false;
  2818. // lose context
  2819. ext.loseContext();
  2820. // done!
  2821. return ext;
  2822. }
  2823. /**
  2824. * Lose & restore the WebGL context
  2825. * @param {number} [secondsToRestore]
  2826. * @return {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
  2827. */
  2828. loseAndRestoreContext(secondsToRestore = 1)
  2829. {
  2830. const ms = Math.max(secondsToRestore, 0) * 1000;
  2831. const ext = this.loseContext();
  2832. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .s(resolve => {
  2833. setTimeout(() => {
  2834. //ext.restoreContext();
  2835. this._reinitializeOnContextLoss = true;
  2836. this._reinitialize();
  2837. setTimeout(() => resolve(ext), 0); // next frame
  2838. }, ms);
  2839. });
  2840. }
  2841. /**
  2842. * Power preference for the WebGL context
  2843. * @returns {PowerPreference}
  2844. */
  2845. static get powerPreference()
  2846. {
  2847. return powerPreference;
  2848. }
  2849. /**
  2850. * Power preference for the WebGL context
  2851. * @param {PowerPreference} value
  2852. */
  2853. static set powerPreference(value)
  2854. {
  2855. // validate
  2856. if(!(value === 'default' || value === 'low-power' || value === 'high-performance'))
  2857. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .mG(`Invalid powerPreference: "${value}"`);
  2858. // the power preference should be set before we create the WebGL context
  2859. if(instance == null || powerPreference !== value) {
  2860. powerPreference = value;
  2861. // recreate the context if it already exists. Experimental.
  2862. if(instance != null)
  2863. instance.loseAndRestoreContext();
  2864. }
  2865. }
  2866. }
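/*
 * A minimal usage sketch of the singleton above:
 * SpeedyGL.powerPreference = 'low-power';   // ideally set before the first access;
 *                                           // changing it later recreates the context (experimental)
 * const speedyGL = SpeedyGL.instance;       // lazily creates the canvas + WebGL2 context
 * speedyGL.subscribe(() => console.log('context recreated'));  // notified after a context loss is handled
 * const gl = speedyGL.gl;                   // don't cache this reference: the context may be lost
 */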
  2867. /***/ }),
  2868. /***/ 3841:
  2869. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_106533__) => {
  2870. "use strict";
  2871. /* harmony export */ __nested_webpack_require_106533__.d(__webpack_exports__, {
  2872. /* harmony export */ "nU": () => (/* binding */ SpeedyError),
  2873. /* harmony export */ "B8": () => (/* binding */ NotSupportedError),
  2874. /* harmony export */ "Ql": () => (/* binding */ GLError),
  2875. /* harmony export */ "Mi": () => (/* binding */ AbstractMethodError),
  2876. /* harmony export */ "mG": () => (/* binding */ IllegalArgumentError),
  2877. /* harmony export */ "js": () => (/* binding */ IllegalOperationError),
  2878. /* harmony export */ "Cx": () => (/* binding */ OutOfMemoryError),
  2879. /* harmony export */ "Xg": () => (/* binding */ FileNotFoundError),
  2880. /* harmony export */ "tg": () => (/* binding */ ResourceNotLoadedError),
  2881. /* harmony export */ "W5": () => (/* binding */ TimeoutError),
  2882. /* harmony export */ "D3": () => (/* binding */ ParseError),
  2883. /* harmony export */ "ps": () => (/* binding */ AssertionError),
  2884. /* harmony export */ "$y": () => (/* binding */ AccessDeniedError),
  2885. /* harmony export */ "IT": () => (/* binding */ WebAssemblyError)
  2886. /* harmony export */ });
  2887. /* unused harmony export NotImplementedError */
  2888. /*
  2889. * speedy-vision.js
  2890. * GPU-accelerated Computer Vision for JavaScript
  2891. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  2892. *
  2893. * Licensed under the Apache License, Version 2.0 (the "License");
  2894. * you may not use this file except in compliance with the License.
  2895. * You may obtain a copy of the License at
  2896. *
  2897. * http://www.apache.org/licenses/LICENSE-2.0
  2898. *
  2899. * Unless required by applicable law or agreed to in writing, software
  2900. * distributed under the License is distributed on an "AS IS" BASIS,
  2901. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2902. * See the License for the specific language governing permissions and
  2903. * limitations under the License.
  2904. *
  2905. * errors.js
  2906. * Error classes
  2907. */
  2908. /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
  2909. /**
  2910. * Generic error class for Speedy
  2911. */
  2912. class SpeedyError extends Error
  2913. {
  2914. /**
  2915. * Class constructor
  2916. * @param {string} message message text
  2917. * @param {SpeedyErrorCause} [cause] cause of the error
  2918. */
  2919. constructor(message, cause = null)
  2920. {
  2921. super([
  2922. message,
  2923. cause ? cause.toString() : '[speedy-vision.js]'
  2924. ].join('\n-> '));
  2925. /** @type {SpeedyErrorCause} cause of the error */
  2926. this._cause = cause;
  2927. }
  2928. /**
  2929. * Error name
  2930. * @returns {string}
  2931. */
  2932. get name()
  2933. {
  2934. return this.constructor.name;
  2935. }
  2936. /**
  2937. * Set error name (ignored)
  2938. * @param {string} _ ignored
  2939. */
  2940. set name(_)
  2941. {
  2942. void(0);
  2943. }
  2944. /**
  2945. * Get the cause of the error. Available if
  2946. * it has been specified in the constructor
  2947. * @returns {SpeedyErrorCause}
  2948. */
  2949. get cause()
  2950. {
  2951. return this._cause;
  2952. }
  2953. }
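/*
 * A small sketch of the cause chain, using the subclasses below:
 * const inner = new GLError('INVALID_OPERATION');
 * const outer = new IllegalOperationError('Pipeline not ready', inner);
 * outer.name    // "IllegalOperationError" (derived from the constructor)
 * outer.cause   // the GLError instance
 * outer.message // each message joined with "\n-> ", ending in "[speedy-vision.js]"
 */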
  2954. /**
  2955. * Unsupported operation error
  2956. * The requested operation is not supported
  2957. */
  2958. class NotSupportedError extends SpeedyError
  2959. {
  2960. /**
  2961. * Class constructor
  2962. * @param {string} [message] additional text
  2963. * @param {SpeedyErrorCause} [cause] cause of the error
  2964. */
  2965. constructor(message = '', cause = null)
  2966. {
  2967. super(`Unsupported operation. ${message}`, cause);
  2968. }
  2969. }
  2970. /**
  2971. * Not implemented error
  2972. * The called method is not implemented
  2973. */
  2974. class NotImplementedError extends (/* unused pure expression or super */ null && (0))
  2975. {
  2976. /**
  2977. * Class constructor
  2978. * @param {string} [message] additional text
  2979. * @param {SpeedyErrorCause} [cause] cause of the error
  2980. */
  2981. constructor(message = '', cause = null)
  2982. {
  2983. super(`Method not implemented. ${message}`, cause);
  2984. }
  2985. }
  2986. /**
  2987. * WebGL error
  2988. */
  2989. class GLError extends SpeedyError
  2990. {
  2991. /**
  2992. * Class constructor
  2993. * @param {string} [message] additional text
  2994. * @param {SpeedyErrorCause} [cause] cause of the error
  2995. */
  2996. constructor(message = '', cause = null)
  2997. {
  2998. super(`WebGL error. ${message}`, cause);
  2999. }
  3000. /**
  3001. * Get an error object describing the latest WebGL error
  3002. * @param {WebGL2RenderingContext} gl
  3003. * @returns {GLError}
  3004. */
  3005. static from(gl)
  3006. {
  3007. const recognizedErrors = [
  3008. 'NO_ERROR',
  3009. 'INVALID_ENUM',
  3010. 'INVALID_VALUE',
  3011. 'INVALID_OPERATION',
  3012. 'INVALID_FRAMEBUFFER_OPERATION',
  3013. 'OUT_OF_MEMORY',
  3014. 'CONTEXT_LOST_WEBGL',
  3015. ];
  3016. const glError = gl.getError();
  3017. const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
  3018. return new GLError(message);
  3019. }
  3020. }
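/*
 * Sketch, assuming a WebGL2 context `gl`: GLError.from(gl) reads gl.getError()
 * itself and maps the code back to its enum name, e.g.
 *   throw GLError.from(gl);   // "WebGL error. INVALID_VALUE", or "Unknown" if unrecognized
 */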
  3021. /**
  3022. * AbstractMethodError
  3023. * Thrown when one tries to call an abstract method
  3024. */
  3025. class AbstractMethodError extends SpeedyError
  3026. {
  3027. /**
  3028. * Class constructor
  3029. * @param {string} [message] additional text
  3030. * @param {SpeedyErrorCause} [cause] cause of the error
  3031. */
  3032. constructor(message = '', cause = null)
  3033. {
  3034. super(`Can't call abstract method. ${message}`, cause);
  3035. }
  3036. }
  3037. /**
  3038. * Illegal argument error
  3039. * A method has received one or more illegal arguments
  3040. */
  3041. class IllegalArgumentError extends SpeedyError
  3042. {
  3043. /**
  3044. * Class constructor
  3045. * @param {string} [message] additional text
  3046. * @param {SpeedyErrorCause} [cause] cause of the error
  3047. */
  3048. constructor(message = '', cause = null)
  3049. {
  3050. super(`Illegal argument. ${message}`, cause);
  3051. }
  3052. }
  3053. /**
  3054. * Illegal operation error
  3055. * The method arguments are valid, but the method can't
3056. * be called due to the current state of the object
  3057. */
  3058. class IllegalOperationError extends SpeedyError
  3059. {
  3060. /**
  3061. * Class constructor
  3062. * @param {string} [message] additional text
  3063. * @param {SpeedyErrorCause} [cause] cause of the error
  3064. */
  3065. constructor(message = '', cause = null)
  3066. {
  3067. super(`Illegal operation. ${message}`, cause);
  3068. }
  3069. }
  3070. /**
  3071. * Out of memory
  3072. */
  3073. class OutOfMemoryError extends SpeedyError
  3074. {
  3075. /**
  3076. * Class constructor
  3077. * @param {string} [message] additional text
  3078. * @param {SpeedyErrorCause} [cause] cause of the error
  3079. */
  3080. constructor(message = '', cause = null)
  3081. {
  3082. super(`Out of memory. ${message}`, cause);
  3083. }
  3084. }
  3085. /**
  3086. * File not found error
  3087. */
  3088. class FileNotFoundError extends SpeedyError
  3089. {
  3090. /**
  3091. * Class constructor
  3092. * @param {string} [message] additional text
  3093. * @param {SpeedyErrorCause} [cause] cause of the error
  3094. */
  3095. constructor(message = '', cause = null)
  3096. {
  3097. super(`File not found. ${message}`, cause);
  3098. }
  3099. }
  3100. /**
  3101. * Resource not loaded error
  3102. */
  3103. class ResourceNotLoadedError extends SpeedyError
  3104. {
  3105. /**
  3106. * Class constructor
  3107. * @param {string} [message] additional text
  3108. * @param {SpeedyErrorCause} [cause] cause of the error
  3109. */
  3110. constructor(message = '', cause = null)
  3111. {
  3112. super(`Resource not loaded. ${message}`, cause);
  3113. }
  3114. }
  3115. /**
  3116. * Timeout error
  3117. */
  3118. class TimeoutError extends SpeedyError
  3119. {
  3120. /**
  3121. * Class constructor
  3122. * @param {string} [message] additional text
  3123. * @param {SpeedyErrorCause} [cause] cause of the error
  3124. */
  3125. constructor(message = '', cause = null)
  3126. {
  3127. super(`Timeout error. ${message}`, cause);
  3128. }
  3129. }
  3130. /**
  3131. * Parse error
  3132. */
  3133. class ParseError extends SpeedyError
  3134. {
  3135. /**
  3136. * Class constructor
  3137. * @param {string} [message] additional text
  3138. * @param {SpeedyErrorCause} [cause] cause of the error
  3139. */
  3140. constructor(message = '', cause = null)
  3141. {
  3142. super(`Parse error. ${message}`, cause);
  3143. }
  3144. }
  3145. /**
  3146. * Assertion error
  3147. */
  3148. class AssertionError extends SpeedyError
  3149. {
  3150. /**
  3151. * Class constructor
  3152. * @param {string} [message] additional text
  3153. * @param {SpeedyErrorCause} [cause] cause of the error
  3154. */
  3155. constructor(message = '', cause = null)
  3156. {
  3157. super(`Assertion failed. ${message}`, cause);
  3158. }
  3159. }
  3160. /**
  3161. * Access denied
  3162. */
  3163. class AccessDeniedError extends SpeedyError
  3164. {
  3165. /**
  3166. * Class constructor
  3167. * @param {string} [message] additional text
  3168. * @param {SpeedyErrorCause} [cause] cause of the error
  3169. */
  3170. constructor(message = '', cause = null)
  3171. {
  3172. super(`Access denied. ${message}`, cause);
  3173. }
  3174. }
  3175. /**
  3176. * WebAssembly error
  3177. */
  3178. class WebAssemblyError extends SpeedyError
  3179. {
  3180. /**
  3181. * Class constructor
  3182. * @param {string} [message] additional text
  3183. * @param {SpeedyErrorCause} [cause] cause of the error
  3184. */
  3185. constructor(message = '', cause = null)
  3186. {
  3187. super(`WebAssembly error. ${message}`, cause);
  3188. }
  3189. }
  3190. /***/ }),
  3191. /***/ 3020:
  3192. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_115191__) => {
  3193. "use strict";
  3194. __nested_webpack_require_115191__.r(__webpack_exports__);
  3195. /* harmony export */ __nested_webpack_require_115191__.d(__webpack_exports__, {
  3196. /* harmony export */ "PYRAMID_MAX_LEVELS": () => (/* binding */ PYRAMID_MAX_LEVELS),
  3197. /* harmony export */ "LOG2_PYRAMID_MAX_SCALE": () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
  3198. /* harmony export */ "PYRAMID_MAX_SCALE": () => (/* binding */ PYRAMID_MAX_SCALE),
  3199. /* harmony export */ "FIX_BITS": () => (/* binding */ FIX_BITS),
  3200. /* harmony export */ "FIX_RESOLUTION": () => (/* binding */ FIX_RESOLUTION),
  3201. /* harmony export */ "MAX_TEXTURE_LENGTH": () => (/* binding */ MAX_TEXTURE_LENGTH),
  3202. /* harmony export */ "MIN_KEYPOINT_SIZE": () => (/* binding */ MIN_KEYPOINT_SIZE),
  3203. /* harmony export */ "MIN_ENCODER_LENGTH": () => (/* binding */ MIN_ENCODER_LENGTH),
  3204. /* harmony export */ "MAX_ENCODER_CAPACITY": () => (/* binding */ MAX_ENCODER_CAPACITY),
  3205. /* harmony export */ "DEFAULT_ENCODER_CAPACITY": () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
  3206. /* harmony export */ "LOG2_MAX_DESCRIPTOR_SIZE": () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
  3207. /* harmony export */ "MAX_DESCRIPTOR_SIZE": () => (/* binding */ MAX_DESCRIPTOR_SIZE),
  3208. /* harmony export */ "MATCH_INDEX_BITS": () => (/* binding */ MATCH_INDEX_BITS),
  3209. /* harmony export */ "MATCH_INDEX_MASK": () => (/* binding */ MATCH_INDEX_MASK),
  3210. /* harmony export */ "MATCH_MAX_INDEX": () => (/* binding */ MATCH_MAX_INDEX),
  3211. /* harmony export */ "MATCH_MAX_DISTANCE": () => (/* binding */ MATCH_MAX_DISTANCE),
  3212. /* harmony export */ "LITTLE_ENDIAN": () => (/* binding */ LITTLE_ENDIAN)
  3213. /* harmony export */ });
  3214. /*
  3215. * speedy-vision.js
  3216. * GPU-accelerated Computer Vision for JavaScript
  3217. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  3218. *
  3219. * Licensed under the Apache License, Version 2.0 (the "License");
  3220. * you may not use this file except in compliance with the License.
  3221. * You may obtain a copy of the License at
  3222. *
  3223. * http://www.apache.org/licenses/LICENSE-2.0
  3224. *
  3225. * Unless required by applicable law or agreed to in writing, software
  3226. * distributed under the License is distributed on an "AS IS" BASIS,
  3227. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3228. * See the License for the specific language governing permissions and
  3229. * limitations under the License.
  3230. *
  3231. * globals.js
  3232. * Global constants
  3233. */
  3234. // -----------------------------------------------------------------
  3235. // IMAGE PYRAMIDS & SCALE-SPACE
  3236. // -----------------------------------------------------------------
  3237. /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
  3238. const PYRAMID_MAX_LEVELS = 8;
  3239. /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
  3240. const LOG2_PYRAMID_MAX_SCALE = 0;
  3241. /** @type {number} The maximum supported scale for a pyramid level */
  3242. const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
  3243. // -----------------------------------------------------------------
  3244. // FIXED-POINT MATH
  3245. // -----------------------------------------------------------------
  3246. /** @type {number} How many bits do we use to store fractional data? */
  3247. const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
  3248. /** @type {number} Fixed-point resolution */
  3249. const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
  3250. // -----------------------------------------------------------------
  3251. // TEXTURE LIMITS
  3252. // -----------------------------------------------------------------
  3253. /** @type {number} Maximum texture length (width, height) */
  3254. const MAX_TEXTURE_LENGTH = (1 << (16 - FIX_BITS)) - 1; // must be 2^n - 1 due to keypoint encoding
  3255. // -----------------------------------------------------------------
  3256. // KEYPOINTS
  3257. // -----------------------------------------------------------------
  3258. /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
  3259. const MIN_KEYPOINT_SIZE = 8;
  3260. /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
  3261. const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
  3262. /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
  3263. const MAX_ENCODER_CAPACITY = 8192;
  3264. /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
  3265. const DEFAULT_ENCODER_CAPACITY = 2048;
  3266. /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
  3267. const LOG2_MAX_DESCRIPTOR_SIZE = 6;
  3268. /** @type {number} maximum size of a keypoint descriptor, in bytes */
  3269. const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
  3270. /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
  3271. const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
  3272. /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
  3273. const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
  3274. /** @type {number} Maximum size of the database of keypoints for matching */
  3275. const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
  3276. /** @type {number} The maximum distance that can be stored in a match */
  3277. const MATCH_MAX_DISTANCE = (1 << (32 - MATCH_INDEX_BITS)) - 1;
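// Worked out with the values above: MATCH_INDEX_BITS = 32 - (6 + 3) = 23, so an
// encoded match reserves 23 bits for the keypoint index (MATCH_INDEX_MASK =
// MATCH_MAX_INDEX = 2^23 - 1 = 8388607) and the remaining 9 bits for the
// descriptor distance (MATCH_MAX_DISTANCE = 2^9 - 1 = 511).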
  3278. // -----------------------------------------------------------------
  3279. // MISC
  3280. // -----------------------------------------------------------------
  3281. /** @type {boolean} Are we in a little-endian machine? */
  3282. const LITTLE_ENDIAN = (function() {
  3283. return 0xCAFE === (new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer))[0];
  3284. })();
  3285. /***/ }),
  3286. /***/ 9845:
  3287. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_120825__) => {
  3288. "use strict";
  3289. /* harmony export */ __nested_webpack_require_120825__.d(__webpack_exports__, {
  3290. /* harmony export */ "y": () => (/* binding */ Observable)
  3291. /* harmony export */ });
  3292. /*
  3293. * speedy-vision.js
  3294. * GPU-accelerated Computer Vision for JavaScript
  3295. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  3296. *
  3297. * Licensed under the Apache License, Version 2.0 (the "License");
  3298. * you may not use this file except in compliance with the License.
  3299. * You may obtain a copy of the License at
  3300. *
  3301. * http://www.apache.org/licenses/LICENSE-2.0
  3302. *
  3303. * Unless required by applicable law or agreed to in writing, software
  3304. * distributed under the License is distributed on an "AS IS" BASIS,
  3305. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3306. * See the License for the specific language governing permissions and
  3307. * limitations under the License.
  3308. *
  3309. * observable.js
  3310. * Observer design pattern
  3311. */
  3312. /**
  3313. * Implementation of the Observer design pattern
  3314. * @abstract
  3315. */
  3316. class Observable
  3317. {
  3318. /**
  3319. * Constructor
  3320. */
  3321. constructor()
  3322. {
  3323. /** @type {Function[]} subscribers / callbacks */
  3324. this._subscribers = [];
  3325. /** @type {object[]} "this" pointers */
  3326. this._thisptr = [];
  3327. /** @type {Array<any[]>} function arguments */
  3328. this._args = [];
  3329. }
  3330. /**
  3331. * Add subscriber
  3332. * @param {Function} fn callback
  3333. * @param {object} [thisptr] "this" pointer to be used when invoking the callback
  3334. * @param {...any} args arguments to be passed to the callback
  3335. */
  3336. subscribe(fn, thisptr, ...args)
  3337. {
  3338. this._subscribers.push(fn);
  3339. this._thisptr.push(thisptr);
  3340. this._args.push(args);
  3341. }
  3342. /**
  3343. * Remove subscriber
  3344. * @param {Function} fn previously added callback
  3345. * @param {object} [thisptr] "this" pointer
  3346. */
  3347. unsubscribe(fn, thisptr)
  3348. {
  3349. for(let j = this._subscribers.length - 1; j >= 0; j--) {
  3350. if(this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
  3351. this._subscribers.splice(j, 1);
  3352. this._thisptr.splice(j, 1);
  3353. this._args.splice(j, 1);
  3354. break;
  3355. }
  3356. }
  3357. }
  3358. /**
  3359. * Notify all subscribers about a state change
  3360. * @protected
  3361. */
  3362. _notify()
  3363. {
  3364. for(let i = 0; i < this._subscribers.length; i++)
  3365. this._subscribers[i].call(this._thisptr[i], ...(this._args[i]));
  3366. }
  3367. }
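/*
 * A minimal sketch of the observer mechanism above (hypothetical subclass & callback):
 * const onChange = tag => console.log('notified:', tag);
 * someObservable.subscribe(onChange, null, 'gpu');  // extra args are replayed on every _notify()
 * // ...later, inside the subclass: this._notify(); // logs "notified: gpu"
 * someObservable.unsubscribe(onChange, null);       // must match both fn and thisptr
 */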
  3368. /***/ }),
  3369. /***/ 6731:
  3370. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_123384__) => {
  3371. "use strict";
  3372. /* harmony export */ __nested_webpack_require_123384__.d(__webpack_exports__, {
  3373. /* harmony export */ "DD": () => (/* binding */ MediaType),
  3374. /* harmony export */ "D3": () => (/* binding */ ImageFormat),
  3375. /* harmony export */ "hE": () => (/* binding */ PixelComponent),
  3376. /* harmony export */ "rY": () => (/* binding */ ColorComponentId)
  3377. /* harmony export */ });
  3378. /*
  3379. * speedy-vision.js
  3380. * GPU-accelerated Computer Vision for JavaScript
  3381. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  3382. *
  3383. * Licensed under the Apache License, Version 2.0 (the "License");
  3384. * you may not use this file except in compliance with the License.
  3385. * You may obtain a copy of the License at
  3386. *
  3387. * http://www.apache.org/licenses/LICENSE-2.0
  3388. *
  3389. * Unless required by applicable law or agreed to in writing, software
  3390. * distributed under the License is distributed on an "AS IS" BASIS,
  3391. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3392. * See the License for the specific language governing permissions and
  3393. * limitations under the License.
  3394. *
  3395. * types.js
  3396. * Types & formats
  3397. */
  3398. /**
  3399. * Media types
  3400. * @enum {Symbol}
  3401. */
  3402. const MediaType = Object.freeze({
  3403. Image: Symbol('Image'),
  3404. Video: Symbol('Video'),
  3405. Canvas: Symbol('Canvas'),
  3406. Bitmap: Symbol('Bitmap'),
  3407. });
  3408. /**
  3409. * Image formats
  3410. * @enum {Symbol}
  3411. */
  3412. const ImageFormat = Object.freeze({
  3413. RGBA: Symbol('RGBA'),
  3414. GREY: Symbol('GREY'),
  3415. });
  3416. /**
  3417. * Pixel component (bitwise flags)
  3418. * @typedef {number} PixelComponent
  3419. */
  3420. const PixelComponent = Object.freeze({
  3421. RED: 1,
  3422. GREEN: 2,
  3423. BLUE: 4,
  3424. ALPHA: 8,
  3425. ALL: 15 // = RED | GREEN | BLUE | ALPHA
  3426. });
  3427. /**
  3428. * Component ID utility
  3429. */
  3430. const ColorComponentId = Object.freeze({
  3431. [PixelComponent.RED]: 0,
  3432. [PixelComponent.GREEN]: 1,
  3433. [PixelComponent.BLUE]: 2,
  3434. [PixelComponent.ALPHA]: 3
  3435. });
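/*
 * Sketch: the PixelComponent values are bitwise flags, so they can be combined,
 * e.g. (PixelComponent.RED | PixelComponent.ALPHA) === 9, while ColorComponentId
 * maps a single flag to its channel index: ColorComponentId[PixelComponent.BLUE] === 2.
 */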
  3436. /***/ }),
  3437. /***/ 5484:
  3438. /***/ ((__unused_webpack_module, __webpack_exports__, __nested_webpack_require_125347__) => {
  3439. "use strict";
  3440. /* harmony export */ __nested_webpack_require_125347__.d(__webpack_exports__, {
  3441. /* harmony export */ "c": () => (/* binding */ Utils)
  3442. /* harmony export */ });
  3443. /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_125347__(3841);
  3444. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_125347__(4500);
  3445. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_125347__(3135);
  3446. /*
  3447. * speedy-vision.js
  3448. * GPU-accelerated Computer Vision for JavaScript
  3449. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  3450. *
  3451. * Licensed under the Apache License, Version 2.0 (the "License");
  3452. * you may not use this file except in compliance with the License.
  3453. * You may obtain a copy of the License at
  3454. *
  3455. * http://www.apache.org/licenses/LICENSE-2.0
  3456. *
  3457. * Unless required by applicable law or agreed to in writing, software
  3458. * distributed under the License is distributed on an "AS IS" BASIS,
  3459. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3460. * See the License for the specific language governing permissions and
  3461. * limitations under the License.
  3462. *
  3463. * utils.js
  3464. * Generic utilities
  3465. */
  3466. /**
  3467. * Generic utilities
  3468. */
  3469. class Utils
  3470. {
  3471. /**
  3472. * Generates a warning
  3473. * @param {string} text message text
  3474. * @param {...string} args optional text
  3475. */
  3476. static warning(text, ...args)
  3477. {
  3478. //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
  3479. if(_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings.logging */ .Z.logging !== 'none')
  3480. console.warn('[speedy-vision] ' + text, ...args);
  3481. }
  3482. /**
  3483. * Logs a message
  3484. * @param {string} text message text
  3485. * @param {...string} args optional text
  3486. */
  3487. static log(text, ...args)
  3488. {
  3489. if(_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings.logging */ .Z.logging !== 'none')
  3490. console.log('[speedy-vision] ' + text, ...args);
  3491. }
  3492. /**
  3493. * Assertion
  3494. * @param {boolean} expr expression
  3495. * @param {string} [text] error message
  3496. * @throws {AssertionError}
  3497. */
  3498. static assert(expr, text = '')
  3499. {
  3500. if(!expr)
  3501. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .ps(text);
  3502. }
  3503. /**
  3504. * Gets the names of the arguments of the specified function
  3505. * @param {Function} fun
  3506. * @returns {string[]}
  3507. */
  3508. static functionArguments(fun)
  3509. {
  3510. const code = fun.toString();
  3511. const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' :
  3512. (code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>');
  3513. const match = new RegExp(regex).exec(code);
  3514. if(match !== null) {
  3515. const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
  3516. return args.split(',').map(argname =>
  3517. argname.replace(/=.*$/, '').trim() // remove default params & trim
  3518. ).filter(argname =>
  3519. argname // handle trailing commas
  3520. );
  3521. }
  3522. else
  3523. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .D3(`Can't detect function arguments of ${code}`);
  3524. }
  3525. /**
  3526. * Get all property descriptors from an object,
  3527. * traversing its entire prototype chain
  3528. * @param {object} obj
  3529. * @returns {object}
  3530. */
  3531. static getAllPropertyDescriptors(obj)
  3532. {
  3533. if(obj) {
  3534. const proto = Object.getPrototypeOf(obj);
  3535. return {
  3536. ...(Utils.getAllPropertyDescriptors(proto)),
  3537. ...Object.getOwnPropertyDescriptors(obj)
  3538. };
  3539. }
  3540. else
  3541. return Object.create(null);
  3542. }
  3543. /**
  3544. * Creates a HTMLCanvasElement with the given dimensions
  3545. * @param {number} width in pixels
  3546. * @param {number} height in pixels
  3547. * @returns {HTMLCanvasElement}
  3548. */
  3549. static createCanvas(width, height)
  3550. {
  3551. const canvas = document.createElement('canvas');
  3552. canvas.width = width;
  3553. canvas.height = height;
  3554. return canvas;
  3555. }
  3556. /**
  3557. * Generates a random number with
  3558. * Gaussian distribution (mu, sigma)
  3559. * @param {number} mu mean
  3560. * @param {number} sigma standard deviation
  3561. * @returns {number} random number
  3562. */
  3563. static gaussianNoise(mu = 0, sigma = 1)
  3564. {
  3565. // Box-Muller transformation
  3566. const TWO_PI = 2.0 * Math.PI;
  3567. let a, b = Math.random();
  3568. do { a = Math.random(); } while(a <= Number.EPSILON);
  3569. let z = Math.sqrt(-2 * Math.log(a)) * Math.sin(TWO_PI * b);
  3570. return z * sigma + mu;
  3571. }
  3572. /**
  3573. * Generate a 1D gaussian kernel with custom sigma
  3574. * Tip: use kernelSize >= (5 * sigma), kernelSize odd
  3575. * @param {number} sigma gaussian sigma
  3576. * @param {number} [kernelSize] kernel size, odd number
  3577. * @param {boolean} [normalized] normalize entries so that their sum is 1
  3578. * @returns {number[]}
  3579. */
  3580. static gaussianKernel(sigma, kernelSize = 0, normalized = true)
  3581. {
  3582. /*
  3583. * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
  3584. *
  3585. * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
  3586. *
  3587. * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
  3588. *
  3589. * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
  3590. * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
  3591. *
  3592. * Setting a constant c := sqrt(2) * sigma, it follows that:
  3593. *
  3594. * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
  3595. */
  3596. // default kernel size
  3597. if(kernelSize == 0) {
  3598. kernelSize = Math.ceil(5.0 * sigma) | 0;
  3599. kernelSize += 1 - (kernelSize % 2);
  3600. }
  3601. // validate input
  3602. kernelSize |= 0;
  3603. if(kernelSize < 1 || kernelSize % 2 == 0)
  3604. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .mG(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);
  3605. else if(sigma <= 0.0)
  3606. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .mG(`Invalid sigma given to gaussianKernel: ${sigma}`);
  3607. // function erf(x) = -erf(-x) can be approximated numerically. See:
  3608. // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
  3609. const kernel = new Array(kernelSize);
  3610. // set constants
  3611. const N = kernelSize >> 1; // integer (floor, div 2)
  3612. const c = (+sigma) * 1.4142135623730951; // sigma * sqrt(2)
  3613. const m = 0.3275911;
  3614. const a1 = 0.254829592;
  3615. const a2 = -0.284496736;
  3616. const a3 = 1.421413741;
  3617. const a4 = -1.453152027;
  3618. const a5 = 1.061405429;
  3619. // compute the kernel
  3620. let sum = 0.0;
  3621. for(let j = 0; j < kernelSize; j++) {
  3622. let xa = (j - N + 0.5) / c;
  3623. let xb = (j - N - 0.5) / c;
  3624. let sa = 1.0, sb = 1.0;
  3625. if(xa < 0.0) { sa = -1.0; xa = -xa; }
  3626. if(xb < 0.0) { sb = -1.0; xb = -xb; }
  3627. const ta = 1.0 / (1.0 + m * xa);
  3628. const tb = 1.0 / (1.0 + m * xb);
  3629. const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
  3630. const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
  3631. const ya = 1.0 - pa * Math.exp(-xa * xa);
  3632. const yb = 1.0 - pb * Math.exp(-xb * xb);
  3633. const erfa = sa * ya;
  3634. const erfb = sb * yb;
  3635. const fp = (erfa - erfb) / (2.0 * c);
  3636. kernel[j] = fp;
  3637. sum += fp;
  3638. }
  3639. // normalize the kernel
  3640. if(normalized) {
  3641. for(let j = 0; j < kernelSize; j++)
  3642. kernel[j] /= sum;
  3643. }
  3644. // done!
  3645. return kernel;
  3646. }
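/*
 * A rough usage sketch: Utils.gaussianKernel(1.0, 5) returns a symmetric 5-tap
 * kernel, approximately [0.06, 0.24, 0.39, 0.24, 0.06], whose entries sum to 1.
 * Omitting kernelSize picks ceil(5 * sigma), bumped to the next odd number if even.
 */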
  3647. /**
  3648. * Generate a 2D kernel in column-major format using two separable 1D kernels
  3649. * @param {number[]} ka 1D kernel
  3650. * @param {number[]} [kb]
  3651. * @returns {number[]}
  3652. */
  3653. static kernel2d(ka, kb = ka)
  3654. {
  3655. const ksize = ka.length;
3656. Utils.assert(ka.length == kb.length);
  3657. Utils.assert(ksize >= 1 && ksize % 2 == 1);
  3658. // compute the outer product ka x kb
  3659. let kernel2d = new Array(ksize * ksize), k = 0;
  3660. for(let col = 0; col < ksize; col++) {
  3661. for(let row = 0; row < ksize; row++)
  3662. kernel2d[k++] = ka[row] * kb[col];
  3663. }
  3664. return kernel2d;
  3665. }
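/*
 * Sketch: Utils.kernel2d([1, 2, 1]) takes the outer product of the 1D kernel with
 * itself, yielding [1, 2, 1, 2, 4, 2, 1, 2, 1] in column-major order (symmetric
 * here, so column-major and row-major coincide).
 */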
  3666. /**
  3667. * Cartesian product a x b: [ [ai, bj] for all i, j ]
  3668. * @param {number[]} a
  3669. * @param {number[]} b
  3670. * @returns {Array<[number,number]>}
  3671. */
  3672. static cartesian(a, b)
  3673. {
  3674. return [].concat(...a.map(a => b.map(b => [a, b])));
  3675. }
  3676. /**
  3677. * Symmetric range
  3678. * @param {number} n non-negative integer
  3679. * @returns {number[]} [ -n, ..., n ]
  3680. */
  3681. static symmetricRange(n)
  3682. {
  3683. if((n |= 0) < 0)
  3684. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .mG(`Expected a non-negative integer as input`);
  3685. return [...(Array(2*n + 1).keys())].map(x => x - n);
  3686. }
  3687. /**
  3688. * Compute the [0, n) range of integers
  3689. * @param {number} n positive integer
  3690. * @returns {number[]} [ 0, 1, ..., n-1 ]
  3691. */
  3692. static range(n)
  3693. {
  3694. if((n |= 0) <= 0)
  3695. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .mG(`Expected a positive integer as input`);
  3696. return [...(Array(n).keys())];
  3697. }
  3698. /**
  3699. * Shuffle in-place
  3700. * @template T
  3701. * @param {T[]} arr
  3702. * @returns {T[]} arr
  3703. */
  3704. static shuffle(arr)
  3705. {
  3706. const len = arr.length;
  3707. const m = len - 1;
3708. // Fisher-Yates
  3709. for(let i = 0; i < m; i++) {
  3710. const j = i + ((Math.random() * (len - i)) | 0); // i <= j < arr.length
  3711. if(i !== j) {
  3712. const t = arr[i];
  3713. arr[i] = arr[j];
  3714. arr[j] = t;
  3715. }
  3716. }
  3717. return arr;
  3718. }
  3719. /**
  3720. * Flatten an array (1 level only)
  3721. * @template U
  3722. * @param {U[]} array
  3723. * @returns {U[]}
  3724. */
  3725. static flatten(array)
  3726. {
  3727. //return array.flat();
  3728. //return array.reduce((arr, val) => arr.concat(val), []);
  3729. const flat = [];
  3730. for(let i = 0, n = array.length; i < n; i++) {
  3731. const entry = array[i];
  3732. if(Array.isArray(entry)) {
  3733. for(let j = 0, m = entry.length; j < m; j++)
  3734. flat.push(entry[j]);
  3735. }
  3736. else
  3737. flat.push(entry);
  3738. }
  3739. return flat;
  3740. }
  3741. /**
3742. * Decode a 16-bit float from an
  3743. * unsigned 16-bit integer
  3744. * @param {number} uint16
  3745. * @returns {number}
  3746. */
  3747. static decodeFloat16(uint16)
  3748. {
  3749. // decode according to sec 2.1.2
  3750. // 16-Bit Floating Point Numbers
  3751. // of the OpenGL ES 3 spec
  3752. const s = (uint16 & 0xFFFF) >> 15; // sign bit
  3753. const e = (uint16 & 0x7FFF) >> 10; // exponent
  3754. const m = (uint16 & 0x3FF); // mantissa
  3755. const sign = 1 - 2 * s; // (-1)^s
  3756. if(e == 0)
  3757. return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
  3758. else if(e == 31)
  3759. return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
  3760. const f = e >= 15 ? (1 << (e-15)) : 1.0 / (1 << (15-e)); // 2^(e-15)
  3761. return sign * f * (1.0 + m * 0.0009765625); // normal
  3762. }
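/*
 * Spot checks for the decoder above: decodeFloat16(0x3C00) === 1.0,
 * decodeFloat16(0xC000) === -2.0, and decodeFloat16(0x0001) is the smallest
 * positive subnormal, 5.960464477539063e-8.
 */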
  3763. /**
  3764. * Wrapper around getUserMedia()
  3765. * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
  3766. * @returns {SpeedyPromise<HTMLVideoElement>}
  3767. */
  3768. static requestCameraStream(constraints = { audio: false, video: true })
  3769. {
  3770. Utils.log('Accessing the webcam...');
  3771. if(!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  3772. throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .B8('Unsupported browser: no mediaDevices.getUserMedia()');
  3773. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .s((resolve, reject) => {
  3774. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  3775. const video = document.createElement('video');
  3776. video.onloadedmetadata = () => {
  3777. video.play();
  3778. Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
  3779. resolve(video);
  3780. };
  3781. video.setAttribute('playsinline', '');
  3782. video.setAttribute('autoplay', '');
  3783. if(constraints.audio === false || constraints.audio === undefined)
  3784. video.setAttribute('muted', '');
  3785. video.srcObject = stream;
  3786. })
  3787. .catch(err => {
  3788. if(err.name === 'NotAllowedError') {
  3789. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .$y(
  3790. `Please give access to the camera and reload the page.`,
  3791. err
  3792. ));
  3793. }
  3794. else if(err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
  3795. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .B8(
  3796. `Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`,
  3797. err
  3798. ));
  3799. }
  3800. else {
  3801. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .nU(
  3802. `Can't access the webcam.`,
  3803. err
  3804. ));
  3805. }
  3806. });
  3807. });
  3808. }
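/*
 * A minimal usage sketch:
 * Utils.requestCameraStream({ video: { width: 640 }, audio: false })
 *      .then(video => document.body.appendChild(video));
 * The promise resolves with a playing (and muted, since audio is off) <video>
 * element attached to the camera stream; denied access rejects with AccessDeniedError.
 */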
  3809. /**
  3810. * Format binary data as a string with hex values
  3811. * @param {ArrayBuffer} bytes
  3812. * @returns {string}
  3813. */
  3814. static formatBinaryData(bytes)
  3815. {
  3816. const uint8 = new Uint8Array(bytes);
  3817. const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
  3818. return array.join(' ');
  3819. }
  3820. }
  3821. /***/ }),
  3822. /***/ 4645:
  3823. /***/ ((module) => {
  3824. module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3825. /***/ }),
  3826. /***/ 6942:
  3827. /***/ ((module) => {
  3828. module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#define Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3829. /***/ }),
  3830. /***/ 7054:
  3831. /***/ ((module) => {
  3832. module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
  3833. /***/ }),
  3834. /***/ 8961:
  3835. /***/ ((module) => {
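// GLSL fragment shader: illumination correction. Applies a sigmoid contrast boost of the pixel
// against an illumination map, with gain decaying away from the image center; the color path
// converts to YUV, adjusts only the luma and converts back to RGB.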
  3836. module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
  3837. /***/ }),
  3838. /***/ 9571:
  3839. /***/ ((module) => {
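// GLSL fragment shader: intensity normalization. Reads per-channel (min, range, value) data from a
// min-max reduction texture and linearly remaps intensities to the [minValue, maxValue] range.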
  3840. module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
  3841. /***/ }),
  3842. /***/ 8466:
  3843. /***/ ((module) => {
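// GLSL fragment shader: greyscale conversion using Rec. 601 luma weights (0.299, 0.587, 0.114).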
  3844. module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
  3845. /***/ }),
  3846. /***/ 2545:
  3847. /***/ ((module) => {
  3848. module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
  3849. /***/ }),
  3850. /***/ 7373:
  3851. /***/ ((module) => {
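// GLSL include "filters.glsl": laplacian() evaluates a 4-neighbour Laplacian kernel at a given
// pyramid level (lod) and scales the response by (1 + lod).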
  3852. module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
  3853. /***/ }),
  3854. /***/ 2229:
  3855. /***/ ((module) => {
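// GLSL include "fixed-point.glsl": fixed-point helpers with FIX_BITS fractional bits; used to
// encode subpixel coordinates (e.g. keypoint positions) in integer texels.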
  3856. module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((f) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
  3857. /***/ }),
  3858. /***/ 919:
  3859. /***/ ((module) => {
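// GLSL include "float16.glsl": packs a half-float into two 8-bit channels via packHalf2x16 /
// unpackHalf2x16, plus sentinel encodings for null pairs, discarded pairs and NaN.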
  3860. module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
  3861. /***/ }),
  3862. /***/ 3815:
  3863. /***/ ((module) => {
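// GLSL include "global.glsl": addressing macros shared by the fragment shaders (thread location,
// output size, and pixel fetches with short or long offsets).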
  3864. module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
  3865. /***/ }),
  3866. /***/ 1830:
  3867. /***/ ((module) => {
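// GLSL include "int32.glsl": encodes / decodes a 32-bit unsigned integer in the four RGBA bytes
// (low byte in red); two equivalent encoders are provided, selected by the #if 1 switch.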
  3868. module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\n#if 1\nvec4 encodeUint32(uint value)\n{\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n}\n#else\nvec4 encodeUint32(uint value)\n{\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n}\n#endif\n#endif"
  3869. /***/ }),
  3870. /***/ 1364:
  3871. /***/ ((module) => {
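// GLSL include "keypoint-descriptors.glsl": reads DESCRIPTOR_SIZE-byte binary descriptors (4 bytes
// per texel) and computes their Hamming distance with a 256-entry popcount lookup table.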
  3872. module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
  3873. /***/ }),
  3874. /***/ 4004:
  3875. /***/ ((module) => {
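// GLSL include "keypoint-matches.glsl": a match is an (index, distance) pair packed into 32 bits,
// with the low MATCH_INDEX_BITS bits holding the index; MATCH_NOT_FOUND is the sentinel value.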
  3876. module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
  3877. /***/ }),
  3878. /***/ 8714:
  3879. /***/ ((module) => {
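// GLSL include "keypoints.glsl": encoded keypoint layout and helpers. The header stores a
// fixed-point (x, y) position, encoded LOD, orientation and a half-float score; a position texel
// of all ones marks a null keypoint and all zeros a discarded one.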
  3880. module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
  3881. /***/ }),
  3882. /***/ 9010:
  3883. /***/ ((module) => {
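// GLSL include "math.glsl": angle constants plus fast polynomial approximations of atan and atan2.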
  3884. module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
  3885. /***/ }),
  3886. /***/ 6433:
  3887. /***/ ((module) => {
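// GLSL include "pyramids.glsl": pyramid sampling macros and the encodeLod / decodeLod mapping of a
// level-of-detail to a [0, 1] value, with LOD_EPS tolerance for comparisons.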
  3888. module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
  3889. /***/ }),
  3890. /***/ 4697:
  3891. /***/ ((module) => {
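// GLSL include "subpixel.glsl": subpixelAtBI() samples an image at a fractional position using
// bilinear interpolation of the four neighbouring texels.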
  3892. module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
  3893. /***/ }),
  3894. /***/ 2289:
  3895. /***/ ((module) => {
  3896. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3897. /***/ }),
  3898. /***/ 5725:
  3899. /***/ ((module) => {
  3900. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3901. /***/ }),
  3902. /***/ 3801:
  3903. /***/ ((module) => {
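// GLSL fragment shader: warps keypoint positions by a 3x3 homography (including the projective
// division); all other keypoint data is passed through unchanged.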
  3904. module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
  3905. /***/ }),
  3906. /***/ 2346:
  3907. /***/ ((module) => {
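// GLSL fragment shader: brute-force descriptor matching. Each pass scans NUMBER_OF_KEYPOINTS_PER_PASS
// database keypoints and keeps the closest candidate that is still farther than the match recorded in
// encodedFilters, so repeated passes can extract the next-best (k-th) matches.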
  3908. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
  3909. /***/ }),
  3910. /***/ 4180:
  3911. /***/ ((module) => {
  3912. module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
  3913. /***/ }),
  3914. /***/ 7771:
  3915. /***/ ((module) => {
  3916. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
  3917. /***/ }),
  3918. /***/ 8938:
  3919. /***/ ((module) => {
  3920. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3921. /***/ }),
  3922. /***/ 4802:
  3923. /***/ ((module) => {
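// GLSL fragment shader: extends per-pixel "skip offsets" (raster distance to the next corner) by
// pointer jumping, up to MAX_ITERATIONS hops per pass; presumably used so the keypoint encoder can
// skip long runs of non-corner pixels.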
  3924. module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
  3925. /***/ }),
  3926. /***/ 6253:
  3927. /***/ ((module) => {
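// GLSL fragment shader: initializes the skip offsets by counting up to 7 consecutive non-corner
// pixels ahead of the current one in raster order.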
  3928. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
  3929. /***/ }),
  3930. /***/ 384:
  3931. /***/ ((module) => {
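// GLSL fragment shader: walks the skip-offset image to locate the q-th corner and writes its encoded
// position; the search is split across numPasses passes, each resuming from the last keypoint found
// by the previous pass.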
  3932. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
  3933. /***/ }),
  3934. /***/ 500:
  3935. /***/ ((module) => {
  3936. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
  3937. /***/ }),
  3938. /***/ 3673:
  3939. /***/ ((module) => {
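// GLSL fragment shader: encodes keypoints from a lookup table of corner locations; the header gets
// the position, the LOD read from the corners texture, a zero orientation and the corner score.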
  3940. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
  3941. /***/ }),
  3942. /***/ 1703:
  3943. /***/ ((module) => {
  3944. module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
  3945. /***/ }),
  3946. /***/ 2633:
  3947. /***/ ((module) => {
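// GLSL fragment shader: FAST-9,16 corner detector. Four cardinal pixels of the 16-pixel circle are
// tested first for early rejection; the large boolean expression is the unrolled test for 9
// contiguous brighter or darker pixels, and the score is the mean excess over the threshold along
// the circle. The strongest response across pyramid levels is kept.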
  3948. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
  3949. /***/ }),
  3950. /***/ 535:
  3951. /***/ ((module) => {
  3952. module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
  3953. /***/ }),
  3954. /***/ 3232:
  3955. /***/ ((module) => {
  3956. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3957. /***/ }),
  3958. /***/ 8356:
  3959. /***/ ((module) => {
  3960. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
  3961. /***/ }),
  3962. /***/ 7339:
  3963. /***/ ((module) => {
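// GLSL fragment shader: corner response over a Gaussian-weighted window. The response is the smaller
// eigenvalue of the 2x2 structure tensor, 0.5 * (a + c - sqrt((a - c)^2 + 4 b^2)), divided by the
// window area (Shi-Tomasi style); the scale with the stronger Laplacian response is kept.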
  3964. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
  3965. /***/ }),
  3966. /***/ 3177:
  3967. /***/ ((module) => {
  3968. module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
  3969. /***/ }),
  3970. /***/ 2769:
  3971. /***/ ((module) => {
  3972. module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
  3973. /***/ }),
  3974. /***/ 2006:
  3975. /***/ ((module) => {
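// GLSL fragment shader: evaluates the Laplacian at two nearby scales around each corner's LOD
// (lod - lodStep and lod + lodStep, shifted by lodOffset) and stores the pair as half-floats,
// for scale selection.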
  3976. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
  3977. /***/ }),
  3978. /***/ 3329:
  3979. /***/ ((module) => {
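// GLSL fragment shader: one level of pyramidal Lucas-Kanade optical flow. For each keypoint it reads
// a (WINDOW_SIZE + 2)^2 window from the previous and next pyramids, computes Scharr-like derivatives
// (3, 10, 3 kernel), builds the 2x2 structure tensor, and refines the flow with up to
// numberOfIterations Newton steps (stopping when the update falls below epsilon); keypoints with a
// weak tensor or flow leaving the image are discarded at level 0.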
  3980. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
  3981. /***/ }),
  3982. /***/ 4251:
  3983. /***/ ((module) => {
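// GLSL fragment shader (multi-stage): builds a lookup table of corner locations. Stage 1 flags
// corner pixels; later stages merge 2x2 blocks of the table, keeping per-block corner counts and
// routing each output cell to the position of its corner, presumably so the lookup-table keypoint
// encoder can address corners by index.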
  3984. module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 4747:
/***/ ((module) => {
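// Vertex shader companion to the lookup-table kernel above: outputs texture coordinates of the 3x3
// block neighborhood at offsets of ±blockSize (v_topLeft ... v_bottomRight).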
  3988. module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
/***/ }),
/***/ 7421:
/***/ ((module) => {
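// Keypoint matching via multi-probe LSH: descriptorHash() hashes binary descriptors using bit sequences from a UBO,
// the SWAP table enumerates probes within Hamming distance LEVEL of that hash, and each probed bucket of
// descriptorDB is scanned for the best candidate not excluded by the filters texture.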
  3992. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
/***/ }),
/***/ 4523:
/***/ ((module) => {
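// Multi-stage keypoint stream kernel: STAGE 1 concatenates two encoded keypoint sets, STAGE 2 flags valid keypoints,
// STAGE 3 compacts the index array block by block, STAGE 4 gathers keypoint data through the compacted indices,
// and STAGE 5 renders a debug view.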
  3996. module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 2277:
/***/ ((module) => {
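// Scale-space non-maximum suppression: compares each corner's strength (its score, or |Laplacian| when
// USE_LAPLACIAN is set) against 3x3 neighborhoods at the adjacent pyramid levels lod ± lodStep and
// zeroes scores that are not local maxima.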
  4000. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
/***/ }),
/***/ 8430:
/***/ ((module) => {
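// Same-LOD non-maximum suppression: discards corners that sit far from the center of their pot-sized grid cell,
// then keeps a corner only if its score is the maximum of its 3x3 neighborhood at the same pyramid level.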
  4004. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
/***/ }),
/***/ 9743:
/***/ ((module) => {
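// Ring-based non-maximum suppression: compares a corner's score against an 8-pixel inner ring and, when
// MULTISCALE != 0, a 16-pixel middle ring, considering only neighbors whose encoded LOD matches
// (or is lodStep away from) the corner's own LOD.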
  4008. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
/***/ }),
/***/ 3464:
/***/ ((module) => {
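// ORB descriptor kernel: pat31 lists 256 BRIEF point pairs that are rotated by the keypoint's quantized
// orientation and compared in the image at the keypoint's scale; each output pixel packs 32 such tests
// into 4 bytes of the 32-byte binary descriptor.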
  4012. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-
8,-13),\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
/***/ }),
/***/ 7184:
/***/ ((module) => {
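// Keypoint orientation by intensity centroid: accumulates image moments over a circular patch
// (diskPoint offsets, radius scaled by the keypoint's LOD) and stores the encoded angle fastAtan2(m.y, m.x).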
  4016. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(
10,-5),P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
/***/ }),
/***/ 7220:
/***/ ((module) => {
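// LOD refinement: evaluates corner strength (Laplacian for METHOD 0, a FAST-like score for METHOD 1) at
// lod - lodStep, lod and lod + lodStep, fits a parabola to the three samples and writes the interpolated,
// encoded LOD back into the keypoint.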
  4020. module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
/***/ }),
/***/ 805:
/***/ ((module) => {
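// Parallel max-reduction of corner scores: on each iteration every pixel keeps the largest float16-encoded
// score among itself and three partners located jump = 2^iterationNumber pixels away within its cluster.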
  4024. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
/***/ }),
/***/ 8736:
/***/ ((module) => {
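// Keypoint shuffle: permutes encoded keypoints according to a std140 UBO, replacing each keypoint's data with
// that of the keypoint at the permuted index (the permutation acts within blocks of PERMUTATION_MAXLEN entries).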
  4028. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
/***/ }),
/***/ 9311:
/***/ ((module) => {
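// Keypoint sorting by score: STAGE 1 builds (index, score) permutation elements, STAGE 2 merges pairs of sorted
// blocks with a k-th element selection (selectKth), and STAGE 3 gathers keypoint data in sorted order,
// nullifying everything beyond maxKeypoints.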
  4032. module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
/***/ }),
/***/ 9423:
/***/ ((module) => {
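// Subpixel refinement: builds a local corner-response map from image derivatives around each keypoint and refines
// its peak with a 1D quadratic fit (METHOD 0), a 2D Taylor expansion (METHOD 1) or iterative bilinear/bicubic
// upsampling (METHODs 2-3), outputting the offset as an encoded float16 pair.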
  4036. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
  4037. /***/ }),
  4038. /***/ 2060:
  4039. /***/ ((module) => {
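// GLSL (annotation, based on a reading of the shader below): adds the flow vector stored in encodedFlow to each keypoint's position; keypoints whose flow was discarded are discarded as well.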
  4040. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
  4041. /***/ }),
  4042. /***/ 5463:
  4043. /***/ ((module) => {
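// GLSL (annotation): copies the orientation stored in encodedOrientations into the corresponding encoded keypoint — the green channel of the pixel at offset 1 of each keypoint record.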
  4044. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
  4045. /***/ }),
  4046. /***/ 6986:
  4047. /***/ ((module) => {
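// GLSL (annotation): copies per-keypoint extra data from encodedData into the extra cells of the encoded keypoint texture, skipping null/bad keypoints.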
  4048. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
  4049. /***/ }),
  4050. /***/ 3179:
  4051. /***/ ((module) => {
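// GLSL (annotation): uploads keypoints from a std140 uniform buffer (KeypointBuffer) into the encoded keypoint texture — position at offset 0; lod, a zero orientation and the score at offset 1.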
  4052. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
  4053. /***/ }),
  4054. /***/ 8680:
  4055. /***/ ((module) => {
  4056. module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
  4057. /***/ }),
  4058. /***/ 3384:
  4059. /***/ ((module) => {
  4060. module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
  4061. /***/ }),
  4062. /***/ 1976:
  4063. /***/ ((module) => {
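// GLSL (annotation): pixel-wise linear combination alpha * image0 + beta * image1 + gamma, clamped to [0, 1], with the alpha channel forced to 1.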
  4064. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
  4065. /***/ }),
  4066. /***/ 4543:
  4067. /***/ ((module) => {
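// GLSL (annotation): image resampling — INTERPOLATION_METHOD == 0 selects nearest-neighbor sampling, == 1 selects bilinear interpolation via subpixelAtBI.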
  4068. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
  4069. /***/ }),
  4070. /***/ 6296:
  4071. /***/ ((module) => {
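// GLSL (annotation): perspective warp — maps each output pixel through the inverse homography and samples the source with bilinear interpolation; out-of-bounds samples become emptyColor (opaque black).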
  4072. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
  4073. /***/ }),
  4074. /***/ 747:
  4075. /***/ ((module) => {
  4076. module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
  4077. /***/ }),
  4078. /***/ 9176:
  4079. /***/ ((module) => {
  4080. module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
  4081. /***/ }),
  4082. /***/ 8960:
  4083. /***/ ((module) => {
  4084. module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
  4085. /***/ }),
  4086. /***/ 3294:
  4087. /***/ ((module) => {
  4088. module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
  4089. /***/ }),
  4090. /***/ 1959:
  4091. /***/ ((module) => {
  4092. module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
  4093. /***/ }),
  4094. /***/ 7290:
  4095. /***/ ((module) => {
  4096. module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
  4097. /***/ }),
  4098. /***/ 7270:
  4099. /***/ ((module) => {
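// GLSL (annotation): one pass of a parallel min-max reduction — samples a 2x2 pattern spaced by 2^iterationNumber within each cluster and stores the running max in .r, the running min in .g and their difference in .b.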
  4100. module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
  4101. /***/ }),
  4102. /***/ 48:
  4103. /***/ ((module) => {
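// GLSL (annotation): applies 3x3 Sobel kernels to a window of the given pyramid level and encodes the resulting (dI/dx, dI/dy) as a pair of float16 values.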
  4104. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
  4105. /***/ }),
  4106. /***/ 3713:
  4107. /***/ ((module) => {
  4108. module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
  4109. /***/ }),
  4110. /***/ 4209:
  4111. /***/ ((module) => {
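// Base64-encoded WebAssembly binary (the payload starts with the "\0asm" magic); its export table appears to include malloc/free/srand and Mat32_* matrix routines such as Mat32_create, Mat32_qr_full, Mat32_homography_ndlt and Mat32_pransac_homography.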
  4112. module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
  4113. f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
  4114. f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
  4115. BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
  4116. AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
  4117. CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
  4118. Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
  4119. AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
  4120. dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
  4121. TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
  4122. X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
  4123. MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
  4124. ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
  4125. PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
  4126. CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
  4127. AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
  4128. gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
  4129. AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
  4130. QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
  4131. AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
  4132. gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
  4133. IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
  4134. gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
  4135. hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
  4136. nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
  4137. AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
  4138. EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
  4139. IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
  4140. AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
  4141. IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
  4142. gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
  4143. AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
  4144. IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
  4145. AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
  4146. AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
  4147. IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
  4148. AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
  4149. QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
  4150. gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
  4151. IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
  4152. OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
  4153. IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
  4154. AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
  4155. APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
  4156. IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
  4157. AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
  4158. IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
  4159. CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
  4160. hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
  4161. IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
  4162. AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
  4163. BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
  4164. QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
  4165. ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
  4166. AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
  4167. BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
  4168. iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
  4169. IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
  4170. AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
  4171. gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
  4172. QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
  4173. QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
  4174. ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
  4175. gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
  4176. NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
  4177. AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
  4178. gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
  4179. IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
  4180. a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
  4181. KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
  4182. QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
  4183. CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
  4184. ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
  4185. QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
  4186. CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
  4187. AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
  4188. QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
  4189. ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
  4190. BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
  4191. AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
  4192. KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
  4193. EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
  4194. DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
  4195. BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
  4196. Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
  4197. DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
  4198. AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
  4199. QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
  4200. IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
  4201. QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
  4202. QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
  4203. IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
  4204. CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
  4205. KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
  4206. IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
  4207. ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
  4208. KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
  4209. AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
  4210. DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
  4211. QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
  4212. QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
  4213. ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
  4214. EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
  4215. SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
  4216. KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
  4217. gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
  4218. ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
  4219. ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
  4220. IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
  4221. IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
  4222. IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
  4223. DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
  4224. A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
  4225. akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
  4226. DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
  4227. u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
  4228. AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
  4229. IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
  4230. IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
  4231. aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
  4232. QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
  4233. KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
  4234. bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
  4235. IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
  4236. IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
  4237. IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
  4238. An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
  4239. DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
  4240. KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
  4241. QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
  4242. BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
  4243. QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
  4244. KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
  4245. ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
  4246. GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
  4247. QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
  4248. ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
  4249. B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
  4250. DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
  4251. BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
  4252. bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
  4253. IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
  4254. DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
  4255. IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
  4256. QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
  4257. FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
  4258. DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
  4259. AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
  4260. AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
  4261. QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
  4262. AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
  4263. EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
  4264. SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
  4265. Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
  4266. ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
  4267. IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
  4268. fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
  4269. IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
  4270. gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
  4271. gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
  4272. CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
  4273. IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
  4274. ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
  4275. DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
  4276. KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
  4277. gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
  4278. ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
  4279. AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
  4280. gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
  4281. aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
  4282. SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
  4283. CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
  4284. CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
  4285. CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
  4286. CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
  4287. AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
  4288. ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
  4289. gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
  4290. QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
  4291. AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
  4292. lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
  4293. kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
  4294. 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
  4295. ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
  4296. NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
  4297. gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
  4298. BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
  4299. AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
  4300. QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
  4301. GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
  4302. C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
  4303. DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
  4304. QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
  4305. IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
  4306. DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
  4307. f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
  4308. gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
  4309. gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
  4310. IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
  4311. AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
  4312. aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
  4313. bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
  4314. IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
  4315. AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
  4316. ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
  4317. gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
  4318. AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
  4319. IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
  4320. BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
  4321. l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
  4322. gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
  4323. AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
  4324. AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
  4325. aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
  4326. ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
  4327. aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
  4328. AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
  4329. FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
  4330. IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
  4331. oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
  4332. AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
  4333. IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
  4334. IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
  4335. 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
  4336. oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
  4337. IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
  4338. PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
  4339. APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
  4340. oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
  4341. GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
  4342. IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
  4343. mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
  4344. BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
  4345. AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
  4346. oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
  4347. oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
  4348. FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
  4349. oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
  4350. IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
  4351. JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
  4352. ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
  4353. AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
  4354. DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
  4355. IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
  4356. ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
  4357. CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
  4358. QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
  4359. IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
  4360. IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
  4361. ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
  4362. AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
  4363. CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
  4364. dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
  4365. KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
  4366. AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
  4367. HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
  4368. BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
  4369. AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
  4370. gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
  4371. QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
  4372. AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
  4373. BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
  4374. IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
  4375. kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
  4376. BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
  4377. IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
  4378. AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
  4379. AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
  4380. QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
  4381. CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
  4382. lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
  4383. dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
  4384. IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
  4385. IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
  4386. C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
  4387. IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
  4388. AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
  4389. KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
  4390. Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4391. ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
  4392. IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
  4393. EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
  4394. IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
  4395. IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
  4396. QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
  4397. IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
  4398. AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
  4399. ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
  4400. CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
  4401. AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
  4402. QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
  4403. kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
  4404. GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
  4405. ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
  4406. OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
  4407. BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
  4408. AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
  4409. IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
  4410. A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
  4411. dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
  4412. KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
  4413. IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
  4414. FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
  4415. GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
  4416. PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
  4417. E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
  4418. IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
  4419. IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
  4420. KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
  4421. Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4422. ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
  4423. gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
  4424. BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
  4425. EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
  4426. gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
  4427. AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
  4428. IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
  4429. AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
  4430. lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
  4431. b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
  4432. ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
  4433. IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
  4434. gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
  4435. ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
  4436. KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
  4437. AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
  4438. dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
  4439. NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
  4440. ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
  4441. DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
  4442. IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
  4443. IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
  4444. GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
  4445. BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
  4446. ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
  4447. gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
  4448. IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
  4449. FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
  4450. ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
  4451. QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
  4452. DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
  4453. dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
  4454. IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
  4455. IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
  4456. IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
  4457. AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
  4458. D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
  4459. EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
  4460. QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
  4461. IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
  4462. IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
  4463. KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
  4464. QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
  4465. GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
  4466. AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
  4467. ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
  4468. DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
  4469. AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
  4470. IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
  4471. zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
  4472. ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
  4473. AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
  4474. IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
  4475. AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
  4476. QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
  4477. B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
  4478. DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
  4479. gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
  4480. KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
  4481. AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
  4482. KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
  4483. HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
  4484. IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
  4485. IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
  4486. HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
  4487. QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
  4488. gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
  4489. QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
  4490. QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
  4491. ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
  4492. ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
  4493. ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
  4494. EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
  4495. KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
  4496. X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
  4497. ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
  4498. QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
  4499. dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
  4500. AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
  4501. AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
  4502. AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
  4503. KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
  4504. koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
  4505. CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
  4506. CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
  4507. IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
  4508. aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
  4509. ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
  4510. IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
  4511. EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
  4512. A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
  4513. A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
  4514. KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
  4515. AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
  4516. lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
  4517. ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
  4518. IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
  4519. aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
  4520. iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
  4521. DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
  4522. AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
  4523. AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
  4524. IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
  4525. lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
  4526. QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
  4527. IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
  4528. Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
  4529. BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
  4530. A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
  4531. ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
  4532. IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
  4533. taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
  4534. NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
  4535. gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
  4536. Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
  4537. DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
  4538. aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
  4539. ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
  4540. C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
  4541. dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
  4542. IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
  4543. dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
  4544. NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
  4545. YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
  4546. IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
  4547. bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
  4548. AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
  4549. b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
  4550. QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
  4551. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
  4552. dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
  4553. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
  4554. dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
  4555. NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
  4556. ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
  4557. YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
  4558. aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
  4559. MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
  4560. IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
  4561. dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
  4562. ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
  4563. IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
  4564. cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
  4565. AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
  4566. AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
  4567. OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
  4568. MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
  4569. MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
  4570. IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
  4571. cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
  4572. aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
  4573. LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
  4574. ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
  4575. bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
  4576. dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
  4577. MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
  4578. YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
  4579. YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
  4580. cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
  4581. b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
  4582. AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
  4583. NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
  4584. bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
  4585. aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
  4586. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
  4587. `
  4588. /***/ })
  4589. /******/ });
  4590. /************************************************************************/
  4591. /******/ // The module cache
  4592. /******/ var __webpack_module_cache__ = {};
  4593. /******/
  4594. /******/ // The require function
  4595. /******/ function __nested_webpack_require_312600__(moduleId) {
  4596. /******/ // Check if module is in cache
  4597. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  4598. /******/ if (cachedModule !== undefined) {
  4599. /******/ return cachedModule.exports;
  4600. /******/ }
  4601. /******/ // Create a new module (and put it into the cache)
  4602. /******/ var module = __webpack_module_cache__[moduleId] = {
  4603. /******/ // no module.id needed
  4604. /******/ // no module.loaded needed
  4605. /******/ exports: {}
  4606. /******/ };
  4607. /******/
  4608. /******/ // Execute the module function
  4609. /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_312600__);
  4610. /******/
  4611. /******/ // Return the exports of the module
  4612. /******/ return module.exports;
  4613. /******/ }
  4614. /******/
  4615. /************************************************************************/
  4616. /******/ /* webpack/runtime/define property getters */
  4617. /******/ (() => {
  4618. /******/ // define getter functions for harmony exports
  4619. /******/ __nested_webpack_require_312600__.d = (exports, definition) => {
  4620. /******/ for(var key in definition) {
  4621. /******/ if(__nested_webpack_require_312600__.o(definition, key) && !__nested_webpack_require_312600__.o(exports, key)) {
  4622. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  4623. /******/ }
  4624. /******/ }
  4625. /******/ };
  4626. /******/ })();
  4627. /******/
  4628. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  4629. /******/ (() => {
  4630. /******/ __nested_webpack_require_312600__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  4631. /******/ })();
  4632. /******/
  4633. /******/ /* webpack/runtime/make namespace object */
  4634. /******/ (() => {
  4635. /******/ // define __esModule on exports
  4636. /******/ __nested_webpack_require_312600__.r = (exports) => {
  4637. /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
  4638. /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
  4639. /******/ }
  4640. /******/ Object.defineProperty(exports, '__esModule', { value: true });
  4641. /******/ };
  4642. /******/ })();
  4643. /******/
  4644. /************************************************************************/
  4645. var __webpack_exports__ = {};
4646. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  4647. (() => {
  4648. "use strict";
  4649. // EXPORTS
  4650. __nested_webpack_require_312600__.d(__webpack_exports__, {
  4651. "default": () => (/* binding */ Speedy)
  4652. });
  4653. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  4654. var speedy_gl = __nested_webpack_require_312600__(7905);
  4655. // EXTERNAL MODULE: ./src/utils/utils.js
  4656. var utils = __nested_webpack_require_312600__(5484);
  4657. // EXTERNAL MODULE: ./src/core/settings.js
  4658. var settings = __nested_webpack_require_312600__(3135);
  4659. // EXTERNAL MODULE: ./src/core/speedy-promise.js
  4660. var speedy_promise = __nested_webpack_require_312600__(4500);
  4661. ;// CONCATENATED MODULE: ./src/utils/asap.js
  4662. /*
  4663. * speedy-vision.js
  4664. * GPU-accelerated Computer Vision for JavaScript
  4665. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  4666. *
  4667. * Licensed under the Apache License, Version 2.0 (the "License");
  4668. * you may not use this file except in compliance with the License.
  4669. * You may obtain a copy of the License at
  4670. *
  4671. * http://www.apache.org/licenses/LICENSE-2.0
  4672. *
  4673. * Unless required by applicable law or agreed to in writing, software
  4674. * distributed under the License is distributed on an "AS IS" BASIS,
  4675. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4676. * See the License for the specific language governing permissions and
  4677. * limitations under the License.
  4678. *
  4679. * asap.js
  4680. * Schedule a function to run "as soon as possible"
  4681. */
  4682. /** callbacks */
  4683. const callbacks = /** @type {Function[]} */ ( [] );
  4684. /** arguments to be passed to the callbacks */
  4685. const args = /** @type {any[][]} */ ( [] );
  4686. /** asap key */
  4687. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  4688. // Register an event listener
  4689. window.addEventListener('message', event => {
  4690. if(event.source !== window || event.data !== ASAP_KEY)
  4691. return;
  4692. event.stopPropagation();
  4693. if(callbacks.length == 0)
  4694. return;
  4695. const fn = callbacks.pop();
  4696. const argArray = args.pop();
  4697. fn.apply(undefined, argArray);
  4698. }, true);
  4699. /**
  4700. * Schedule a function to run "as soon as possible"
  4701. * @param {Function} fn callback
  4702. * @param {any[]} params optional parameters
  4703. */
  4704. function asap(fn, ...params)
  4705. {
  4706. callbacks.unshift(fn);
  4707. args.unshift(params);
  4708. window.postMessage(ASAP_KEY, '*');
  4709. }
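// A minimal usage sketch of asap() (illustrative, commented out): callbacks are
// queued FIFO (unshift + pop) and run inside a 'message' event, i.e., as a
// macrotask that is not subject to setTimeout's clamping of nested timers.
/*
asap((a, b) => console.log('sum:', a + b), 2, 3); // logs "sum: 5" shortly after the current task
*/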
  4710. // EXTERNAL MODULE: ./src/utils/errors.js
  4711. var utils_errors = __nested_webpack_require_312600__(3841);
  4712. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
  4713. /*
  4714. * speedy-vision.js
  4715. * GPU-accelerated Computer Vision for JavaScript
  4716. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  4717. *
  4718. * Licensed under the Apache License, Version 2.0 (the "License");
  4719. * you may not use this file except in compliance with the License.
  4720. * You may obtain a copy of the License at
  4721. *
  4722. * http://www.apache.org/licenses/LICENSE-2.0
  4723. *
  4724. * Unless required by applicable law or agreed to in writing, software
  4725. * distributed under the License is distributed on an "AS IS" BASIS,
  4726. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4727. * See the License for the specific language governing permissions and
  4728. * limitations under the License.
  4729. *
  4730. * speedy-texture-reader.js
  4731. * Reads data from textures
  4732. */
  4733. /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
  4734. const DEFAULT_NUMBER_OF_BUFFERS = 2;
  4735. /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
  4736. const runOnNextFrame = navigator.userAgent.includes('Firefox') ?
  4737. ((fn, ...args) => setTimeout(fn, 10, ...args)) : // RAF produces a warning on Firefox
  4738. ((fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args))); // reduce battery usage
  4739. /**
  4740. * Reads data from textures
  4741. */
  4742. class SpeedyTextureReader
  4743. {
  4744. /**
  4745. * Constructor
  4746. * @param {number} [numberOfBuffers]
  4747. */
  4748. constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS)
  4749. {
  4750. utils/* Utils.assert */.c.assert(numberOfBuffers > 0);
  4751. /** @type {boolean} is this object initialized? */
  4752. this._initialized = false;
  4753. /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
  4754. this._pixelBuffer = (new Array(numberOfBuffers)).fill(null).map(() => new Uint8Array(0));
  4755. /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
  4756. this._pbo = (new Array(numberOfBuffers)).fill(null);
  4757. /** @type {number} the index of the buffer that will be consumed in this frame */
  4758. this._consumerIndex = 0;
  4759. /** @type {number} the index of the buffer that will be produced next */
  4760. this._producerIndex = numberOfBuffers - 1;
  4761. /** @type {SpeedyPromise<void>[]} producer-consumer promises */
  4762. this._promise = Array.from({ length: numberOfBuffers }, () => speedy_promise/* SpeedyPromise.resolve */.s.resolve());
  4763. /** @type {boolean[]} are the contents of the ith buffer being produced? */
  4764. this._busy = (new Array(numberOfBuffers)).fill(false);
  4765. /** @type {boolean[]} can the ith buffer be consumed? */
  4766. this._ready = (new Array(numberOfBuffers)).fill(true);
  4767. }
  4768. /**
  4769. * Initialize this object
  4770. * @param {SpeedyGPU} gpu
  4771. */
  4772. init(gpu)
  4773. {
  4774. this._allocatePBOs(gpu);
  4775. gpu.subscribe(this._allocatePBOs, this, gpu);
  4776. this._initialized = true;
  4777. }
  4778. /**
  4779. * Release resources
  4780. * @param {SpeedyGPU} gpu
  4781. * @returns {null}
  4782. */
  4783. release(gpu)
  4784. {
  4785. gpu.unsubscribe(this._allocatePBOs, this);
  4786. this._deallocatePBOs(gpu);
  4787. this._initialized = false;
  4788. return null;
  4789. }
  4790. /**
  4791. * Read pixels from a texture, synchronously.
4792. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4793. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4794. * @param {number} [x]
  4795. * @param {number} [y]
  4796. * @param {number} [width]
  4797. * @param {number} [height]
  4798. * @returns {Uint8Array} pixels in the RGBA format
  4799. */
  4800. readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height)
  4801. {
  4802. utils/* Utils.assert */.c.assert(this._initialized);
  4803. const gl = texture.gl;
  4804. const fbo = texture.glFbo;
  4805. // clamp values
  4806. width = Math.max(0, Math.min(width, texture.width));
  4807. height = Math.max(0, Math.min(height, texture.height));
  4808. x = Math.max(0, Math.min(x, texture.width - width));
  4809. y = Math.max(0, Math.min(y, texture.height - height));
  4810. // buffer allocation
  4811. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4812. this._reallocate(sizeofBuffer);
  4813. // lost context?
  4814. if(gl.isContextLost())
  4815. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4816. // read pixels
  4817. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4818. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
  4819. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4820. // done!
  4821. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4822. }
  4823. /**
  4824. * Read pixels from a texture, asynchronously, with PBOs.
4825. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4826. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4827. * @param {number} [x]
  4828. * @param {number} [y]
  4829. * @param {number} [width]
  4830. * @param {number} [height]
  4831. * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
  4832. * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
  4833. */
  4834. readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false)
  4835. {
  4836. utils/* Utils.assert */.c.assert(this._initialized);
  4837. const gl = texture.gl;
  4838. const fbo = texture.glFbo;
  4839. // clamp values
  4840. width = Math.max(0, Math.min(width, texture.width));
  4841. height = Math.max(0, Math.min(height, texture.height));
  4842. x = Math.max(0, Math.min(x, texture.width - width));
  4843. y = Math.max(0, Math.min(y, texture.height - height));
  4844. // buffer allocation
  4845. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4846. this._reallocate(sizeofBuffer);
  4847. // lost context?
  4848. if(gl.isContextLost())
  4849. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
  4850. // do not optimize?
  4851. if(!useBufferedDownloads) {
  4852. const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4853. return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() =>
  4854. pixelBuffer
  4855. );
  4856. }
  4857. // Hide latency with a Producer-Consumer mechanism
  4858. const numberOfBuffers = this._pixelBuffer.length;
  4859. // GPU needs to produce data
  4860. const producerIndex = this._producerIndex;
  4861. if(!this._busy[producerIndex]) {
  4862. const pbo = this._pbo[producerIndex];
  4863. const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
  4864. this._producerIndex = (producerIndex + 1) % numberOfBuffers;
  4865. this._ready[producerIndex] = false;
  4866. this._busy[producerIndex] = true;
  4867. //console.time("produce "+producerIndex);
  4868. this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
  4869. //console.timeEnd("produce "+producerIndex);
  4870. this._busy[producerIndex] = false;
  4871. this._ready[producerIndex] = true;
  4872. });
  4873. }
  4874. //else console.log("skip",producerIndex);
  4875. else /* skip frame */ ;
  4876. // CPU needs to consume data
  4877. const consumerIndex = this._consumerIndex;
  4878. this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
  4879. if(!this._ready[consumerIndex]) {
  4880. //console.time("consume "+consumerIndex);
  4881. return this._promise[consumerIndex].then(() => {
  4882. //console.timeEnd("consume "+consumerIndex);
  4883. this._ready[consumerIndex] = false;
  4884. return this._pixelBuffer[consumerIndex];
  4885. });
  4886. }
  4887. //console.log("NO WAIT "+consumerIndex);
  4888. this._ready[consumerIndex] = false;
  4889. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(this._pixelBuffer[consumerIndex]);
  4890. }
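// Illustrative walk-through of the buffered path above, assuming the default of
// two buffers: on every call one buffer is filled by the GPU (producer) while the
// buffer filled on the previous call is handed back to the CPU (consumer), hiding
// roughly one frame of latency:
//
//   call #0: produce into buffer 1, consume buffer 0 (initial, empty contents)
//   call #1: produce into buffer 0, consume buffer 1 (pixels requested at call #0)
//   call #2: produce into buffer 1, consume buffer 0 (pixels requested at call #1)
//
// Both indices advance as (index + 1) % numberOfBuffers, so with
// useBufferedDownloads = true the returned pixels lag one call behind the
// texture that was passed in.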
  4891. /**
  4892. * Reallocate the pixel buffers, so that they can hold the required number of bytes
  4893. * If the pixel buffers already have the required capacity, then nothing is done
  4894. * @param {number} size in bytes
  4895. */
  4896. _reallocate(size)
  4897. {
  4898. // no need to reallocate
  4899. if(size <= this._pixelBuffer[0].byteLength)
  4900. return;
  4901. // reallocate
  4902. for(let i = 0; i < this._pixelBuffer.length; i++) {
  4903. const newBuffer = new Uint8Array(size);
  4904. //newBuffer.set(this._pixelBuffer[i]); // make this optional?
  4905. this._pixelBuffer[i] = newBuffer;
  4906. }
  4907. }
  4908. /**
  4909. * Allocate PBOs
  4910. * @param {SpeedyGPU} gpu
  4911. */
  4912. _allocatePBOs(gpu)
  4913. {
  4914. const gl = gpu.gl;
  4915. for(let i = 0; i < this._pbo.length; i++)
  4916. this._pbo[i] = gl.createBuffer();
  4917. }
  4918. /**
  4919. * Deallocate PBOs
  4920. * @param {SpeedyGPU} gpu
  4921. */
  4922. _deallocatePBOs(gpu)
  4923. {
  4924. const gl = gpu.gl;
  4925. for(let i = this._pbo.length - 1; i >= 0; i--) {
  4926. gl.deleteBuffer(this._pbo[i]);
  4927. this._pbo[i] = null;
  4928. }
  4929. }
  4930. /**
  4931. * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
  4932. * It's assumed that the target texture is in the RGBA8 format
  4933. * @param {WebGL2RenderingContext} gl
  4934. * @param {WebGLBuffer} pbo
  4935. * @param {Uint8Array} outputBuffer with size >= width * height * 4
  4936. * @param {WebGLFramebuffer} fbo
  4937. * @param {GLint} x
  4938. * @param {GLint} y
  4939. * @param {GLsizei} width
  4940. * @param {GLsizei} height
  4941. * @returns {SpeedyPromise<void>}
  4942. */
  4943. static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height)
  4944. {
  4945. /*
  4946. When testing Speedy on Chrome (mobile) using about:tracing with the
  4947. --enable-gpu-service-tracing flag, I found that A LOT of time is spent
  4948. in TraceGLAPI::glMapBufferRange, which takes place just after
  4949. GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
  4950. Using multiple PBOs doesn't seem to impact Chrome too much. Performance
  4951. is much better on Firefox. This suggests there is room for improvement.
4952. I do not yet clearly understand the cause of this lag on Chrome. It
  4953. may be a CPU-GPU synchronization issue.
  4954. EDIT: I have found that using gl.flush() aggressively greatly improves
  4955. things. WebGL commands will be pushed frequently!
  4956. See also:
  4957. https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
  4958. https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
  4959. */
  4960. const size = width * height * 4;
  4961. // validate outputBuffer
  4962. utils/* Utils.assert */.c.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
  4963. // read pixels into the PBO
  4964. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4965. gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
  4966. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4967. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  4968. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4969. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4970. // create a fence
  4971. const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
  4972. gl.flush(); // make sure the sync command is read
  4973. // wait for the commands to be processed by the GPU
  4974. return new speedy_promise/* SpeedyPromise */.s((resolve, reject) => {
  4975. // according to the WebGL2 spec sec 3.7.14 Sync objects,
  4976. // "sync objects may only transition to the signaled state
  4977. // when the user agent's event loop is not executing a task"
  4978. // in other words, it won't be signaled in the same frame
  4979. if(settings/* Settings.gpuPollingMode */.Z.gpuPollingMode != 'asap')
  4980. runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  4981. else
  4982. asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  4983. }).then(() => {
  4984. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4985. gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
  4986. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4987. }).catch(err => {
  4988. throw new utils_errors/* IllegalOperationError */.js(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
  4989. }).finally(() => {
  4990. gl.deleteSync(sync);
  4991. });
  4992. }
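// For reference, the same non-blocking readback idea in plain WebGL2, without
// Speedy's wrappers (an illustrative sketch; readbackAsync is a hypothetical name):
/*
function readbackAsync(gl, fbo, width, height) {
    const pbo = gl.createBuffer();
    gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
    gl.bufferData(gl.PIXEL_PACK_BUFFER, width * height * 4, gl.DYNAMIC_READ);
    gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
    gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0); // writes into the bound PBO
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
    const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
    gl.flush(); // ensure the fence is submitted
    return new Promise((resolve, reject) => {
        (function poll() {
            const status = gl.clientWaitSync(sync, 0, 0);
            if(status === gl.ALREADY_SIGNALED || status === gl.CONDITION_SATISFIED)
                resolve();
            else if(status === gl.WAIT_FAILED)
                reject(new Error('clientWaitSync() failed'));
            else
                setTimeout(poll, 10);
        })();
    }).then(() => {
        const pixels = new Uint8Array(width * height * 4);
        gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
        gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, pixels);
        gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
        return pixels;
    }).finally(() => {
        gl.deleteSync(sync);
        gl.deleteBuffer(pbo);
    });
}
*/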
  4993. /**
  4994. * Waits for a sync object to become signaled
  4995. * @param {WebGL2RenderingContext} gl
  4996. * @param {WebGLSync} sync
  4997. * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
  4998. * @param {Function} resolve
  4999. * @param {Function} reject
  5000. * @param {number} [pollInterval] in milliseconds
  5001. * @param {number} [remainingAttempts] for timeout
  5002. */
  5003. static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000)
  5004. {
  5005. (function poll() {
  5006. const status = gl.clientWaitSync(sync, flags, 0);
  5007. if(remainingAttempts-- <= 0) {
  5008. reject(new utils_errors/* TimeoutError */.W5(`GPU polling timeout`, utils_errors/* GLError.from */.Ql.from(gl)));
  5009. }
  5010. else if(status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
  5011. resolve();
  5012. }
  5013. else {
  5014. //setTimeout(poll, pollInterval);
  5015. if(settings/* Settings.gpuPollingMode */.Z.gpuPollingMode != 'asap')
  5016. requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
  5017. else
  5018. asap(poll);
  5019. }
  5020. })();
  5021. }
  5022. }
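// A minimal usage sketch of SpeedyTextureReader (illustrative, commented out),
// assuming a SpeedyGPU instance `gpu` and a SpeedyDrawableTexture `texture`
// are available in the calling code:
/*
const reader = new SpeedyTextureReader();
reader.init(gpu);
reader.readPixelsAsync(texture, 0, 0, texture.width, texture.height, true)
    .then(pixels => console.log('first RGBA pixel:', pixels[0], pixels[1], pixels[2], pixels[3]))
    .then(() => reader.release(gpu));
*/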
  5023. // EXTERNAL MODULE: ./src/utils/globals.js
  5024. var globals = __nested_webpack_require_312600__(3020);
  5025. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
  5026. /*
  5027. * speedy-vision.js
  5028. * GPU-accelerated Computer Vision for JavaScript
  5029. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  5030. *
  5031. * Licensed under the Apache License, Version 2.0 (the "License");
  5032. * you may not use this file except in compliance with the License.
  5033. * You may obtain a copy of the License at
  5034. *
  5035. * http://www.apache.org/licenses/LICENSE-2.0
  5036. *
  5037. * Unless required by applicable law or agreed to in writing, software
  5038. * distributed under the License is distributed on an "AS IS" BASIS,
  5039. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5040. * See the License for the specific language governing permissions and
  5041. * limitations under the License.
  5042. *
  5043. * speedy-texture.js
  5044. * A wrapper around WebGLTexture
  5045. */
  5046. /**
  5047. * Get a buffer filled with zeros
  5048. * @param {number} size number of bytes
  5049. * @returns {Uint8Array}
  5050. */
  5051. /*
  5052. const zeros = (function() {
  5053. let buffer = new Uint8Array(4);
  5054. return function(size) {
  5055. if(size > buffer.length)
  5056. buffer = new Uint8Array(size);
  5057. return buffer.subarray(0, size);
  5058. }
  5059. })();
  5060. */
  5061. /**
  5062. * A wrapper around WebGLTexture
  5063. */
  5064. class SpeedyTexture
  5065. {
  5066. /**
  5067. * Constructor
  5068. * @param {WebGL2RenderingContext} gl
  5069. * @param {number} width texture width in pixels
  5070. * @param {number} height texture height in pixels
  5071. * @param {number} [format]
  5072. * @param {number} [internalFormat]
  5073. * @param {number} [dataType]
  5074. * @param {number} [filter]
  5075. * @param {number} [wrap]
  5076. */
  5077. constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT)
  5078. {
  5079. /** @type {WebGL2RenderingContext} rendering context */
  5080. this._gl = gl;
  5081. /** @type {number} width of the texture */
  5082. this._width = Math.max(1, width | 0);
  5083. /** @type {number} height of the texture */
  5084. this._height = Math.max(1, height | 0);
  5085. /** @type {boolean} have we generated mipmaps for this texture? */
  5086. this._hasMipmaps = false;
  5087. /** @type {number} texture format */
  5088. this._format = format;
  5089. /** @type {number} internal format (usually a sized format) */
  5090. this._internalFormat = internalFormat;
  5091. /** @type {number} data type */
  5092. this._dataType = dataType;
  5093. /** @type {number} texture filtering (min & mag) */
  5094. this._filter = filter;
  5095. /** @type {number} texture wrapping */
  5096. this._wrap = wrap;
  5097. /** @type {WebGLTexture} internal texture object */
  5098. this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
  5099. }
  5100. /**
  5101. * Releases the texture
  5102. * @returns {null}
  5103. */
  5104. release()
  5105. {
  5106. const gl = this._gl;
  5107. // already released?
  5108. if(this._glTexture == null)
  5109. throw new utils_errors/* IllegalOperationError */.js(`The SpeedyTexture has already been released`);
  5110. // release resources
  5111. this.discardMipmaps();
  5112. gl.deleteTexture(this._glTexture);
  5113. this._glTexture = null;
  5114. this._width = this._height = 0;
  5115. // done!
  5116. return null;
  5117. }
  5118. /**
  5119. * Upload pixel data to the texture. The texture will be resized if needed.
  5120. * @param {TexImageSource} pixels
  5121. * @param {number} [width] in pixels
  5122. * @param {number} [height] in pixels
  5123. * @return {SpeedyTexture} this
  5124. */
  5125. upload(pixels, width = this._width, height = this._height)
  5126. {
  5127. const gl = this._gl;
  5128. utils/* Utils.assert */.c.assert(width > 0 && height > 0);
  5129. this.discardMipmaps();
  5130. this._width = width;
  5131. this._height = height;
  5132. this._internalFormat = gl.RGBA8;
  5133. this._format = gl.RGBA;
  5134. this._dataType = gl.UNSIGNED_BYTE;
  5135. SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, pixels, 0, this._format, this._internalFormat, this._dataType);
  5136. return this;
  5137. }
  5138. /**
  5139. * Clear the texture
  5140. * @returns {this}
  5141. */
  5142. clear()
  5143. {
  5144. const gl = this._gl;
  5145. // context loss?
  5146. if(gl.isContextLost())
  5147. return this;
  5148. // clear texture data
  5149. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5150. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5151. gl.bindTexture(gl.TEXTURE_2D, null);
  5152. // no mipmaps
  5153. this.discardMipmaps();
  5154. // done!
  5155. return this;
  5156. }
  5157. /**
  5158. * Resize this texture. Its content will be lost!
  5159. * @param {number} width new width, in pixels
  5160. * @param {number} height new height, in pixels
  5161. * @returns {this}
  5162. */
  5163. resize(width, height)
  5164. {
  5165. const gl = this._gl;
  5166. // no need to resize?
  5167. if(this._width === width && this._height === height)
  5168. return this;
  5169. // validate size
  5170. width |= 0; height |= 0;
  5171. if(width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH)
  5172. throw new utils_errors/* NotSupportedError */.B8(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);
  5173. else if(width < 1 || height < 1)
  5174. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid texture size: ${width} x ${height}`);
  5175. // context loss?
  5176. if(gl.isContextLost())
  5177. return this;
  5178. // update dimensions
  5179. this._width = width;
  5180. this._height = height;
  5181. // resize
  5182. // Note: this is fast on Chrome, but seems slow on Firefox
  5183. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5184. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5185. gl.bindTexture(gl.TEXTURE_2D, null);
  5186. // no mipmaps
  5187. this.discardMipmaps();
  5188. // done!
  5189. return this;
  5190. }
  5191. /**
  5192. * Generate mipmap
  5193. * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
  5194. * @returns {SpeedyTexture} this
  5195. */
  5196. generateMipmaps(mipmap = [])
  5197. {
  5198. const gl = this._gl;
  5199. // nothing to do
  5200. if(this._hasMipmaps)
  5201. return this;
5202. // let the hardware compute all levels of the pyramid, up to 1x1
  5203. // we also specify the TEXTURE_MIN_FILTER to be used from now on
  5204. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5205. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
  5206. gl.generateMipmap(gl.TEXTURE_2D);
  5207. gl.bindTexture(gl.TEXTURE_2D, null);
  5208. // accept custom textures
  5209. if(mipmap.length > 0) {
  5210. // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  5211. const width = this.width, height = this.height;
  5212. const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
  5213. utils/* Utils.assert */.c.assert(mipmap.length <= numMipmaps);
  5214. // verify the dimensions of each level
  5215. for(let level = 1; level < mipmap.length; level++) {
5216. // use max(1, floor(size / 2^lod)), in accordance with
  5217. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  5218. const w = Math.max(1, width >>> level);
  5219. const h = Math.max(1, height >>> level);
  5220. // verify the dimensions of this level
  5221. utils/* Utils.assert */.c.assert(mipmap[level].width === w && mipmap[level].height === h);
  5222. // copy to mipmap
  5223. mipmap[level].copyTo(this, level);
  5224. }
  5225. }
  5226. // done!
  5227. this._hasMipmaps = true;
  5228. return this;
  5229. }
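// Worked example of the level count used above: for a 640x480 texture,
// numMipmaps = 1 + floor(log2(max(640, 480))) = 1 + 9 = 10, and the level
// sizes max(1, floor(size / 2^level)) are 640x480, 320x240, 160x120, 80x60,
// 40x30, 20x15, 10x7, 5x3, 2x1 and 1x1.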
  5230. /**
  5231. * Invalidates previously generated mipmap, if any
  5232. */
  5233. discardMipmaps()
  5234. {
  5235. const gl = this._gl;
  5236. // nothing to do
  5237. if(!this._hasMipmaps)
  5238. return;
  5239. // reset the min filter
  5240. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5241. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
  5242. gl.bindTexture(gl.TEXTURE_2D, null);
  5243. // done!
  5244. this._hasMipmaps = false;
  5245. }
  5246. /**
  5247. * Does this texture have a mipmap?
  5248. * @returns {boolean}
  5249. */
  5250. hasMipmaps()
  5251. {
  5252. return this._hasMipmaps;
  5253. }
  5254. /**
  5255. * Has this texture been released?
  5256. * @returns {boolean}
  5257. */
  5258. isReleased()
  5259. {
  5260. return this._glTexture == null;
  5261. }
  5262. /**
  5263. * The internal WebGLTexture
  5264. * @returns {WebGLTexture}
  5265. */
  5266. get glTexture()
  5267. {
  5268. return this._glTexture;
  5269. }
  5270. /**
  5271. * The width of the texture, in pixels
  5272. * @returns {number}
  5273. */
  5274. get width()
  5275. {
  5276. return this._width;
  5277. }
  5278. /**
  5279. * The height of the texture, in pixels
  5280. * @returns {number}
  5281. */
  5282. get height()
  5283. {
  5284. return this._height;
  5285. }
  5286. /**
  5287. * The WebGL Context
  5288. * @returns {WebGL2RenderingContext}
  5289. */
  5290. get gl()
  5291. {
  5292. return this._gl;
  5293. }
  5294. /**
  5295. * Create a WebGL texture
  5296. * @param {WebGL2RenderingContext} gl
  5297. * @param {number} width in pixels
  5298. * @param {number} height in pixels
  5299. * @param {number} format usually gl.RGBA
  5300. * @param {number} internalFormat usually gl.RGBA8
  5301. * @param {number} dataType usually gl.UNSIGNED_BYTE
  5302. * @param {number} filter usually gl.NEAREST or gl.LINEAR
  5303. * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
  5304. * @returns {WebGLTexture}
  5305. */
  5306. static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap)
  5307. {
  5308. utils/* Utils.assert */.c.assert(width > 0 && height > 0);
  5309. // create & bind texture
  5310. const texture = gl.createTexture();
  5311. gl.bindTexture(gl.TEXTURE_2D, texture);
  5312. // setup
  5313. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  5314. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  5315. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
  5316. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
  5317. //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
  5318. gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
  5319. // unbind & return
  5320. gl.bindTexture(gl.TEXTURE_2D, null);
  5321. return texture;
  5322. }
  5323. /**
  5324. * Upload pixel data to a WebGL texture
  5325. * @param {WebGL2RenderingContext} gl
  5326. * @param {WebGLTexture} texture
  5327. * @param {GLsizei} width texture width
  5328. * @param {GLsizei} height texture height
  5329. * @param {TexImageSource} pixels
  5330. * @param {GLint} lod mipmap level-of-detail
  5331. * @param {number} format
  5332. * @param {number} internalFormat
  5333. * @param {number} dataType
  5334. * @returns {WebGLTexture} texture
  5335. */
  5336. static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType)
  5337. {
  5338. // Prefer calling _upload() before gl.useProgram() to avoid the
  5339. // needless switching of GL programs internally. See also:
  5340. // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  5341. gl.bindTexture(gl.TEXTURE_2D, texture);
  5342. /*
5343. // slower than texImage2D, contrary to what the spec suggests?
  5344. gl.texSubImage2D(gl.TEXTURE_2D, // target
  5345. lod, // mip level
  5346. 0, // x-offset
  5347. 0, // y-offset
  5348. width, // texture width
  5349. height, // texture height
  5350. gl.RGBA, // source format
  5351. gl.UNSIGNED_BYTE, // source type
  5352. pixels); // source data
  5353. */
  5354. gl.texImage2D(gl.TEXTURE_2D, // target
  5355. lod, // mip level
  5356. internalFormat, // internal format
  5357. width, // texture width
  5358. height, // texture height
  5359. 0, // border
  5360. format, // source format
  5361. dataType, // source type
  5362. pixels); // source data
  5363. gl.bindTexture(gl.TEXTURE_2D, null);
  5364. return texture;
  5365. }
  5366. }
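// A minimal usage sketch of SpeedyTexture (illustrative, commented out), assuming
// a WebGL2 context `gl` and a video element `video` that is already playing:
/*
const texture = new SpeedyTexture(gl, 640, 480); // RGBA8, NEAREST, MIRRORED_REPEAT by default
texture.upload(video, video.videoWidth, video.videoHeight); // resizes the texture if needed
texture.generateMipmaps(); // optionally build the pyramid
texture.release();
*/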
  5367. /**
  5368. * A SpeedyTexture with a framebuffer
  5369. */
  5370. class SpeedyDrawableTexture extends SpeedyTexture
  5371. {
  5372. /**
  5373. * Constructor
  5374. * @param {WebGL2RenderingContext} gl
  5375. * @param {number} width texture width in pixels
  5376. * @param {number} height texture height in pixels
  5377. * @param {number} [format]
  5378. * @param {number} [internalFormat]
  5379. * @param {number} [dataType]
  5380. * @param {number} [filter]
  5381. * @param {number} [wrap]
  5382. */
  5383. constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined)
  5384. {
  5385. super(gl, width, height, format, internalFormat, dataType, filter, wrap);
  5386. /** @type {WebGLFramebuffer} framebuffer */
  5387. this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
  5388. }
  5389. /**
  5390. * Releases the texture
  5391. * @returns {null}
  5392. */
  5393. release()
  5394. {
  5395. const gl = this._gl;
  5396. // already released?
  5397. if(this._glFbo == null)
  5398. throw new utils_errors/* IllegalOperationError */.js(`The SpeedyDrawableTexture has already been released`);
  5399. // release the framebuffer
  5400. gl.deleteFramebuffer(this._glFbo);
  5401. this._glFbo = null;
  5402. // release the SpeedyTexture
  5403. return super.release();
  5404. }
  5405. /**
  5406. * The internal WebGLFramebuffer
  5407. * @returns {WebGLFramebuffer}
  5408. */
  5409. get glFbo()
  5410. {
  5411. return this._glFbo;
  5412. }
  5413. /**
  5414. * Copy this texture into another
  5415. * (you may have to discard the mipmaps after calling this function)
  5416. * @param {SpeedyTexture} texture target texture
  5417. * @param {number} [lod] level-of-detail of the target texture
  5418. */
  5419. copyTo(texture, lod = 0)
  5420. {
  5421. const gl = this._gl;
  5422. // context loss?
  5423. if(gl.isContextLost())
  5424. return;
  5425. // compute texture size as max(1, floor(size / 2^lod)),
5426. // in accordance with the OpenGL ES 3.0 spec sec 3.8.10.4
  5427. // (Mipmapping)
  5428. const pot = 1 << (lod |= 0);
  5429. const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
  5430. const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
  5431. // validate
  5432. utils/* Utils.assert */.c.assert(this._width === expectedWidth && this._height === expectedHeight);
  5433. // copy to texture
  5434. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
  5435. }
  5436. /*
  5437. * Resize this texture
  5438. * @param {number} width new width, in pixels
  5439. * @param {number} height new height, in pixels
  5440. * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
  5441. * @returns {this}
  5442. */
  5443. /*resize(width, height, preserveContent = false)
  5444. {
  5445. const gl = this._gl;
  5446. // no need to preserve the content?
  5447. if(!preserveContent)
  5448. return super.resize(width, height);
  5449. // no need to resize?
  5450. if(this._width === width && this._height === height)
  5451. return this;
  5452. // validate size
  5453. width |= 0; height |= 0;
  5454. Utils.assert(width > 0 && height > 0);
  5455. // context loss?
  5456. if(gl.isContextLost())
  5457. return this;
  5458. // allocate new texture
  5459. const newTexture = SpeedyTexture._createTexture(gl, width, height);
  5460. // initialize the new texture with zeros to avoid a
  5461. // warning when calling copyTexSubImage2D() on Firefox
  5462. // this may not be very efficient?
  5463. SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
  5464. // copy the old texture to the new one
  5465. const oldWidth = this._width, oldHeight = this._height;
  5466. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
  5467. // bind FBO
  5468. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5469. // invalidate old data (is this needed?)
  5470. gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
  5471. // attach the new texture to the existing framebuffer
  5472. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5473. gl.COLOR_ATTACHMENT0, // color buffer
  5474. gl.TEXTURE_2D, // tex target
  5475. newTexture, // texture
  5476. 0); // mipmap level
  5477. // unbind FBO
  5478. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5479. // release the old texture and replace it
  5480. gl.deleteTexture(this._glTexture);
  5481. this._glTexture = newTexture;
  5482. // update dimensions & discard mipmaps
  5483. this.discardMipmaps();
  5484. this._width = width;
  5485. this._height = height;
  5486. // done!
  5487. return this;
  5488. }
  5489. */
  5490. /**
  5491. * Clear the texture
  5492. * @returns {this}
  5493. */
  5494. clear()
  5495. {
  5496. //
  5497. // When we pass null to texImage2D(), it seems that Firefox
  5498. // doesn't clear the texture. Instead, it displays this warning:
  5499. //
  5500. // "WebGL warning: drawArraysInstanced:
  5501. // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
  5502. //
  5503. // Here is a workaround:
  5504. //
  5505. return this.clearToColor(0, 0, 0, 0);
  5506. }
  5507. /**
  5508. * Clear the texture to a color
  5509. * @param {number} r red component, a value in [0,1]
  5510. * @param {number} g green component, a value in [0,1]
  5511. * @param {number} b blue component, a value in [0,1]
  5512. * @param {number} a alpha component, a value in [0,1]
  5513. * @returns {this}
  5514. */
  5515. clearToColor(r, g, b, a)
  5516. {
  5517. const gl = this._gl;
  5518. // context loss?
  5519. if(gl.isContextLost())
  5520. return this;
  5521. // clamp parameters
  5522. r = Math.max(0.0, Math.min(+r, 1.0));
  5523. g = Math.max(0.0, Math.min(+g, 1.0));
  5524. b = Math.max(0.0, Math.min(+b, 1.0));
  5525. a = Math.max(0.0, Math.min(+a, 1.0));
  5526. // discard mipmaps, if any
  5527. this.discardMipmaps();
  5528. // clear the texture
  5529. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5530. gl.viewport(0, 0, this._width, this._height);
  5531. gl.clearColor(r, g, b, a);
  5532. gl.clear(gl.COLOR_BUFFER_BIT);
  5533. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5534. // done!
  5535. return this;
  5536. }
  5537. /**
  5538. * Inspect the pixels of the texture for debugging purposes
  5539. * @param {SpeedyGPU} gpu
  5540. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5541. * @returns {Uint8Array}
  5542. */
  5543. inspect(gpu, textureReader)
  5544. {
  5545. if(textureReader === undefined) {
  5546. textureReader = new SpeedyTextureReader();
  5547. textureReader.init(gpu);
  5548. const pixels = textureReader.readPixelsSync(this);
  5549. textureReader.release(gpu);
  5550. return new Uint8Array(pixels); // copy the array
  5551. }
  5552. else {
  5553. const pixels = textureReader.readPixelsSync(this);
  5554. return new Uint8Array(pixels);
  5555. }
  5556. }
  5557. /**
  5558. * Inspect the pixels of the texture as unsigned 32-bit integers
  5559. * @param {SpeedyGPU} gpu
  5560. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5561. * @returns {Uint32Array}
  5562. */
  5563. inspect32(gpu, textureReader)
  5564. {
  5565. utils/* Utils.assert */.c.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
  5566. return new Uint32Array(this.inspect(gpu, textureReader).buffer);
  5567. }
  5568. /**
  5569. * Create a FBO associated with an existing texture
  5570. * @param {WebGL2RenderingContext} gl
  5571. * @param {WebGLTexture} texture
  5572. * @returns {WebGLFramebuffer}
  5573. */
  5574. static _createFramebuffer(gl, texture)
  5575. {
  5576. const fbo = gl.createFramebuffer();
  5577. // setup framebuffer
  5578. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5579. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5580. gl.COLOR_ATTACHMENT0, // color buffer
  5581. gl.TEXTURE_2D, // tex target
  5582. texture, // texture
  5583. 0); // mipmap level
  5584. // check for errors
  5585. const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  5586. if(status != gl.FRAMEBUFFER_COMPLETE) {
  5587. const error = (() => (([
  5588. 'FRAMEBUFFER_UNSUPPORTED',
  5589. 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT',
  5590. 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS',
  5591. 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT',
  5592. 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'
  5593. ].filter(err => gl[err] === status))[0] || 'unknown error'))();
  5594. throw new utils_errors/* GLError */.Ql(`Can't create framebuffer: ${error} (${status})`);
  5595. }
  5596. // unbind & return
  5597. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5598. return fbo;
  5599. }
  5600. /**
  5601. * Copy data from a framebuffer to a texture
  5602. * @param {WebGL2RenderingContext} gl
  5603. * @param {WebGLFramebuffer} fbo we'll read the data from this
  5604. * @param {WebGLTexture} texture destination texture
  5605. * @param {GLint} x xpos (where to start copying)
  5606. * @param {GLint} y ypos (where to start copying)
  5607. * @param {GLsizei} width width of the texture
  5608. * @param {GLsizei} height height of the texture
  5609. * @param {GLint} [lod] mipmap level-of-detail
  5610. * @returns {WebGLTexture} texture
  5611. */
  5612. static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0)
  5613. {
  5614. //gl.activeTexture(gl.TEXTURE0);
  5615. gl.bindTexture(gl.TEXTURE_2D, texture);
  5616. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5617. gl.copyTexSubImage2D(
  5618. gl.TEXTURE_2D, // target
  5619. lod, // mipmap level
  5620. 0, // xoffset
  5621. 0, // yoffset
  5622. x, // xpos (where to start copying)
  5623. y, // ypos (where to start copying)
  5624. width, // width of the texture
  5625. height // height of the texture
  5626. );
  5627. /*
  5628. gl.copyTexImage2D(
  5629. gl.TEXTURE_2D, // target
  5630. lod, // mipmap level
  5631. gl.RGBA, // internal format
  5632. x, // xpos (where to start copying)
  5633. y, // ypos (where to start copying)
  5634. width, // width of the texture
  5635. height, // height of the texture
  5636. 0 // border
  5637. );
  5638. */
  5639. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5640. gl.bindTexture(gl.TEXTURE_2D, null);
  5641. return texture;
  5642. }
  5643. }
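// A minimal usage sketch of SpeedyDrawableTexture (illustrative, commented out),
// assuming a WebGL2 context `gl` and a SpeedyGPU instance `gpu`:
/*
const target = new SpeedyDrawableTexture(gl, 256, 256); // texture + FBO
target.clearToColor(1, 0, 0, 1); // fill with opaque red
const rgba = target.inspect(gpu); // Uint8Array with 256 * 256 * 4 bytes
console.log(rgba[0], rgba[1], rgba[2], rgba[3]); // 255 0 0 255
target.release();
*/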
  5644. // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
  5645. var shader_declaration = __nested_webpack_require_312600__(9759);
  5646. ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
  5647. /*
  5648. * speedy-vision.js
  5649. * GPU-accelerated Computer Vision for JavaScript
  5650. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  5651. *
  5652. * Licensed under the Apache License, Version 2.0 (the "License");
  5653. * you may not use this file except in compliance with the License.
  5654. * You may obtain a copy of the License at
  5655. *
  5656. * http://www.apache.org/licenses/LICENSE-2.0
  5657. *
  5658. * Unless required by applicable law or agreed to in writing, software
  5659. * distributed under the License is distributed on an "AS IS" BASIS,
  5660. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5661. * See the License for the specific language governing permissions and
  5662. * limitations under the License.
  5663. *
  5664. * speedy-program.js
  5665. * SpeedyProgram class
  5666. */
  5667. /** @const {Object<string,string>} Map uniform type to a gl function */
  5668. const UNIFORM_SETTERS = Object.freeze({
  5669. 'sampler2D': 'uniform1i',
  5670. 'isampler2D':'uniform1i',
  5671. 'usampler2D':'uniform1i',
  5672. 'float': 'uniform1f',
  5673. 'int': 'uniform1i',
  5674. 'uint': 'uniform1ui',
  5675. 'bool': 'uniform1i',
  5676. 'vec2': 'uniform2f',
  5677. 'vec3': 'uniform3f',
  5678. 'vec4': 'uniform4f',
  5679. 'ivec2': 'uniform2i',
  5680. 'ivec3': 'uniform3i',
  5681. 'ivec4': 'uniform4i',
  5682. 'uvec2': 'uniform2ui',
  5683. 'uvec3': 'uniform3ui',
  5684. 'uvec4': 'uniform4ui',
  5685. 'bvec2': 'uniform2i',
  5686. 'bvec3': 'uniform3i',
  5687. 'bvec4': 'uniform4i',
  5688. 'mat2': 'uniformMatrix2fv',
  5689. 'mat3': 'uniformMatrix3fv',
  5690. 'mat4': 'uniformMatrix4fv',
  5691. });
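// Illustrative sketch of how this table is used: the setter name is looked up by
// GLSL type and invoked dynamically on the WebGL2 context (`location` and the
// values below are hypothetical):
/*
gl[UNIFORM_SETTERS['vec2']](location, 640, 480); // same as gl.uniform2f(location, 640, 480)
gl[UNIFORM_SETTERS['mat3']](location, false, data); // matrix setters take a transpose flag + a Float32Array
*/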
  5692. /**
  5693. * @typedef {object} SpeedyProgramOptions
  5694. * @property {boolean} [renderToTexture] render results to a texture?
  5695. * @property {boolean} [pingpong] alternate output texture between calls
  5696. */
  5697. /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
  5698. /**
  5699. * A SpeedyProgram is a Function that
  5700. * runs GPU-accelerated GLSL code
  5701. */
  5702. class SpeedyProgram extends Function
  5703. {
  5704. /**
  5705. * Creates a new SpeedyProgram
  5706. * @param {WebGL2RenderingContext} gl WebGL context
  5707. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5708. * @param {SpeedyProgramOptions} [options] user options
  5709. */
  5710. constructor(gl, shaderdecl, options = { })
  5711. {
  5712. super('...args', 'return this._self._call(...args)');
  5713. /** @type {SpeedyProgram} this function bound to this function! */
  5714. this._self = this.bind(this);
  5715. this._self._init(gl, shaderdecl, options);
  5716. return this._self;
  5717. }
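// The constructor above makes instances callable: super() compiles a tiny function
// body that forwards to _call(), and bind() yields the function that is actually
// returned by the constructor. A standalone sketch of the same pattern
// (illustrative only):
//
//   class Callable extends Function {
//       constructor() {
//           super('...args', 'return this._self._call(...args)');
//           this._self = this.bind(this);
//           return this._self;
//       }
//       _call(x) { return 2 * x; }
//   }
//   const f = new Callable();
//   f(21); // 42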
  5718. /**
  5719. * Initialize the SpeedyProgram
  5720. * @param {WebGL2RenderingContext} gl WebGL context
  5721. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5722. * @param {SpeedyProgramOptions} options user options
  5723. */
  5724. _init(gl, shaderdecl, options)
  5725. {
  5726. // not a valid context?
  5727. if(gl.isContextLost())
  5728. throw new utils_errors/* IllegalOperationError */.js(`Can't initialize SpeedyProgram: lost context`);
  5729. // options object
  5730. options = Object.assign({
  5731. // default options
  5732. renderToTexture: true,
  5733. pingpong: false,
  5734. }, options);
  5735. /** @type {WebGL2RenderingContext} */
  5736. this._gl = gl;
  5737. /** @type {WebGLProgram} vertex shader + fragment shader */
  5738. this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
  5739. /** @type {ProgramGeometry} this is a quad */
  5740. this._geometry = new ProgramGeometry(gl, {
  5741. position: shaderdecl.locationOfAttributes.position,
  5742. texCoord: shaderdecl.locationOfAttributes.texCoord
  5743. });
  5744. /** @type {string[]} names of the arguments of the SpeedyProgram */
  5745. this._argnames = shaderdecl.arguments;
  5746. /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
  5747. this._argIsArray = (new Array(this._argnames.length)).fill(false);
  5748. /** @type {UBOHelper} UBO helper (lazy instantiation) */
  5749. this._ubo = null;
  5750. /** @type {boolean} should we render to a texture? If false, we render to the canvas */
  5751. this._renderToTexture = Boolean(options.renderToTexture);
  5752. /** @type {number} width of the output */
  5753. this._width = 1;
  5754. /** @type {number} height of the output */
  5755. this._height = 1;
  5756. /** @type {SpeedyDrawableTexture[]} output texture(s) */
  5757. this._texture = (new Array(options.pingpong ? 2 : 1)).fill(null);
  5758. /** @type {number} used for pingpong rendering */
  5759. this._textureIndex = 0;
  5760. /** @type {Map<string,UniformVariable>} uniform variables */
  5761. this._uniform = new Map();
  5762. /** @type {ShaderDeclaration} shader declaration */
  5763. this._shaderdecl = shaderdecl;
  5764. // autodetect uniforms
  5765. gl.useProgram(this._program);
  5766. for(const name of shaderdecl.uniforms) {
  5767. const type = shaderdecl.uniformType(name);
  5768. const location = gl.getUniformLocation(this._program, name);
  5769. this._uniform.set(name, new UniformVariable(type, location));
  5770. }
  5771. // match arguments & uniforms
  5772. for(let j = 0; j < this._argnames.length; j++) {
  5773. const argname = this._argnames[j];
  5774. if(!this._uniform.has(argname)) {
  5775. this._argIsArray[j] = this._uniform.has(argname + '[0]');
  5776. if(!this._argIsArray[j])
  5777. throw new utils_errors/* IllegalOperationError */.js(`Expected uniform "${argname}", as declared in the argument list`);
  5778. }
  5779. }
  5780. }
  5781. /**
  5782. * Run the SpeedyProgram
  5783. * @param {...SpeedyProgramUniformValue} args
  5784. * @returns {SpeedyDrawableTexture}
  5785. */
  5786. _call(...args)
  5787. {
  5788. const gl = this._gl;
  5789. const argnames = this._argnames;
  5790. // matching arguments?
  5791. if(args.length != argnames.length)
  5792. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
  5793. // can't use the output texture as an input
  5794. const flatArgs = utils/* Utils.flatten */.c.flatten(args);
  5795. for(let j = flatArgs.length - 1; j >= 0; j--) {
  5796. if(flatArgs[j] === this._texture[this._textureIndex])
  5797. throw new utils_errors/* NotSupportedError */.B8(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5798. }
  5799. // context loss?
  5800. if(gl.isContextLost())
  5801. return this._texture[this._textureIndex];
  5802. // use program
  5803. gl.useProgram(this._program);
  5804. // bind the VAO
  5805. gl.bindVertexArray(this._geometry.vao);
  5806. // select the render target
  5807. const texture = this._texture[this._textureIndex];
  5808. const fbo = this._renderToTexture ? texture.glFbo : null;
  5809. // update texSize uniform (available in all fragment shaders)
  5810. const width = this._width, height = this._height;
  5811. const texSize = this._uniform.get('texSize');
  5812. texSize.setValue(gl, [ width, height ]);
  5813. //gl.uniform2f(texSize.location, width, height);
  5814. // set uniforms[i] to args[i]
  5815. for(let i = 0, texNo = 0; i < args.length; i++) {
  5816. const argname = argnames[i];
  5817. if(!this._argIsArray[i]) {
  5818. // uniform variable matches argument name
  5819. const uniform = this._uniform.get(argname);
  5820. texNo = uniform.setValue(gl, args[i], texNo);
  5821. }
  5822. else {
  5823. // uniform array matches argument name
  5824. const array = args[i];
  5825. if(Array.isArray(array)) {
  5826. if(this._uniform.has(`${argname}[${array.length}]`))
  5827. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: too few elements in the "${argname}" array`);
  5828. for(let j = 0, uniform = undefined; (uniform = this._uniform.get(`${argname}[${j}]`)) !== undefined; j++)
  5829. texNo = uniform.setValue(gl, array[j], texNo);
  5830. }
  5831. else
  5832. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: expected an array for "${argname}"`);
  5833. }
  5834. }
  5835. // set Uniform Buffer Objects (if any)
  5836. if(this._ubo !== null)
  5837. this._ubo.update();
  5838. // bind the FBO
  5839. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5840. // draw call
  5841. gl.viewport(0, 0, width, height);
  5842. gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
  5843. // unbind the FBO
  5844. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5845. // unbind the VAO
  5846. gl.bindVertexArray(null);
  5847. // we've just changed the texture! discard the pyramid, if any
  5848. if(texture != null)
  5849. texture.discardMipmaps();
  5850. // ping-pong rendering
  5851. this._pingpong();
  5852. // done!
  5853. return texture;
  5854. }
  5855. /**
  5856. * Set the output texture(s) and its (their) shape(s)
  5857. * @param {number} width new width, in pixels
  5858. * @param {number} height new height, in pixels
  5859. * @param {...SpeedyDrawableTexture|null} texture output texture(s)
  5860. * @returns {SpeedyProgram} this
  5861. */
  5862. outputs(width, height, ...texture)
  5863. {
  5864. this._setOutputTexture(...texture);
  5865. this._setOutputSize(width, height);
  5866. return this;
  5867. }
  5868. /**
  5869. * Set the size of the output
  5870. * @param {number} width new width, in pixels
  5871. * @param {number} height new height, in pixels
  5872. * @returns {SpeedyProgram} this
  5873. */
  5874. _setOutputSize(width, height)
  5875. {
  5876. utils/* Utils.assert */.c.assert(width > 0 && height > 0);
  5877. // update output size
  5878. this._width = width | 0;
  5879. this._height = height | 0;
  5880. // resize the output texture(s)
  5881. for(let i = 0; i < this._texture.length; i++) {
  5882. if(this._texture[i] != null)
  5883. this._texture[i].resize(this._width, this._height);
  5884. }
  5885. // done!
  5886. return this;
  5887. }
  5888. /**
  5889. * Use the provided texture(s) as output
  5890. * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
  5891. * @returns {SpeedyProgram} this
  5892. */
  5893. _setOutputTexture(...texture)
  5894. {
  5895. utils/* Utils.assert */.c.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
  5896. // update output texture(s)
  5897. for(let i = 0; i < this._texture.length; i++)
  5898. this._texture[i] = texture[i];
  5899. this._textureIndex = 0;
  5900. // done!
  5901. return this;
  5902. }
  5903. /**
  5904. * Clear the internal textures
  5905. * @returns {SpeedyDrawableTexture}
  5906. */
  5907. clear()
  5908. {
  5909. const texture = this._texture[this._textureIndex];
  5910. // clear internal textures
  5911. for(let i = 0; i < this._texture.length; i++)
  5912. this._texture[i].clear();
  5913. // ping-pong rendering
  5914. this._pingpong();
  5915. // done!
  5916. return texture;
  5917. }
  5918. /**
  5919. * Set data using a Uniform Buffer Object
  5920. * @param {string} blockName uniform block name
  5921. * @param {ArrayBufferView} data
  5922. * @returns {SpeedyProgram} this
  5923. */
  5924. setUBO(blockName, data)
  5925. {
  5926. if(this._ubo === null)
  5927. this._ubo = new UBOHelper(this._gl, this._program);
  5928. this._ubo.set(blockName, data);
  5929. return this;
  5930. }
  5931. /**
  5932. * Release the resources associated with this SpeedyProgram
  5933. * @returns {null}
  5934. */
  5935. release()
  5936. {
  5937. const gl = this._gl;
  5938. // Release UBOs (if any)
  5939. if(this._ubo != null)
  5940. this._ubo = this._ubo.release();
  5941. // Unlink textures
  5942. this._texture.fill(null);
  5943. // Release geometry
  5944. this._geometry = this._geometry.release();
  5945. // Release program
  5946. gl.deleteProgram(this._program);
  5947. this._program = null;
  5948. // Need to delete the shaders as well? In sec 5.14.9 Programs and shaders
  5949. // of the WebGL 1.0 spec, it is mentioned that the underlying GL object
  5950. // will automatically be marked for deletion when the JS object is
  5951. // destroyed (i.e., garbage collected)
  5952. // done!
  5953. return null;
  5954. }
  5955. /**
  5956. * A constant #defined in the shader declaration
  5957. * @param {string} name
  5958. * @returns {number}
  5959. */
  5960. definedConstant(name)
  5961. {
  5962. return this._shaderdecl.definedConstant(name);
  5963. }
  5964. /**
  5965. * Helper method for pingpong rendering: alternates
  5966. * the texture index from 0 to 1 and vice-versa
  5967. */
  5968. _pingpong()
  5969. {
  5970. if(this._texture.length > 1)
  5971. this._textureIndex = 1 - this._textureIndex;
  5972. }
  5973. /**
  5974. * Compile and link GLSL shaders
  5975. * @param {WebGL2RenderingContext} gl
  5976. * @param {string} vertexShaderSource GLSL code of the vertex shader
  5977. * @param {string} fragmentShaderSource GLSL code of the fragment shader
  5978. * @returns {WebGLProgram}
  5979. */
  5980. static _compile(gl, vertexShaderSource, fragmentShaderSource)
  5981. {
  5982. const program = gl.createProgram();
  5983. const vertexShader = gl.createShader(gl.VERTEX_SHADER);
  5984. const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  5985. // compile vertex shader
  5986. gl.shaderSource(vertexShader, vertexShaderSource);
  5987. gl.compileShader(vertexShader);
  5988. gl.attachShader(program, vertexShader);
  5989. // compile fragment shader
  5990. gl.shaderSource(fragmentShader, fragmentShaderSource);
  5991. gl.compileShader(fragmentShader);
  5992. gl.attachShader(program, fragmentShader);
  5993. // link program
  5994. gl.linkProgram(program);
  5995. gl.validateProgram(program);
  5996. // return on success
  5997. if(gl.getProgramParameter(program, gl.LINK_STATUS))
  5998. return program;
  5999. // display an error
  6000. const errors = [
  6001. gl.getShaderInfoLog(fragmentShader),
  6002. gl.getShaderInfoLog(vertexShader),
  6003. gl.getProgramInfoLog(program),
  6004. ];
  6005. gl.deleteProgram(program);
  6006. gl.deleteShader(fragmentShader);
  6007. gl.deleteShader(vertexShader);
  6008. // display error
  6009. const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
  6010. const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
  6011. const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
  6012. const formattedSource = source.split('\n')
  6013. .map((line, no) => col(1+no) + line)
  6014. .join('\n');
  6015. throw new utils_errors/* GLError */.Ql(
  6016. `\n\n---------- ERROR ----------\n\n` +
  6017. errors.filter(err => err).join('\n') +
  6018. `\n\n---------- SOURCE CODE ----------\n\n` +
  6019. formattedSource + '\n'
  6020. );
  6021. }
  6022. }
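// A minimal usage sketch of SpeedyProgram (illustrative, commented out), assuming
// a WebGL2 context `gl`, a ShaderDeclaration `shaderdecl` whose fragment shader
// declares a single `image` argument, and an input texture `inputTexture`:
/*
const program = new SpeedyProgram(gl, shaderdecl); // renderToTexture: true by default
program.outputs(640, 480, new SpeedyDrawableTexture(gl, 640, 480));
const result = program(inputTexture); // runs the shader and returns the output texture
*/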
  6023. // ============================================================================
  6024. // HELPERS
  6025. // ============================================================================
  6026. /**
  6027. * Configure and store the VAO and the VBOs
  6028. * @param {WebGL2RenderingContext} gl
  6029. * @param {LocationOfAttributes} location
  6030. * @returns {ProgramGeometry}
  6031. *
  6032. * @typedef {Object} LocationOfAttributes
  6033. * @property {number} position
  6034. * @property {number} texCoord
  6035. *
  6036. * @typedef {Object} BufferOfAttributes
  6037. * @property {WebGLBuffer} position
  6038. * @property {WebGLBuffer} texCoord
  6039. */
  6040. function ProgramGeometry(gl, location)
  6041. {
  6042. /** @type {WebGLVertexArrayObject} Vertex Array Object */
  6043. this.vao = gl.createVertexArray();
  6044. /** @type {BufferOfAttributes} Vertex Buffer Objects */
  6045. this.vbo = Object.freeze({
  6046. position: gl.createBuffer(),
  6047. texCoord: gl.createBuffer()
  6048. });
  6049. /** @type {WebGL2RenderingContext} */
  6050. this._gl = gl;
  6051. // bind the VAO
  6052. gl.bindVertexArray(this.vao);
  6053. // set the position attribute
  6054. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
  6055. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  6056. // clip coordinates (CCW)
  6057. -1, -1,
  6058. 1, -1,
  6059. -1, 1,
  6060. -1, 1,
  6061. 1, -1,
  6062. 1, 1,
  6063. ]), gl.STATIC_DRAW);
  6064. gl.enableVertexAttribArray(location.position);
  6065. gl.vertexAttribPointer(location.position, // attribute location
  6066. 2, // 2 components per vertex (x,y)
  6067. gl.FLOAT, // type
  6068. false, // don't normalize
  6069. 0, // default stride (tightly packed)
  6070. 0); // offset
  6071. // set the texCoord attribute
  6072. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
  6073. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  6074. // texture coordinates (CCW)
  6075. 0, 0,
  6076. 1, 0,
  6077. 0, 1,
  6078. 0, 1,
  6079. 1, 0,
  6080. 1, 1,
  6081. ]), gl.STATIC_DRAW);
  6082. gl.enableVertexAttribArray(location.texCoord);
  6083. gl.vertexAttribPointer(location.texCoord, // attribute location
  6084. 2, // 2 components per vertex (x,y)
  6085. gl.FLOAT, // type
  6086. false, // don't normalize
  6087. 0, // default stride (tightly packed)
  6088. 0); // offset
  6089. // unbind
  6090. gl.bindBuffer(gl.ARRAY_BUFFER, null);
  6091. gl.bindVertexArray(null);
  6092. // done!
  6093. return Object.freeze(this);
  6094. }
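// The buffers above define a fullscreen quad: two CCW triangles covering clip space,
// with matching texture coordinates. Illustrative draw call (an assumption based on
// the 6 vertices defined here; the actual call lives elsewhere in this bundle):
//   gl.bindVertexArray(geometry.vao);
//   gl.drawArrays(gl.TRIANGLES, 0, 6); // 2 triangles = 6 vertices
//   gl.bindVertexArray(null);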
  6095. /**
  6096. * Releases the internal resources
  6097. * @returns {null}
  6098. */
  6099. ProgramGeometry.prototype.release = function()
  6100. {
  6101. const gl = this._gl;
  6102. gl.deleteVertexArray(this.vao);
  6103. gl.deleteBuffer(this.vbo.position);
  6104. gl.deleteBuffer(this.vbo.texCoord);
  6105. return null;
  6106. }
  6107. /**
  6108. * Helper class for storing data in GLSL uniform variables
  6109. * @param {string} type
  6110. * @param {WebGLUniformLocation} location
  6111. */
  6112. function UniformVariable(type, location)
  6113. {
  6114. /** @type {string} GLSL data type */
  6115. this.type = String(type);
  6116. if(!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type))
  6117. throw new utils_errors/* NotSupportedError */.B8(`Unsupported uniform type: ${this.type}`);
  6118. /** @type {WebGLUniformLocation} uniform location in a WebGL program */
  6119. this.location = location;
  6120. /** @type {string} setter function */
  6121. this.setter = UNIFORM_SETTERS[this.type];
  6122. const n = Number((this.setter.match(/^uniform(Matrix)?(\d)/))[2]) | 0;
  6123. /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
  6124. this.dim = this.type.startsWith('mat') ? 2 : ((this.type.indexOf('vec') >= 0) ? 1 : 0);
  6125. /** @type {number} required number of scalars */
  6126. this.length = (this.dim == 2) ? n * n : n;
  6127. /** @type {SpeedyProgramUniformValue|null} cached value */
  6128. this._value = null;
  6129. }
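// Worked examples of the fields computed above (assuming UNIFORM_SETTERS maps
// 'float' -> 'uniform1f', 'vec2' -> 'uniform2f', 'mat3' -> 'uniformMatrix3fv'):
//   'float' : n = 1, dim = 0 (scalar), length = 1
//   'vec2'  : n = 2, dim = 1 (vector), length = 2
//   'mat3'  : n = 3, dim = 2 (matrix), length = 3 * 3 = 9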
  6130. /**
  6131. * Set the value of a uniform variable
  6132. * @param {WebGL2RenderingContext} gl
  6133. * @param {SpeedyProgramUniformValue} value use column-major format for matrices
  6134. * @param {number} [texNo] current texture index
  6135. * @returns {number} new texture index
  6136. */
  6137. UniformVariable.prototype.setValue = function(gl, value, texNo = -1)
  6138. {
  6139. const setValue = /** @type {Function} */ ( gl[this.setter] );
  6140. // check uniform type
  6141. if(typeof value === 'object' && this.type.endsWith('sampler2D')) {
  6142. // set texture
6143. if(texNo >= gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)) // query the actual limit; the bare enum is just a constant
6144. throw new utils_errors/* NotSupportedError */.B8(`Can't activate texture unit ${texNo}: max is ${gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS)}`);
  6145. else if(Array.isArray(value))
  6146. throw new utils_errors/* NotSupportedError */.B8(`Can't pass arrays of textures to shaders`);
  6147. else if(value == null)
  6148. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: cannot use ${value} as an input texture`);
  6149. else if(texNo < 0)
  6150. throw new utils_errors/* IllegalArgumentError */.mG(`Missing texNo`);
  6151. const tex = value;
  6152. gl.activeTexture(gl.TEXTURE0 + texNo);
  6153. gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
  6154. gl.uniform1i(this.location, texNo);
  6155. texNo++;
  6156. }
  6157. else if(value === this._value) {
  6158. // do not update the uniform if it hasn't changed
  6159. void(0);
  6160. }
  6161. else if(typeof value === 'number' || typeof value === 'boolean') {
  6162. // set scalar value
  6163. setValue.call(gl, this.location, value);
  6164. }
  6165. else if(Array.isArray(value)) {
  6166. // set vector or matrix
  6167. if(value.length === this.length) {
  6168. if(this.dim == 2)
  6169. setValue.call(gl, this.location, false, value); // matrix
  6170. else
  6171. setValue.call(gl, this.location, ...value); // vector
  6172. }
  6173. else
  6174. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
  6175. }
  6176. else
  6177. throw new utils_errors/* IllegalArgumentError */.mG(`Can't run shader: unrecognized argument "${value}"`);
  6178. // cache the value
  6179. this._value = value;
  6180. // done
  6181. return texNo;
  6182. }
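// Illustrative dispatch, under the same setter assumptions as above: a 'vec2' value
// [x, y] ends up as gl.uniform2f(location, x, y); a 'mat3' value (9 numbers, column-major)
// as gl.uniformMatrix3fv(location, false, value); a 'sampler2D' value binds its texture to
// the next free texture unit and sets the uniform to that unit via gl.uniform1i.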
  6183. /**
  6184. * @typedef {object} UBOStuff
  6185. * @property {WebGLBuffer} buffer
  6186. * @property {number} blockBindingIndex "global" binding index
  6187. * @property {number} blockIndex UBO "location" in the program
  6188. * @property {ArrayBufferView|null} data user-data
  6189. */
  6190. /**
  6191. * A helper class for handling Uniform Buffer Objects (UBOs)
  6192. * @param {WebGL2RenderingContext} gl
  6193. * @param {WebGLProgram} program
  6194. */
  6195. function UBOHelper(gl, program)
  6196. {
  6197. /** @type {WebGL2RenderingContext} */
  6198. this._gl = gl;
  6199. /** @type {WebGLProgram} */
  6200. this._program = program;
  6201. /** @type {number} auto-increment counter */
  6202. this._nextIndex = 0;
  6203. /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
  6204. this._ubo = Object.create(null);
  6205. }
  6206. /**
  6207. * Set Uniform Buffer Object data
  6208. * (the buffer will be uploaded when the program is executed)
  6209. * @param {string} name uniform block name
  6210. * @param {ArrayBufferView} data
  6211. */
  6212. UBOHelper.prototype.set = function(name, data)
  6213. {
  6214. const gl = this._gl;
  6215. // create UBO entry
  6216. if(this._ubo[name] === undefined) {
  6217. this._ubo[name] = {
  6218. buffer: gl.createBuffer(),
  6219. blockBindingIndex: this._nextIndex++,
  6220. blockIndex: -1,
  6221. data: null
  6222. };
  6223. }
  6224. // get UBO entry for the given block name
  6225. const ubo = this._ubo[name];
  6226. // read block index & assign binding point
  6227. if(ubo.blockIndex < 0) {
  6228. const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
  6229. gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
  6230. ubo.blockIndex = blockIndex;
  6231. }
  6232. // store the data - we'll upload it later
  6233. ubo.data = data;
  6234. }
  6235. /**
  6236. * Update UBO data
  6237. * Called when we're using the appropriate WebGLProgram
  6238. */
  6239. UBOHelper.prototype.update = function()
  6240. {
  6241. const gl = this._gl;
  6242. for(const name in this._ubo) {
  6243. const ubo = this._ubo[name];
  6244. gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
  6245. gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
  6246. gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
  6247. gl.bindBuffer(gl.UNIFORM_BUFFER, null);
  6248. }
  6249. }
  6250. /**
  6251. * Release allocated buffers
  6252. * @returns {null}
  6253. */
  6254. UBOHelper.prototype.release = function()
  6255. {
  6256. const gl = this._gl;
  6257. for(const name in this._ubo) {
  6258. const ubo = this._ubo[name];
  6259. gl.deleteBuffer(ubo.buffer);
  6260. ubo.data = null;
  6261. }
  6262. return null;
  6263. }
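// Illustrative usage of UBOHelper, assuming a hypothetical uniform block named "Settings"
// declared in the shader (data must be packed according to the block's memory layout):
//   const ubo = new UBOHelper(gl, program);
//   ubo.set('Settings', new Float32Array([ 1, 0, 0, 0 ]));
//   gl.useProgram(program);
//   ubo.update();   // uploads the data and binds the buffer to the block's binding point
//   // ...later:
//   ubo.release();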
  6264. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
  6265. /*
  6266. * speedy-vision.js
  6267. * GPU-accelerated Computer Vision for JavaScript
  6268. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  6269. *
  6270. * Licensed under the Apache License, Version 2.0 (the "License");
  6271. * you may not use this file except in compliance with the License.
  6272. * You may obtain a copy of the License at
  6273. *
  6274. * http://www.apache.org/licenses/LICENSE-2.0
  6275. *
  6276. * Unless required by applicable law or agreed to in writing, software
  6277. * distributed under the License is distributed on an "AS IS" BASIS,
  6278. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6279. * See the License for the specific language governing permissions and
  6280. * limitations under the License.
  6281. *
  6282. * speedy-program-group.js
  6283. * An abstract group of programs that run on the GPU
  6284. */
  6285. /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
  6286. /**
  6287. * @typedef {object} SpeedyProgramHelpers
  6288. * @property {function(): SpeedyProgramOptions} usesPingpongRendering
  6289. * @property {function(): SpeedyProgramOptions} rendersToCanvas
  6290. */
  6291. /** @const {SpeedyProgramHelpers} Program settings generator */
  6292. const PROGRAM_HELPERS = Object.freeze({
  6293. /**
  6294. * Pingpong Rendering: the output texture of a
  6295. * program cannot be used as an input to itself.
6296. * This helper is convenient in those situations
  6297. * @returns {SpeedyProgramOptions}
  6298. */
  6299. usesPingpongRendering() {
  6300. return {
  6301. pingpong: true
  6302. };
  6303. },
  6304. /**
  6305. * Render to canvas
  6306. * Use it when we're supposed to see the texture
  6307. * @returns {SpeedyProgramOptions}
  6308. */
  6309. rendersToCanvas() {
  6310. return {
  6311. renderToTexture: false
  6312. };
  6313. },
  6314. });
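// Illustrative usage when declaring programs (see SpeedyProgramGroupUtils further below):
//   this.declare('scanMinMax2D', scanMinMax2D, { ...this.program.usesPingpongRendering() });
//   this.declare('renderToCanvas', flipY, { ...this.program.rendersToCanvas() });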
  6315. /**
  6316. * SpeedyProgramGroup
  6317. * A semantically correlated group
  6318. * of programs that run on the GPU
  6319. * @abstract
  6320. */
  6321. class SpeedyProgramGroup
  6322. {
  6323. /**
  6324. * Class constructor
  6325. * @protected
  6326. * @param {SpeedyGPU} gpu
  6327. */
  6328. constructor(gpu)
  6329. {
  6330. /** @type {SpeedyGPU} GPU-accelerated routines */
  6331. this._gpu = gpu;
  6332. /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
  6333. this._programs = [];
  6334. }
  6335. /**
  6336. * Declare a program
  6337. * @protected
  6338. * @param {string} name Program name
  6339. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6340. * @param {SpeedyProgramOptions} [options] Program settings
  6341. * @returns {this}
  6342. */
  6343. declare(name, shaderdecl, options = {})
  6344. {
  6345. // lazy instantiation of kernels
  6346. Object.defineProperty(this, name, {
  6347. get: (() => {
  6348. // Why cast a symbol to symbol?
  6349. // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
  6350. const key = /** @type {symbol} */ ( Symbol(name) );
  6351. return () => this[key] || (this[key] = this._createProgram(shaderdecl, options));
  6352. })()
  6353. });
  6354. return this;
  6355. }
  6356. /**
  6357. * Neat helpers to be used when declaring programs
  6358. * @returns {SpeedyProgramHelpers}
  6359. */
  6360. get program()
  6361. {
  6362. return PROGRAM_HELPERS;
  6363. }
  6364. /**
  6365. * Releases all programs from this group
  6366. * @returns {null}
  6367. */
  6368. release()
  6369. {
  6370. for(let i = 0; i < this._programs.length; i++)
  6371. this._programs[i].release();
  6372. return null;
  6373. }
  6374. /**
  6375. * Spawn a SpeedyProgram
  6376. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6377. * @param {SpeedyProgramOptions} [options] Program settings
  6378. * @returns {SpeedyProgram}
  6379. */
  6380. _createProgram(shaderdecl, options = {})
  6381. {
  6382. const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
  6383. this._programs.push(program);
  6384. return program;
  6385. }
  6386. }
  6387. ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
  6388. /*
  6389. * speedy-vision.js
  6390. * GPU-accelerated Computer Vision for JavaScript
  6391. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  6392. *
  6393. * Licensed under the Apache License, Version 2.0 (the "License");
  6394. * you may not use this file except in compliance with the License.
  6395. * You may obtain a copy of the License at
  6396. *
  6397. * http://www.apache.org/licenses/LICENSE-2.0
  6398. *
  6399. * Unless required by applicable law or agreed to in writing, software
  6400. * distributed under the License is distributed on an "AS IS" BASIS,
  6401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6402. * See the License for the specific language governing permissions and
  6403. * limitations under the License.
  6404. *
  6405. * utils.js
  6406. * GPU utilities
  6407. */
  6408. //
  6409. // Shaders
  6410. //
  6411. // Copy image
  6412. const copy = (0,shader_declaration/* importShader */.Nt)('utils/copy.glsl').withArguments('image');
  6413. // Copy keypoints
  6414. const copyKeypoints = (0,shader_declaration/* importShader */.Nt)('utils/copy-raster.glsl').withDefines({ 'TYPE': 1 }).withArguments('image');
  6415. // Copy 2D vectors
  6416. const copy2DVectors = (0,shader_declaration/* importShader */.Nt)('utils/copy-raster.glsl').withDefines({ 'TYPE': 2 }).withArguments('image');
  6417. // Flip y-axis for output
  6418. const flipY = (0,shader_declaration/* importShader */.Nt)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
  6419. // Fill image with a constant
  6420. const fill = (0,shader_declaration/* importShader */.Nt)('utils/fill.glsl').withArguments('value');
  6421. // Fill zero or more color components of the input image with a constant value
  6422. const fillComponents = (0,shader_declaration/* importShader */.Nt)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
6423. // Copy a given color component of src (srcComponentId) to zero or more color components of a copy of dest
  6424. const copyComponents = (0,shader_declaration/* importShader */.Nt)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
  6425. // Scan the entire image and find the minimum & maximum pixel intensity
  6426. const scanMinMax2D = (0,shader_declaration/* importShader */.Nt)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
  6427. // Compute the partial derivatives of an image
  6428. const sobelDerivatives = (0,shader_declaration/* importShader */.Nt)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
  6429. /**
  6430. * SpeedyProgramGroupUtils
  6431. * Utility operations
  6432. */
  6433. class SpeedyProgramGroupUtils extends SpeedyProgramGroup
  6434. {
  6435. /**
  6436. * Class constructor
  6437. * @param {SpeedyGPU} gpu
  6438. */
  6439. constructor(gpu)
  6440. {
  6441. super(gpu);
  6442. this
  6443. // render to the canvas
  6444. .declare('renderToCanvas', flipY, {
  6445. ...this.program.rendersToCanvas()
  6446. })
  6447. // copy image
  6448. .declare('copy', copy)
  6449. // copy keypoints
  6450. .declare('copyKeypoints', copyKeypoints)
  6451. // copy 2D vectors
  6452. .declare('copy2DVectors', copy2DVectors)
  6453. // Fill image with a constant
  6454. .declare('fill', fill)
  6455. // Fill zero or more color components of the input image with a constant value
  6456. .declare('fillComponents', fillComponents)
6457. // Copy a given color component of src to zero or more color components of a copy of dest
  6458. .declare('copyComponents', copyComponents)
  6459. // find minimum & maximum pixel intensity
  6460. .declare('scanMinMax2D', scanMinMax2D, {
  6461. ...this.program.usesPingpongRendering()
  6462. })
  6463. // Compute the partial derivatives of an image
  6464. .declare('sobelDerivatives', sobelDerivatives)
  6465. ;
  6466. }
  6467. }
  6468. // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
  6469. var convolution = __nested_webpack_require_312600__(6776);
  6470. ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
  6471. /*
  6472. * speedy-vision.js
  6473. * GPU-accelerated Computer Vision for JavaScript
  6474. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  6475. *
  6476. * Licensed under the Apache License, Version 2.0 (the "License");
  6477. * you may not use this file except in compliance with the License.
  6478. * You may obtain a copy of the License at
  6479. *
  6480. * http://www.apache.org/licenses/LICENSE-2.0
  6481. *
  6482. * Unless required by applicable law or agreed to in writing, software
  6483. * distributed under the License is distributed on an "AS IS" BASIS,
  6484. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6485. * See the License for the specific language governing permissions and
  6486. * limitations under the License.
  6487. *
  6488. * filters.js
  6489. * Image filtering on the GPU
  6490. */
  6491. //
  6492. // Shaders
  6493. //
  6494. // Convert to greyscale
  6495. const rgb2grey = (0,shader_declaration/* importShader */.Nt)('filters/rgb2grey.glsl')
  6496. .withArguments('image');
  6497. // Convolution
  6498. const filters_convolution = [3, 5, 7].reduce((obj, ksize) => ((obj[ksize] =
  6499. (0,shader_declaration/* importShader */.Nt)('filters/convolution2d.glsl')
  6500. .withDefines({ 'KERNEL_SIZE_SQUARED': ksize * ksize })
  6501. .withArguments('image', 'kernel')
  6502. ), obj), {});
  6503. // Separable convolution
  6504. const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => ((obj[ksize] =
  6505. (0,shader_declaration/* importShader */.Nt)('filters/convolution1d.glsl')
  6506. .withDefines({ 'KERNEL_SIZE': ksize, 'AXIS': 0 })
  6507. .withArguments('image', 'kernel')
  6508. ), obj), {});
  6509. const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => ((obj[ksize] =
  6510. (0,shader_declaration/* importShader */.Nt)('filters/convolution1d.glsl')
  6511. .withDefines({ 'KERNEL_SIZE': ksize, 'AXIS': 1 })
  6512. .withArguments('image', 'kernel')
  6513. ), obj), {});
  6514. // Median filter
  6515. const median = [3, 5, 7].reduce((obj, ksize) => ((obj[ksize] =
  6516. (0,shader_declaration/* importShader */.Nt)('filters/fast-median.glsl')
  6517. .withDefines({ 'KERNEL_SIZE': ksize })
  6518. .withArguments('image')
  6519. ), obj), {});
  6520. // Normalize image
  6521. const normalizeGreyscale = (0,shader_declaration/* importShader */.Nt)('filters/normalize-image.glsl')
  6522. .withDefines({ 'GREYSCALE': 1 })
  6523. .withArguments('minmax2d', 'minValue', 'maxValue');
  6524. const normalizeColored = (0,shader_declaration/* importShader */.Nt)('filters/normalize-image.glsl')
  6525. .withDefines({ 'GREYSCALE': 0 })
  6526. .withArguments('minmax2dRGB', 'minValue', 'maxValue');
  6527. // Nightvision
  6528. const nightvision = (0,shader_declaration/* importShader */.Nt)('filters/nightvision.glsl')
  6529. .withDefines({ 'GREYSCALE': 0 })
  6530. .withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6531. const nightvisionGreyscale = (0,shader_declaration/* importShader */.Nt)('filters/nightvision.glsl')
  6532. .withDefines({ 'GREYSCALE': 1 })
  6533. .withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6534. //
  6535. // Utilities
  6536. //
  6537. // Handy conversion for Gaussian filters
  6538. // (symmetric kernel, approx. zero after 3*sigma)
  6539. const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
  6540. // Generate a 1D Gaussian kernel
  6541. const gaussian = ksize => utils/* Utils.gaussianKernel */.c.gaussianKernel(ksize2sigma(ksize), ksize);
  6542. // Generate a 1D Box filter
  6543. const box = ksize => (new Array(ksize)).fill(1.0 / ksize);
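// Examples: ksize2sigma(7) = max(1, 7/6) ~= 1.17, so gaussian(7) yields a 7-tap kernel
// with sigma ~= 1.17; box(3) = [ 1/3, 1/3, 1/3 ], a normalized 3-tap box filter.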
  6544. /**
  6545. * SpeedyProgramGroupFilters
  6546. * Image filtering
  6547. */
  6548. class SpeedyProgramGroupFilters extends SpeedyProgramGroup
  6549. {
  6550. /**
  6551. * Class constructor
  6552. * @param {SpeedyGPU} gpu
  6553. */
  6554. constructor(gpu)
  6555. {
  6556. super(gpu);
  6557. this
  6558. // convert to greyscale
  6559. .declare('rgb2grey', rgb2grey)
  6560. // median filters
  6561. .declare('median3', median[3]) // 3x3 window
  6562. .declare('median5', median[5]) // 5x5 window
  6563. .declare('median7', median[7]) // 7x7 window
  6564. // 2D convolution
  6565. .declare('convolution3', filters_convolution[3]) // 3x3 kernel
  6566. .declare('convolution5', filters_convolution[5]) // 5x5 kernel
  6567. .declare('convolution7', filters_convolution[7]) // 7x7 kernel
  6568. // 1D separable convolution
  6569. .declare('convolution3x', convolutionX[3]) // 1x3 kernel
  6570. .declare('convolution3y', convolutionY[3]) // 3x1 kernel
  6571. .declare('convolution5x', convolutionX[5]) // 1x5 kernel
  6572. .declare('convolution5y', convolutionY[5]) // 5x1 kernel
  6573. .declare('convolution7x', convolutionX[7])
  6574. .declare('convolution7y', convolutionY[7])
  6575. .declare('convolution9x', convolutionX[9])
  6576. .declare('convolution9y', convolutionY[9])
  6577. .declare('convolution11x', convolutionX[11])
  6578. .declare('convolution11y', convolutionY[11])
  6579. .declare('convolution13x', convolutionX[13])
  6580. .declare('convolution13y', convolutionY[13])
  6581. .declare('convolution15x', convolutionX[15])
  6582. .declare('convolution15y', convolutionY[15])
  6583. // normalize image
  6584. .declare('normalizeGreyscale', normalizeGreyscale)
  6585. .declare('normalizeColored', normalizeColored)
  6586. // nightvision
  6587. .declare('nightvision', nightvision)
  6588. .declare('nightvisionGreyscale', nightvisionGreyscale)
  6589. .declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 31)))
  6590. .declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 31)))
  6591. .declare('illuminationMapX', (0,convolution.convX)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 63)))
  6592. .declare('illuminationMapY', (0,convolution.convY)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 63)))
  6593. .declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 255)))
  6594. .declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils.gaussianKernel */.c.gaussianKernel(80, 255)))
  6595. // gaussian: separable kernels
  6596. // see also: http://dev.theomader.com/gaussian-kernel-calculator/
  6597. .declare('gaussian3x', (0,convolution.convX)([ 0.25, 0.5, 0.25 ])) // sigma ~ 1.0
  6598. .declare('gaussian3y', (0,convolution.convY)([ 0.25, 0.5, 0.25 ]))
  6599. .declare('gaussian5x', (0,convolution.convX)([ 0.05, 0.25, 0.4, 0.25, 0.05 ])) // sigma ~ 1.0
  6600. .declare('gaussian5y', (0,convolution.convY)([ 0.05, 0.25, 0.4, 0.25, 0.05 ]))
  6601. .declare('gaussian7x', (0,convolution.convX)(gaussian(7)))
  6602. .declare('gaussian7y', (0,convolution.convY)(gaussian(7)))
  6603. .declare('gaussian9x', (0,convolution.convX)(gaussian(9)))
  6604. .declare('gaussian9y', (0,convolution.convY)(gaussian(9)))
  6605. .declare('gaussian11x', (0,convolution.convX)(gaussian(11)))
  6606. .declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
  6607. // box filter: separable kernels
  6608. .declare('box3x', (0,convolution.convX)(box(3)))
  6609. .declare('box3y', (0,convolution.convY)(box(3)))
  6610. .declare('box5x', (0,convolution.convX)(box(5)))
  6611. .declare('box5y', (0,convolution.convY)(box(5)))
  6612. .declare('box7x', (0,convolution.convX)(box(7)))
  6613. .declare('box7y', (0,convolution.convY)(box(7)))
  6614. .declare('box9x', (0,convolution.convX)(box(9)))
  6615. .declare('box9y', (0,convolution.convY)(box(9)))
  6616. .declare('box11x', (0,convolution.convX)(box(11)))
  6617. .declare('box11y', (0,convolution.convY)(box(11)))
  6618. ;
  6619. }
  6620. }
  6621. // EXTERNAL MODULE: ./src/core/speedy-namespace.js
  6622. var speedy_namespace = __nested_webpack_require_312600__(2411);
  6623. ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
  6624. /*
  6625. * speedy-vision.js
  6626. * GPU-accelerated Computer Vision for JavaScript
  6627. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  6628. *
  6629. * Licensed under the Apache License, Version 2.0 (the "License");
  6630. * you may not use this file except in compliance with the License.
  6631. * You may obtain a copy of the License at
  6632. *
  6633. * http://www.apache.org/licenses/LICENSE-2.0
  6634. *
  6635. * Unless required by applicable law or agreed to in writing, software
  6636. * distributed under the License is distributed on an "AS IS" BASIS,
  6637. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6638. * See the License for the specific language governing permissions and
  6639. * limitations under the License.
  6640. *
  6641. * speedy-descriptordb.js
  6642. * A database of binary descriptors in video memory
  6643. */
  6644. //
  6645. // A database of binary descriptors is a texture that stores
  6646. // a set of (descriptor: uint8_t[]) entries.
  6647. //
  6648. /** @type {number} we use RGBA8 textures to store the descriptors */
  6649. const DESCRIPTORDB_BYTESPERPIXEL = 4;
  6650. /** @type {number} texture size goes up to 16 MB */
6651. const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (OpenGL ES 3.0 requires MAX_TEXTURE_SIZE >= 2048)
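// Sizing example for create() below: 32-byte descriptors occupy 8 RGBA8 pixels each.
// For 10000 such descriptors, n = log2(8 * 10000) / 2 ~= 8.14, so log2stride = 9 and a
// 512x512 texture is allocated, with capacity 512 * 512 / 8 = 32768 descriptors (1 MB).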
  6652. /**
  6653. * Utility for generating a database of binary descriptors in video memory
  6654. */
  6655. class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.R
  6656. {
  6657. /**
  6658. * Create a database of binary descriptors
  6659. * @param {SpeedyTexture} texture output texture
  6660. * @param {Uint8Array[]} descriptors binary descriptors
  6661. * @param {number} descriptorSize in bytes, a multiple of 4
  6662. * @returns {SpeedyTexture} texture
  6663. */
  6664. static create(texture, descriptors, descriptorSize)
  6665. {
  6666. utils/* Utils.assert */.c.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
  6667. const numberOfDescriptors = descriptors.length;
  6668. const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
  6669. // find an appropriate texture size
  6670. const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
  6671. const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
  6672. // setup texture parameters
  6673. const stride = 1 << log2stride;
  6674. const width = stride, height = stride; // we use powers-of-two
  6675. // are we within storage capacity?
  6676. const capacity = (width * height) / pixelsPerDescriptor;
  6677. if(numberOfDescriptors > capacity)
  6678. throw new utils_errors/* NotSupportedError */.B8(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
  6679. // create texture data
  6680. const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
  6681. for(let i = 0; i < numberOfDescriptors; i++) {
  6682. const byteOffset = i * descriptorSize;
  6683. const descriptor = descriptors[i];
  6684. // validate input
  6685. utils/* Utils.assert */.c.assert(descriptor.byteLength === descriptorSize);
  6686. utils/* Utils.assert */.c.assert(byteOffset + descriptorSize <= data.byteLength);
  6687. // write data
  6688. data.set(descriptor, byteOffset);
  6689. }
  6690. // log data for further study
  6691. const MEGABYTE = 1048576;
  6692. const totalSize = numberOfDescriptors * descriptorSize;
  6693. utils/* Utils.log */.c.log(
  6694. `Creating a ${width}x${height} database of ${numberOfDescriptors} ` +
  6695. `${descriptorSize * 8}-bit descriptors ` +
  6696. `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`
  6697. );
  6698. // upload to the texture
  6699. texture.resize(width, height);
  6700. texture.upload(data);
  6701. return texture;
  6702. }
  6703. }
  6704. ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
  6705. /*
  6706. * speedy-vision.js
  6707. * GPU-accelerated Computer Vision for JavaScript
  6708. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  6709. *
  6710. * Licensed under the Apache License, Version 2.0 (the "License");
  6711. * you may not use this file except in compliance with the License.
  6712. * You may obtain a copy of the License at
  6713. *
  6714. * http://www.apache.org/licenses/LICENSE-2.0
  6715. *
  6716. * Unless required by applicable law or agreed to in writing, software
  6717. * distributed under the License is distributed on an "AS IS" BASIS,
  6718. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6719. * See the License for the specific language governing permissions and
  6720. * limitations under the License.
  6721. *
  6722. * speedy-lsh.js
  6723. * GPU-based LSH tables for fast matching of binary descriptors
  6724. */
  6725. /*
  6726. * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
  6727. * ------------------------------------------------
  6728. *
  6729. * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
  6730. * Indices of keypoint descriptors are stored in several tables, each with many
  6731. * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
  6732. * size to match the keypoints.
  6733. *
  6734. * Buckets in video memory may get full. Wouldn't it be cool if we could use a
  6735. * probabilistic approach to let us work within their storage capacity?
  6736. *
  6737. * Let there be n buckets in a table, each with storage capacity c (holding
  6738. * up to c elements). Buckets are numbered from 0 to n-1.
  6739. *
  6740. * We pick uniformly a random bucket to store a new element in the table. Let
  6741. * X be the chosen bucket. The probability that we'll store the new element in
  6742. * any particular bucket k is:
  6743. *
  6744. * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
  6745. *
  6746. * On average, each new element stored in the table inserts 1/n of an element
  6747. * in each bucket. If we add m new elements to the table, each bucket receives
  6748. * m/n elements, on average(*).
  6749. *
  6750. * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
  6751. * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
6752. * addition, the expected value of (m Ik) is m * E(Ik) = m/n.
  6753. *
  6754. * Now let Yi be the number of elements inserted in bucket i in m additions to
  6755. * the table. We model Yi as Poisson(m/n), since on average, m additions to
  6756. * the table result in m/n new elements being inserted in bucket i. Buckets
  6757. * are picked independently. Hence, for all i, the probability that we insert
  6758. * q elements in bucket i in m additions to the table is:
  6759. *
  6760. * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
  6761. *
  6762. * Given that each bucket has storage capacity c, we require Yi <= c with a
  6763. * high probability p (say, p = 0.99). This means that, in m additions, we
  6764. * don't want to exceed the capacity c with high probability. So, let us find
  6765. * a (large) value of m such that:
  6766. *
  6767. * P(Yi <= c) >= p
  6768. *
  6769. * Sounds good! We can find the largest matching m using binary search.
  6770. *
  6771. * I don't think we need to enforce a high probability that ALL buckets stay
  6772. * within their capacity - n is large, we need to use the available space, and
  6773. * we have multiple tables anyway.
  6774. *
  6775. * In practice, the assumption that buckets are picked uniformly doesn't hold:
  6776. * keypoints that are nearby tend to have similar descriptors and buckets are
  6777. * picked according to those descriptors. Still, this model works well enough
  6778. * in practice and it is simple! That's what I like about it!
  6779. *
  6780. * ... now, how I actually do the matching is the theme of the next episode!
  6781. */
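// In code, this model maps to the helpers at the end of this module:
// P(Yi <= c) = cumulativePoisson(m/n, c), and findTableCapacity(hashSize, bucketCapacity, p)
// binary-searches for the largest m (with n = 2^hashSize buckets of capacity c = bucketCapacity)
// such that P(Yi <= c) >= p.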
  6782. /** @type {number} Default number of tables in a LSH data structure */
  6783. const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
  6784. /** @type {number} Default number of bits of a hash */
  6785. const LSH_DEFAULT_HASH_SIZE = 15;
  6786. /** @type {number[]} Acceptable number of tables for a LSH data structure */
  6787. const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32];
  6788. /** @type {number[]} Acceptable values for hashSize, in bits */
  6789. const LSH_ACCEPTABLE_HASH_SIZES = [10,11,12,13,14,15,16,17,18,19,20];
  6790. /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
  6791. const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32,64];
  6792. /**
  6793. * @typedef {Object} LSHProfile LSH profile
  6794. * @property {string} name name of the profile
  6795. * @property {number} capacity maximum number of keypoints that can be stored in such a table
  6796. * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
  6797. * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
  6798. * @property {number} bucketCapacity maximum number of entries of a bucket of a table
  6799. */
  6800. /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
  6801. const generateLSHProfiles = (t,h,p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [
  6802. {
  6803. name: 'x-small',
  6804. bucketCapacity: 1,
  6805. tableCount: t,
  6806. hashSize: h,
  6807. capacity: findTableCapacity(h, 1, p),
  6808. },
  6809. {
  6810. name: 'small',
  6811. bucketCapacity: 2,
  6812. tableCount: t,
  6813. hashSize: h,
  6814. capacity: findTableCapacity(h, 2, p),
  6815. },
  6816. {
  6817. name: 'small-plus',
  6818. bucketCapacity: 3,
  6819. tableCount: t,
  6820. hashSize: h,
  6821. capacity: findTableCapacity(h, 3, p),
  6822. },
  6823. {
  6824. name: 'medium',
  6825. bucketCapacity: 4,
  6826. tableCount: t,
  6827. hashSize: h,
  6828. capacity: findTableCapacity(h, 4, p),
  6829. },
  6830. {
  6831. name: 'medium-plus',
  6832. bucketCapacity: 5,
  6833. tableCount: t,
  6834. hashSize: h,
  6835. capacity: findTableCapacity(h, 5, p),
  6836. },
  6837. {
  6838. name: 'large',
  6839. bucketCapacity: 6,
  6840. tableCount: t,
  6841. hashSize: h,
  6842. capacity: findTableCapacity(h, 6, p),
  6843. },
  6844. {
  6845. name: 'x-large',
  6846. bucketCapacity: 8,
  6847. tableCount: t,
  6848. hashSize: h,
  6849. capacity: findTableCapacity(h, 8, p),
  6850. },
  6851. ];
  6852. //
  6853. // LSH hash sequences: random bits in increasing order
  6854. // We generate a few sequences (one for each table) supporting up to 16 hash bits
  6855. // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
  6856. //
  6857. /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
  6858. /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
  6859. /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
  6860. /** @type {number} maximum number of elements of a sequence */
  6861. const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
  6862. /** @type {number} number of sequences in a BitSequences object */
  6863. const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
  6864. /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
  6865. const partitionedSort = seq => (utils/* Utils.range */.c.range(LSH_SEQUENCE_COUNT)
  6866. .forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i+1) * LSH_SEQUENCE_MAXLEN).sort()),
  6867. seq);
  6868. /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
  6869. const padSequences = (p, seq) => (utils/* Utils.range */.c.range(LSH_SEQUENCE_COUNT)
  6870. .forEach(i => seq.subarray((i+1) * LSH_SEQUENCE_MAXLEN - p, (i+1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)),
  6871. seq);
  6872. /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
  6873. const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p,o) => ((p[o]=f(o)), p), {}))(h => ({
  6874. // for 256-bit descriptors
  6875. 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([
  6876. ...(utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(256))),
  6877. ...(utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(256))),
  6878. ...(utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(256))),
  6879. ].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  6880. // for 512-bit descriptors
  6881. 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([
  6882. ...(utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(512))),
  6883. ...(utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(512))),
  6884. ].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  6885. }));
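// Example: with hashSize = 15, padSequences() sets the last LSH_SEQUENCE_MAXLEN - 15 = 5
// entries of each 20-element subsequence to the invalid marker 0xBADCAFE; since the marker
// is larger than any valid bit index, partitionedSort() keeps it at the end, and only the
// first 15 (sorted) bit indices of each subsequence are used to build a hash.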
  6886. //
  6887. // Misc
  6888. //
  6889. /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
  6890. const LSH_BYTESPERPIXEL = 4;
  6891. /** @type {function(number): number} next power of 2 */
  6892. const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
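// e.g., nextPot(1) = 1, nextPot(5) = 8, nextPot(8) = 8, nextPot(9) = 16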
  6893. /**
  6894. * GPU-based LSH tables for fast matching of binary descriptors
  6895. */
  6896. class SpeedyLSH
  6897. {
  6898. /**
  6899. * Constructor
  6900. * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
  6901. * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
  6902. * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
  6903. * @param {number} [tableCount] number of LSH tables, preferably a power of two
  6904. * @param {number} [hashSize] number of bits of a hash of a descriptor
  6905. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6906. */
  6907. constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95)
  6908. {
  6909. const descriptorCount = descriptors.length;
  6910. const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
  6911. const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
  6912. // validate input
  6913. utils/* Utils.assert */.c.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
  6914. utils/* Utils.assert */.c.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
  6915. utils/* Utils.assert */.c.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
  6916. utils/* Utils.assert */.c.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
  6917. utils/* Utils.assert */.c.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
  6918. /** @type {LSHProfile} LSH profile */
  6919. this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
  6920. /** @type {number} descriptor size, in bytes */
  6921. this._descriptorSize = descriptorSize;
  6922. /** @type {number} number of descriptors */
  6923. this._descriptorCount = descriptorCount;
  6924. /** @type {BitSequences} bit sequences */
  6925. this._sequences = this._pickSequences(this._descriptorSize);
  6926. /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
  6927. this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
  6928. /** @type {SpeedyTexture} a storage of descriptors */
  6929. this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
  6930. }
  6931. /**
  6932. * Descriptor size, in bytes
  6933. * @returns {number}
  6934. */
  6935. get descriptorSize()
  6936. {
  6937. return this._descriptorSize;
  6938. }
  6939. /**
  6940. * Number of descriptors stored in this LSH data structure
  6941. * @returns {number}
  6942. */
  6943. get descriptorCount()
  6944. {
  6945. return this._descriptorCount;
  6946. }
  6947. /**
  6948. * LSH bit sequences
  6949. * @returns {BitSequences}
  6950. */
  6951. get sequences()
  6952. {
  6953. return this._sequences;
  6954. }
  6955. /**
  6956. * Number of bits that make a hash
  6957. * @returns {number}
  6958. */
  6959. get hashSize()
  6960. {
  6961. return this._profile.hashSize;
  6962. }
  6963. /**
  6964. * Maximum number of descriptors that can be stored in a bucket of a table
  6965. * @returns {number}
  6966. */
  6967. get bucketCapacity()
  6968. {
  6969. return this._profile.bucketCapacity;
  6970. }
  6971. /**
  6972. * How many buckets per table do we have?
  6973. * @returns {number}
  6974. */
  6975. get bucketsPerTable()
  6976. {
  6977. return 1 << this._profile.hashSize;
  6978. }
  6979. /**
  6980. * Number of LSH tables
  6981. * @returns {number}
  6982. */
  6983. get tableCount()
  6984. {
  6985. return this._profile.tableCount;
  6986. }
  6987. /**
  6988. * Size of one LSH table, in bytes
  6989. * @returns {number}
  6990. */
  6991. get tableSize()
  6992. {
  6993. return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
  6994. }
  6995. /**
  6996. * Size of all LSH tables combined, in bytes
  6997. * @returns {number}
  6998. */
  6999. get totalSize()
  7000. {
  7001. // actually, the total memory in VRAM may be a bit larger than
  7002. // this value, depending on the actual size of the texture
  7003. return this.tableCount * this.tableSize;
  7004. }
  7005. /**
  7006. * LSH tables texture
  7007. * @returns {SpeedyDrawableTexture}
  7008. */
  7009. get tables()
  7010. {
  7011. return this._tables;
  7012. }
  7013. /**
  7014. * A collection of descriptors
  7015. * @returns {SpeedyDrawableTexture}
  7016. */
  7017. get descriptorDB()
  7018. {
  7019. return this._descriptorDB;
  7020. }
  7021. /**
  7022. * Pick the appropriate LSH sequences for a particular descriptor size
  7023. * @param {number} descriptorSize in bytes
  7024. * @returns {BitSequences}
  7025. */
  7026. _pickSequences(descriptorSize)
  7027. {
  7028. utils/* Utils.assert */.c.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
  7029. utils/* Utils.assert */.c.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
  7030. return LSH_SEQUENCES[this.hashSize][descriptorSize];
  7031. }
  7032. /**
  7033. * Create LSH tables
  7034. * @param {SpeedyTexture} texture output texture
  7035. * @param {BitSequences} sequences bit sequences
  7036. * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
  7037. * @param {number} descriptorSize in bytes
  7038. * @returns {SpeedyTexture} texture
  7039. */
  7040. _createStaticTables(texture, sequences, descriptors, descriptorSize)
  7041. {
  7042. const END_OF_LIST = 0xFFFFFFFF;
  7043. const profileName = this._profile.name;
  7044. const tableCapacity = this._profile.capacity;
  7045. const tableCount = this.tableCount;
  7046. const bucketsPerTable = this.bucketsPerTable;
  7047. const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
  7048. const hashSize = this.hashSize;
  7049. const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
  7050. const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
  7051. const textureHeight = Math.ceil(numberOfPixels / textureWidth);
  7052. const numberOfDescriptors = descriptors.length;
  7053. // validate input
  7054. utils/* Utils.assert */.c.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
  7055. utils/* Utils.assert */.c.assert(tableCount <= LSH_SEQUENCE_COUNT);
  7056. utils/* Utils.assert */.c.assert(numberOfPixels <= textureWidth * textureHeight);
  7057. // log
  7058. const MEGABYTE = 1048576;
  7059. utils/* Utils.log */.c.log(
  7060. `Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` +
  7061. `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` +
  7062. `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` +
7063. `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB)`
  7064. );
  7065. // warn the user if there are too many descriptors
  7066. if(numberOfDescriptors > tableCapacity) {
  7067. const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
  7068. utils/* Utils.warning */.c.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
  7069. }
  7070. // create empty LSH tables
  7071. const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
  7072. const bytes = (new Uint8Array(buffer)).fill(0xFF);
  7073. const data = new DataView(buffer);
  7074. // shuffle the descriptors...
7075. // shuffling seems like a good way to spread out similar descriptors,
7076. // which may sit next to each other in the array and would otherwise collide in the same buckets
  7077. const permutation = utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(numberOfDescriptors));
  7078. // for each descriptor
  7079. // do everything in little-endian format!
  7080. const numberOfDiscardedDescriptorsPerTable = (new Array(tableCount)).fill(0);
  7081. for(let i = 0; i < numberOfDescriptors; i++) {
  7082. const descriptorIndex = permutation[i]; //i;
  7083. const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
  7084. // for each table
  7085. for(let table = 0; table < tableCount; table++) {
  7086. // compute hash & memory addresses
  7087. const hash = hashes[table];
  7088. const tableByteOffset = table * bucketsPerTable * bucketSize;
  7089. const bucketByteOffset = tableByteOffset + hash * bucketSize;
  7090. // find the end of the list
  7091. let index = END_OF_LIST;
  7092. for(let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
  7093. const byteOffset = bucketByteOffset + entryByteOffset;
  7094. index = data.getUint32(byteOffset, true);
  7095. // add the keypoint
  7096. if(index == END_OF_LIST) {
  7097. data.setUint32(byteOffset, descriptorIndex, true);
  7098. break;
  7099. }
  7100. }
  7101. // note: if the bucket is full, we just discard the entry :\
  7102. // we give this event a probabilistic treatment (see above),
  7103. // so it happens with low probability
  7104. if(index != END_OF_LIST)
  7105. numberOfDiscardedDescriptorsPerTable[table]++;
  7106. }
  7107. }
  7108. // log data for further study
  7109. const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
  7110. const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
  7111. utils/* Utils.log */.c.log(
  7112. `When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` +
  7113. `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` +
  7114. `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` +
  7115. `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` +
  7116. `Minimum: ${Math.min(...profile).toFixed(2)}%. ` +
  7117. `Table capacity: ${tableCapacity}.`
  7118. );
  7119. // upload the LSH tables to the GPU
  7120. texture.resize(textureWidth, textureHeight);
  7121. texture.upload(bytes);
  7122. return texture;
  7123. }
  7124. /**
  7125. * Pick bits from a binary descriptor
  7126. * @param {Uint8Array} descriptor a single descriptor
  7127. * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
  7128. * @returns {number[]} hash code for each table
  7129. */
  7130. _hashCodes(descriptor, sequences)
  7131. {
  7132. const tableCount = this.tableCount;
  7133. const hashSize = this.hashSize;
  7134. const bucketsPerTable = this.bucketsPerTable;
  7135. const hashes = new Array(tableCount);
  7136. //const descriptorSize = descriptor.length;
  7137. // just to be sure...
  7138. utils/* Utils.assert */.c.assert(
  7139. hashSize <= LSH_SEQUENCE_MAXLEN &&
  7140. sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount
  7141. );
  7142. // for each table
  7143. for(let table = 0; table < tableCount; table++) {
  7144. const offset = LSH_SEQUENCE_MAXLEN * table;
  7145. // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
  7146. let hash = 0;
  7147. for(let i = 0; i < hashSize; i++) {
  7148. let bit = sequences[offset + i];
  7149. let b = bit >>> 3;
  7150. let m = 1 << (bit & 7);
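// e.g., bit index 13 lives in byte b = 13 >>> 3 = 1 and is isolated by mask m = 1 << (13 & 7) = 0x20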
  7151. //Utils.assert(b < descriptorSize);
  7152. hash = (hash << 1) | ((descriptor[b] & m) != 0);
  7153. }
  7154. // validate & store
  7155. utils/* Utils.assert */.c.assert(hash >= 0 && hash < bucketsPerTable);
  7156. hashes[table] = hash;
  7157. }
  7158. // done!
  7159. return hashes;
  7160. }
  7161. }
  7162. /**
  7163. * Compute P(X <= k), where X ~ Poisson(lambda)
  7164. * @param {number} lambda positive number
  7165. * @param {number} k non-negative integer
  7166. * @returns {number}
  7167. */
  7168. function cumulativePoisson(lambda, k)
  7169. {
  7170. const exp = Math.exp(-lambda);
  7171. let sum = 1, fat = 1, pow = 1;
  7172. // k should be small!!!
  7173. for(let i = 1; i <= k; i++)
  7174. sum += (pow *= lambda) / (fat *= i);
  7175. return sum * exp;
  7176. }
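// The loop above evaluates P(X <= k) = exp(-lambda) * sum_{i=0..k} (lambda^i / i!) incrementally.
// Quick checks: cumulativePoisson(1, 0) = exp(-1) ~= 0.368; cumulativePoisson(1, 1) = 2 * exp(-1) ~= 0.736.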
  7177. /**
  7178. * Find the maximum number of keypoint descriptors that a table can hold
  7179. * @param {number} hashSize positive integer
  7180. * @param {number} bucketCapacity positive integer
  7181. * @param {number} [probability] probability of no discard events happening in the theoretical model
  7182. * @return {number} optimal table capacity
  7183. */
  7184. function findTableCapacity(hashSize, bucketCapacity, probability = 0.99)
  7185. {
7186. const n = 1 << hashSize; // number of buckets
  7187. const c = bucketCapacity;
  7188. const p = probability;
  7189. let l = 1, r = n * c; // watch for overflow!
  7190. let m = 0, pm = 0;
  7191. // binary search
  7192. while(l < r) {
  7193. m = Math.floor((l + r) / 2);
  7194. pm = cumulativePoisson(m / n, c);
  7195. if(pm > p) //if(1-pm < 1-p)
  7196. l = m + 1;
  7197. else
  7198. r = m;
  7199. }
  7200. return m;
  7201. }
  7202. ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
  7203. /*
  7204. * speedy-vision.js
  7205. * GPU-accelerated Computer Vision for JavaScript
  7206. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7207. *
  7208. * Licensed under the Apache License, Version 2.0 (the "License");
  7209. * you may not use this file except in compliance with the License.
  7210. * You may obtain a copy of the License at
  7211. *
  7212. * http://www.apache.org/licenses/LICENSE-2.0
  7213. *
  7214. * Unless required by applicable law or agreed to in writing, software
  7215. * distributed under the License is distributed on an "AS IS" BASIS,
  7216. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7217. * See the License for the specific language governing permissions and
  7218. * limitations under the License.
  7219. *
  7220. * keypoints.js
  7221. * Facade for various keypoint detection algorithms
  7222. */
  7223. // FAST corner detector
  7224. const fast9_16 = (0,shader_declaration/* importShader */.Nt)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl')
  7225. .withDefines({ 'FAST_TYPE': 916 })
  7226. .withArguments('corners', 'pyramid', 'lod', 'threshold');
  7227. // Harris corner detector
  7228. const harris = [1, 3, 5, 7].reduce((obj, win) => ((obj[win] =
  7229. (0,shader_declaration/* importShader */.Nt)('keypoints/harris.glsl')
  7230. .withDefines({ 'WINDOW_SIZE': win })
  7231. .withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian')
  7232. ), obj), {});
  7233. const harrisScoreFindMax = (0,shader_declaration/* importShader */.Nt)('keypoints/score-findmax.glsl')
  7234. .withArguments('corners', 'iterationNumber');
  7235. const harrisScoreCutoff = (0,shader_declaration/* importShader */.Nt)('keypoints/harris-cutoff.glsl')
  7236. .withArguments('corners', 'maxScore', 'quality');
  7237. // Subpixel refinement
  7238. const subpixelQuadratic1d = (0,shader_declaration/* importShader */.Nt)('keypoints/subpixel-refinement.glsl')
  7239. .withDefines({ 'METHOD': 0 })
  7240. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7241. const subpixelTaylor2d = (0,shader_declaration/* importShader */.Nt)('keypoints/subpixel-refinement.glsl')
  7242. .withDefines({ 'METHOD': 1 })
  7243. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7244. const subpixelBilinear = (0,shader_declaration/* importShader */.Nt)('keypoints/subpixel-refinement.glsl')
  7245. .withDefines({ 'METHOD': 2 })
  7246. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7247. const subpixelBicubic = (0,shader_declaration/* importShader */.Nt)('keypoints/subpixel-refinement.glsl')
  7248. .withDefines({ 'METHOD': 3 })
  7249. .withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  7250. // Scale refinement
  7251. const refineScaleLoG = (0,shader_declaration/* importShader */.Nt)('keypoints/refine-scale.glsl')
  7252. .withDefines({ 'METHOD': 0 })
  7253. .withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7254. const refineScaleFAST916 = (0,shader_declaration/* importShader */.Nt)('keypoints/refine-scale.glsl')
  7255. .withDefines({ 'METHOD': 1 })
  7256. .withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7257. // Pixel allocation
  7258. const allocateDescriptors = (0,shader_declaration/* importShader */.Nt)('keypoints/allocate-descriptors.glsl')
  7259. .withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  7260. const allocateExtra = (0,shader_declaration/* importShader */.Nt)('keypoints/allocate-extra.glsl')
  7261. .withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  7262. const transferToExtra = (0,shader_declaration/* importShader */.Nt)('keypoints/transfer-to-extra.glsl')
  7263. .withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7264. // ORB descriptors
  7265. const orbDescriptor = (0,shader_declaration/* importShader */.Nt)('keypoints/orb-descriptor.glsl')
  7266. .withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
  7267. const orbOrientation = (0,shader_declaration/* importShader */.Nt)('keypoints/orb-orientation.glsl')
  7268. .withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7269. // Non-maximum suppression
  7270. const nonMaxSuppression = (0,shader_declaration/* importShader */.Nt)('keypoints/nonmax-suppression.glsl')
  7271. .withDefines({ 'MULTISCALE': 0 })
  7272. .withArguments('image', 'lodStep');
  7273. const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.Nt)('keypoints/nonmax-suppression.glsl')
  7274. .withDefines({ 'MULTISCALE': 1 })
  7275. .withArguments('image', 'lodStep');
  7276. const nonmaxSpace = (0,shader_declaration/* importShader */.Nt)('keypoints/nonmax-space.glsl')
  7277. .withArguments('corners');
  7278. const nonmaxScale = (0,shader_declaration/* importShader */.Nt)('keypoints/nonmax-scale.glsl')
  7279. .withDefines({ 'USE_LAPLACIAN': 1 })
  7280. .withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
  7281. const nonmaxScaleSimple = (0,shader_declaration/* importShader */.Nt)('keypoints/nonmax-scale.glsl')
  7282. .withDefines({ 'USE_LAPLACIAN': 0 })
  7283. .withArguments('corners', 'pyramid', 'lodStep');
  7284. const laplacian = (0,shader_declaration/* importShader */.Nt)('keypoints/laplacian.glsl')
  7285. .withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
  7286. // Keypoint tracking & optical-flow
  7287. const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => ((obj[win] =
  7288. (0,shader_declaration/* importShader */.Nt)('keypoints/lk.glsl')
  7289. .withDefines({ 'WINDOW_SIZE': win })
  7290. .withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength')
  7291. ), obj), {});
  7292. const transferFlow = (0,shader_declaration/* importShader */.Nt)('keypoints/transfer-flow.glsl')
  7293. .withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7294. // Brute-force matching
  7295. const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-init.glsl')
  7296. .withDefines({ 'ENCODE_FILTERS': 0 });
  7297. const bfMatcherInitFilters = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-init.glsl')
  7298. .withDefines({ 'ENCODE_FILTERS': 1 });
  7299. const bfMatcherTransfer = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-transfer.glsl')
  7300. .withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7301. const bfMatcher32 = (0,shader_declaration/* importShader */.Nt)('keypoints/bf-knn.glsl')
  7302. .withDefines({
  7303. 'DESCRIPTOR_SIZE': 32,
  7304. 'NUMBER_OF_KEYPOINTS_PER_PASS': 16,
  7305. })
  7306. .withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7307. const bfMatcher64 = (0,shader_declaration/* importShader */.Nt)('keypoints/bf-knn.glsl')
  7308. .withDefines({
  7309. 'DESCRIPTOR_SIZE': 64,
  7310. 'NUMBER_OF_KEYPOINTS_PER_PASS': 8,
  7311. })
  7312. .withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7313. // LSH-based KNN matching
  7314. const lshKnnInitCandidates = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-init.glsl')
  7315. .withDefines({ 'ENCODE_FILTERS': 0 });
  7316. const lshKnnInitFilters = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-init.glsl')
  7317. .withDefines({ 'ENCODE_FILTERS': 1 });
  7318. const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => ((obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => ((obj[hashSize] = [0, 1, 2].reduce((obj, level) => ((obj[level] =
  7319. (0,shader_declaration/* importShader */.Nt)('keypoints/lsh-knn.glsl')
  7320. .withDefines({
  7321. 'DESCRIPTOR_SIZE': descriptorSize,
  7322. 'HASH_SIZE': hashSize,
  7323. 'LEVEL': level,
  7324. 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
  7325. 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT,
  7326. })
  7327. .withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength')
  7328. ), obj), {})), obj), {})), obj), {});
  7329. const lshKnnTransfer = (0,shader_declaration/* importShader */.Nt)('keypoints/knn-transfer.glsl')
  7330. .withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7331. // Keypoint sorting
  7332. const sortCreatePermutation = (0,shader_declaration/* importShader */.Nt)('keypoints/sort-keypoints.glsl')
  7333. .withDefines({ 'STAGE': 1 })
  7334. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7335. const sortMergePermutation = (0,shader_declaration/* importShader */.Nt)('keypoints/sort-keypoints.glsl')
  7336. .withDefines({ 'STAGE': 2 })
  7337. .withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
  7338. const sortApplyPermutation = (0,shader_declaration/* importShader */.Nt)('keypoints/sort-keypoints.glsl')
  7339. .withDefines({ 'STAGE': 3 })
  7340. .withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
  7341. // Keypoint mixing
  7342. const mixKeypointsPreInit = (0,shader_declaration/* importShader */.Nt)('keypoints/mix-keypoints.glsl')
  7343. .withDefines({ 'STAGE': 1 })
  7344. .withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
  7345. const mixKeypointsInit = (0,shader_declaration/* importShader */.Nt)('keypoints/mix-keypoints.glsl')
  7346. .withDefines({ 'STAGE': 2 })
  7347. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7348. const mixKeypointsSort = (0,shader_declaration/* importShader */.Nt)('keypoints/mix-keypoints.glsl')
  7349. .withDefines({ 'STAGE': 3 })
  7350. .withArguments('array', 'blockSize');
  7351. const mixKeypointsView = (0,shader_declaration/* importShader */.Nt)('keypoints/mix-keypoints.glsl')
  7352. .withDefines({ 'STAGE': 5 })
  7353. .withArguments('array');
  7354. const mixKeypointsApply = (0,shader_declaration/* importShader */.Nt)('keypoints/mix-keypoints.glsl')
  7355. .withDefines({ 'STAGE': 4 })
  7356. .withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7357. // Keypoint encoding
  7358. const initLookupTable = (0,shader_declaration/* importShader */.Nt)('keypoints/lookup-of-locations.glsl')
  7359. .withDefines({ 'FS_OUTPUT_TYPE': 2, 'STAGE': 1 })
  7360. .withArguments('corners');
  7361. const sortLookupTable = (0,shader_declaration/* importShader */.Nt)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl')
  7362. .withDefines({ 'FS_OUTPUT_TYPE': 2, 'FS_USE_CUSTOM_PRECISION': 1, 'STAGE': 2 })
  7363. .withArguments('lookupTable', 'blockSize', 'width', 'height');
  7364. const viewLookupTable = (0,shader_declaration/* importShader */.Nt)('keypoints/lookup-of-locations.glsl')
  7365. .withDefines({ 'STAGE': -1 })
  7366. .withArguments('lookupTable');
  7367. const encodeKeypoints = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-keypoints.glsl')
  7368. .withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
  7369. const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-keypoint-offsets.glsl')
  7370. .withArguments('corners', 'imageSize');
  7371. const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-keypoint-long-offsets.glsl')
  7372. .withDefines({ 'MAX_ITERATIONS': 6 }) // dependent texture reads :(
  7373. .withArguments('offsetsImage', 'imageSize');
  7374. const encodeKeypointPositions = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-keypoint-positions.glsl')
  7375. .withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7376. const encodeKeypointProperties = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-keypoint-properties.glsl')
  7377. .withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7378. const encodeNullKeypoints = (0,shader_declaration/* importShader */.Nt)('keypoints/encode-null-keypoints.glsl')
  7379. .withArguments();
  7380. const transferOrientation = (0,shader_declaration/* importShader */.Nt)('keypoints/transfer-orientation.glsl')
  7381. .withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7382. const uploadKeypoints = (0,shader_declaration/* importShader */.Nt)('keypoints/upload-keypoints.glsl')
  7383. .withDefines({
  7384. // UBOs can hold at least 16KB of data;
  7385. // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
  7386. // according to the GL ES 3 reference.
  7387. // Each keypoint uses 16 bytes (vec4)
  7388. 'BUFFER_SIZE': 1024 //16384 / 16
  7389. })
  7390. .withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
  7391. // Geometric transformations
  7392. const applyHomography = (0,shader_declaration/* importShader */.Nt)('keypoints/apply-homography.glsl')
  7393. .withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7394. // Keypoint filters
  7395. const clipBorder = (0,shader_declaration/* importShader */.Nt)('keypoints/clip-border.glsl')
  7396. .withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7397. const distanceFilter = (0,shader_declaration/* importShader */.Nt)('keypoints/distance-filter.glsl')
  7398. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7399. const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.Nt)('keypoints/hamming-distance-filter.glsl')
  7400. .withDefines({ 'DESCRIPTOR_SIZE': 32 })
  7401. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7402. const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.Nt)('keypoints/hamming-distance-filter.glsl')
  7403. .withDefines({ 'DESCRIPTOR_SIZE': 64 })
  7404. .withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7405. // Other utilities
  7406. const shuffle = (0,shader_declaration/* importShader */.Nt)('keypoints/shuffle.glsl')
  7407. .withDefines({ 'PERMUTATION_MAXLEN': 2048 })
  7408. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7409. const clip = (0,shader_declaration/* importShader */.Nt)('keypoints/clip.glsl')
  7410. .withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7411. /**
  7412. * SpeedyProgramGroupKeypoints
  7413. * Keypoint detection
  7414. */
  7415. class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup
  7416. {
  7417. /**
  7418. * Class constructor
  7419. * @param {SpeedyGPU} gpu
  7420. */
  7421. constructor(gpu)
  7422. {
  7423. super(gpu);
  7424. this
  7425. //
  7426. // FAST corner detector
  7427. //
  7428. .declare('fast9_16', fast9_16, {
  7429. ...this.program.usesPingpongRendering()
  7430. })
  7431. //
  7432. // Harris corner detector
  7433. //
  7434. .declare('harris1', harris[1], {
  7435. ...this.program.usesPingpongRendering()
  7436. })
  7437. .declare('harris3', harris[3], {
  7438. ...this.program.usesPingpongRendering()
  7439. })
  7440. .declare('harris5', harris[5], {
  7441. ...this.program.usesPingpongRendering()
  7442. })
  7443. .declare('harris7', harris[7], {
  7444. ...this.program.usesPingpongRendering()
  7445. })
  7446. .declare('harrisScoreFindMax', harrisScoreFindMax, {
  7447. ...this.program.usesPingpongRendering()
  7448. })
  7449. .declare('harrisScoreCutoff', harrisScoreCutoff)
  7450. //
  7451. // Subpixel refinement
  7452. //
  7453. .declare('subpixelQuadratic1d', subpixelQuadratic1d)
  7454. .declare('subpixelTaylor2d', subpixelTaylor2d)
  7455. .declare('subpixelBicubic', subpixelBicubic)
  7456. .declare('subpixelBilinear', subpixelBilinear)
  7457. //
  7458. // Scale refinement
  7459. //
  7460. .declare('refineScaleLoG', refineScaleLoG)
  7461. .declare('refineScaleFAST916', refineScaleFAST916)
  7462. //
  7463. // Pixel allocation
  7464. //
  7465. .declare('allocateDescriptors', allocateDescriptors)
  7466. .declare('allocateExtra', allocateExtra)
  7467. .declare('transferToExtra', transferToExtra)
  7468. //
  7469. // ORB descriptors
  7470. //
  7471. .declare('orbDescriptor', orbDescriptor)
  7472. .declare('orbOrientation', orbOrientation)
  7473. //
  7474. // Non-maximum suppression
  7475. //
  7476. .declare('nonmax', nonMaxSuppression)
  7477. .declare('pyrnonmax', multiscaleNonMaxSuppression)
  7478. .declare('nonmaxSpace', nonmaxSpace)
  7479. .declare('nonmaxScale', nonmaxScale)
  7480. .declare('nonmaxScaleSimple', nonmaxScaleSimple)
  7481. .declare('laplacian', laplacian)
  7482. //
  7483. // LK optical-flow
  7484. //
  7485. .declare('lk21', lk[21], {
  7486. ...this.program.usesPingpongRendering()
  7487. })
  7488. .declare('lk19', lk[19], {
  7489. ...this.program.usesPingpongRendering()
  7490. })
  7491. .declare('lk17', lk[17], {
  7492. ...this.program.usesPingpongRendering()
  7493. })
  7494. .declare('lk15', lk[15], {
  7495. ...this.program.usesPingpongRendering()
  7496. })
  7497. .declare('lk13', lk[13], {
  7498. ...this.program.usesPingpongRendering()
  7499. })
  7500. .declare('lk11', lk[11], {
  7501. ...this.program.usesPingpongRendering()
  7502. })
  7503. .declare('lk9', lk[9], {
  7504. ...this.program.usesPingpongRendering()
  7505. })
  7506. .declare('lk7', lk[7], {
  7507. ...this.program.usesPingpongRendering()
  7508. })
  7509. .declare('lk5', lk[5], {
  7510. ...this.program.usesPingpongRendering()
  7511. })
  7512. .declare('lk3', lk[3], {
  7513. ...this.program.usesPingpongRendering()
  7514. })
  7515. .declare('transferFlow', transferFlow)
  7516. //
  7517. // Brute-force KNN matching
  7518. //
  7519. .declare('bfMatcherInitCandidates', bfMatcherInitCandidates)
  7520. .declare('bfMatcherInitFilters', bfMatcherInitFilters)
  7521. .declare('bfMatcherTransfer', bfMatcherTransfer, {
  7522. ...this.program.usesPingpongRendering()
  7523. })
  7524. .declare('bfMatcher32', bfMatcher32, {
  7525. ...this.program.usesPingpongRendering()
  7526. })
  7527. .declare('bfMatcher64', bfMatcher64, {
  7528. ...this.program.usesPingpongRendering()
  7529. })
  7530. //
  7531. // LSH-based KNN matching
  7532. //
  7533. .declare('lshKnnInitCandidates', lshKnnInitCandidates)
  7534. .declare('lshKnnInitFilters', lshKnnInitFilters)
  7535. .declare('lshKnnTransfer', lshKnnTransfer, {
  7536. ...this.program.usesPingpongRendering()
  7537. })
  7538. //
  7539. // Keypoint sorting
  7540. //
  7541. .declare('sortCreatePermutation', sortCreatePermutation)
  7542. .declare('sortMergePermutation', sortMergePermutation, {
  7543. ...this.program.usesPingpongRendering()
  7544. })
  7545. .declare('sortApplyPermutation', sortApplyPermutation)
  7546. //
  7547. // Keypoint mixing
  7548. //
  7549. .declare('mixKeypointsPreInit', mixKeypointsPreInit)
  7550. .declare('mixKeypointsInit', mixKeypointsInit)
  7551. .declare('mixKeypointsSort', mixKeypointsSort, {
  7552. ...this.program.usesPingpongRendering()
  7553. })
  7554. .declare('mixKeypointsView', mixKeypointsView)
  7555. .declare('mixKeypointsApply', mixKeypointsApply)
  7556. //
  7557. // Keypoint encoders
  7558. //
  7559. .declare('encodeNullKeypoints', encodeNullKeypoints)
  7560. .declare('encodeKeypoints', encodeKeypoints)
  7561. .declare('initLookupTable', initLookupTable)
  7562. .declare('sortLookupTable', sortLookupTable, {
  7563. ...this.program.usesPingpongRendering()
  7564. })
  7565. .declare('viewLookupTable', viewLookupTable)
  7566. .declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets)
  7567. .declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, {
  7568. ...this.program.usesPingpongRendering()
  7569. })
  7570. .declare('encodeKeypointPositions', encodeKeypointPositions, {
  7571. ...this.program.usesPingpongRendering()
  7572. })
  7573. .declare('encodeKeypointProperties', encodeKeypointProperties)
  7574. .declare('transferOrientation', transferOrientation)
  7575. .declare('uploadKeypoints', uploadKeypoints, {
  7576. ...this.program.usesPingpongRendering()
  7577. })
  7578. //
  7579. // Geometric transformations
  7580. //
  7581. .declare('applyHomography', applyHomography)
  7582. //
  7583. // Keypoint filters
  7584. //
  7585. .declare('clipBorder', clipBorder)
  7586. .declare('distanceFilter', distanceFilter)
  7587. .declare('hammingDistanceFilter32', hammingDistanceFilter32)
  7588. .declare('hammingDistanceFilter64', hammingDistanceFilter64)
  7589. //
  7590. // Other utilities
  7591. //
  7592. .declare('shuffle', shuffle)
  7593. .declare('clip', clip)
  7594. ;
  7595. //
  7596. // LSH-based KNN matching
  7597. //
  7598. for(const descriptorSize of Object.keys(lshKnn)) {
  7599. for(const hashSize of Object.keys(lshKnn[descriptorSize])) {
  7600. for(const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
  7601. const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
  7602. this.declare(name, lshKnn[descriptorSize][hashSize][level], {
  7603. ...this.program.usesPingpongRendering()
  7604. });
  7605. }
  7606. }
  7607. }
  7608. }
  7609. }
  7610. ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
  7611. /*
  7612. * speedy-vision.js
  7613. * GPU-accelerated Computer Vision for JavaScript
  7614. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7615. *
  7616. * Licensed under the Apache License, Version 2.0 (the "License");
  7617. * you may not use this file except in compliance with the License.
  7618. * You may obtain a copy of the License at
  7619. *
  7620. * http://www.apache.org/licenses/LICENSE-2.0
  7621. *
  7622. * Unless required by applicable law or agreed to in writing, software
  7623. * distributed under the License is distributed on an "AS IS" BASIS,
  7624. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7625. * See the License for the specific language governing permissions and
  7626. * limitations under the License.
  7627. *
  7628. * pyramids.js
  7629. * Image pyramids
  7630. */
  7631. //
  7632. // Shaders
  7633. //
  7634. const upsample2 = (0,shader_declaration/* importShader */.Nt)('pyramids/upsample2.glsl').withArguments('image');
  7635. const downsample2 = (0,shader_declaration/* importShader */.Nt)('pyramids/downsample2.glsl').withArguments('image');
  7636. /**
  7637. * SpeedyProgramGroupPyramids
  7638. * Image pyramids
  7639. */
  7640. class SpeedyProgramGroupPyramids extends SpeedyProgramGroup
  7641. {
  7642. /**
  7643. * Class constructor
  7644. * @param {SpeedyGPU} gpu
  7645. */
  7646. constructor(gpu)
  7647. {
  7648. super(gpu);
  7649. this
  7650. // upsampling & downsampling
  7651. .declare('upsample2', upsample2)
  7652. .declare('downsample2', downsample2)
  7653. // separable kernels for gaussian smoothing
  7654. // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
7655. // pick a = 0.4 for gaussian approximation (sigma = 1); see the sanity check after this class
  7656. .declare('smoothX', (0,convolution.convX)([
  7657. 0.05, 0.25, 0.4, 0.25, 0.05
  7658. ]))
  7659. .declare('smoothY', (0,convolution.convY)([
  7660. 0.05, 0.25, 0.4, 0.25, 0.05
  7661. ]))
  7662. /*
  7663. .declare('reduce', conv2D([
  7664. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
  7665. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7666. 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
  7667. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7668. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
  7669. ]))
  7670. */
  7671. // smoothing for 2x image
  7672. // same rules as above with sum(k) = 2
  7673. .declare('smoothX2', (0,convolution.convX)([
  7674. 0.1, 0.5, 0.8, 0.5, 0.1
  7675. // NOTE: this would saturate the image, but we apply it
  7676. // on a 2x upsampled version with lots of zero pixels
  7677. ]))
  7678. .declare('smoothY2', (0,convolution.convY)([
  7679. 0.1, 0.5, 0.8, 0.5, 0.1
  7680. ], 1.0 / 2.0))
  7681. ;
  7682. }
  7683. }
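/*
 * Sanity check for the separable smoothing kernel used above (an illustrative
 * sketch, not part of the library). With [c, b, a, b, c] = [0.05, 0.25, 0.4, 0.25, 0.05]:
 *
 *   const [c, b, a] = [0.05, 0.25, 0.4];
 *   console.assert(Math.abs((a + 2*c) - 2*b) < 1e-9);      // a + 2c = 2b
 *   console.assert(Math.abs((a + 2*b + 2*c) - 1) < 1e-9);  // a + 2b + 2c = 1
 *
 * The 2x kernel [0.1, 0.5, 0.8, 0.5, 0.1] satisfies the same rules with a total
 * weight of 2, which is compensated by the 1/2 scale passed to convY above.
 */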
  7684. ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
  7685. /*
  7686. * speedy-vision.js
  7687. * GPU-accelerated Computer Vision for JavaScript
  7688. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7689. *
  7690. * Licensed under the Apache License, Version 2.0 (the "License");
  7691. * you may not use this file except in compliance with the License.
  7692. * You may obtain a copy of the License at
  7693. *
  7694. * http://www.apache.org/licenses/LICENSE-2.0
  7695. *
  7696. * Unless required by applicable law or agreed to in writing, software
  7697. * distributed under the License is distributed on an "AS IS" BASIS,
  7698. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7699. * See the License for the specific language governing permissions and
  7700. * limitations under the License.
  7701. *
  7702. * transforms.js
  7703. * Geometric transformations
  7704. */
  7705. //
  7706. // Shaders
  7707. //
  7708. // Perspective warp
  7709. const warpPerspective = (0,shader_declaration/* importShader */.Nt)('transforms/warp-perspective.glsl')
  7710. .withArguments('image', 'inverseHomography');
  7711. // Resize image
  7712. const resizeNearest = (0,shader_declaration/* importShader */.Nt)('transforms/resize.glsl')
  7713. .withDefines({
  7714. 'INTERPOLATION_METHOD': 0 // Nearest neighbors
  7715. })
  7716. .withArguments('image');
  7717. const resizeBilinear = (0,shader_declaration/* importShader */.Nt)('transforms/resize.glsl')
  7718. .withDefines({
  7719. 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
  7720. })
  7721. .withArguments('image');
  7722. // Additive mix (TODO create a new program group?)
  7723. const additiveMix = (0,shader_declaration/* importShader */.Nt)('transforms/additive-mix.glsl')
  7724. .withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
  7725. /**
  7726. * SpeedyProgramGroupTransforms
  7727. * Geometric transformations
  7728. */
  7729. class SpeedyProgramGroupTransforms extends SpeedyProgramGroup
  7730. {
  7731. /**
  7732. * Class constructor
  7733. * @param {SpeedyGPU} gpu
  7734. */
  7735. constructor(gpu)
  7736. {
  7737. super(gpu);
  7738. this
  7739. .declare('warpPerspective', warpPerspective)
  7740. .declare('resizeNearest', resizeNearest)
  7741. .declare('resizeBilinear', resizeBilinear)
  7742. .declare('additiveMix', additiveMix)
  7743. ;
  7744. }
  7745. }
  7746. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
  7747. /*
  7748. * speedy-vision.js
  7749. * GPU-accelerated Computer Vision for JavaScript
  7750. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7751. *
  7752. * Licensed under the Apache License, Version 2.0 (the "License");
  7753. * you may not use this file except in compliance with the License.
  7754. * You may obtain a copy of the License at
  7755. *
  7756. * http://www.apache.org/licenses/LICENSE-2.0
  7757. *
  7758. * Unless required by applicable law or agreed to in writing, software
  7759. * distributed under the License is distributed on an "AS IS" BASIS,
  7760. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7761. * See the License for the specific language governing permissions and
  7762. * limitations under the License.
  7763. *
  7764. * speedy-program-center.js
  7765. * An access point to all programs that run on the GPU
  7766. */
  7767. /**
7768. * An access point to all programs that run on the GPU
  7769. * All program groups can be accessed via this class
  7770. */
  7771. class SpeedyProgramCenter
  7772. {
  7773. /**
  7774. * Class constructor
  7775. * @param {SpeedyGPU} gpu reference to SpeedyGPU
  7776. */
  7777. constructor(gpu)
  7778. {
  7779. // Note: we instantiate the program groups lazily
  7780. /** @type {SpeedyGPU} reference to SpeedyGPU */
  7781. this._gpu = gpu;
  7782. /** @type {SpeedyProgramGroupFilters} image filters */
  7783. this._filters = null;
  7784. /** @type {SpeedyProgramGroupTransforms} geometric transformations */
  7785. this._transforms = null;
  7786. /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
  7787. this._pyramids = null;
  7788. /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
  7789. this._keypoints = null;
  7790. /** @type {SpeedyProgramGroupUtils} utility programs */
  7791. this._utils = null;
  7792. }
  7793. /**
  7794. * Image filters & convolutions
  7795. * @returns {SpeedyProgramGroupFilters}
  7796. */
  7797. get filters()
  7798. {
  7799. return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
  7800. }
  7801. /**
  7802. * Geometric transformations
  7803. * @returns {SpeedyProgramGroupTransforms}
  7804. */
  7805. get transforms()
  7806. {
  7807. return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
  7808. }
  7809. /**
  7810. * Image pyramids & scale-space
  7811. * @returns {SpeedyProgramGroupPyramids}
  7812. */
  7813. get pyramids()
  7814. {
  7815. return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
  7816. }
  7817. /**
  7818. * Keypoint detection & description
  7819. * @returns {SpeedyProgramGroupKeypoints}
  7820. */
  7821. get keypoints()
  7822. {
  7823. return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
  7824. }
  7825. /**
  7826. * Utility programs
  7827. * @returns {SpeedyProgramGroupUtils}
  7828. */
  7829. get utils()
  7830. {
  7831. return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
  7832. }
  7833. /**
  7834. * Release all programs from all groups. You'll
  7835. * no longer be able to use any of them.
  7836. * @returns {null}
  7837. */
  7838. release()
  7839. {
  7840. for(const key in this) {
  7841. if(Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
  7842. const group = this[key];
  7843. if(group instanceof SpeedyProgramGroup)
  7844. group.release();
  7845. }
  7846. }
  7847. return null;
  7848. }
  7849. }
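/*
 * Usage sketch (illustrative, not part of the library): program groups are
 * instantiated lazily on first access and cached afterwards.
 *
 *   const programs = new SpeedyProgramCenter(gpu); // gpu: a SpeedyGPU instance
 *   const keypoints = programs.keypoints;          // created on first access
 *   console.assert(programs.keypoints === keypoints); // later accesses reuse the same group
 *   programs.release();                            // releases every instantiated group
 */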
  7850. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
  7851. /*
  7852. * speedy-vision.js
  7853. * GPU-accelerated Computer Vision for JavaScript
  7854. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7855. *
  7856. * Licensed under the Apache License, Version 2.0 (the "License");
  7857. * you may not use this file except in compliance with the License.
  7858. * You may obtain a copy of the License at
  7859. *
  7860. * http://www.apache.org/licenses/LICENSE-2.0
  7861. *
  7862. * Unless required by applicable law or agreed to in writing, software
  7863. * distributed under the License is distributed on an "AS IS" BASIS,
  7864. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7865. * See the License for the specific language governing permissions and
  7866. * limitations under the License.
  7867. *
  7868. * speedy-texture-pool.js
  7869. * Texture pool
  7870. */
  7871. // Constants
  7872. const DEFAULT_CAPACITY = 1024;
  7873. const BUCKET = Symbol('Bucket');
  7874. /*
  7875. === Heuristics to figure out the capacity of a texture pool ===
  7876. 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
  7877. 2. Figure out the average texture size in your application (say, 640x360 pixels).
  7878. 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
  7879. uses 4 bytes (RGBA format).
  7880. 4. Divide the maximum amount of VRAM by the average texture size in bytes
  7881. (say, 72). That's the capacity of the pool.
  7882. Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
  7883. Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  7884. */
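/*
 * Worked example of the heuristics above (illustrative numbers only):
 *
 *   const vramBudget = 64 * 1024 * 1024;                       // 1. 64 MB reserved for the pool
 *   const avgTextureBytes = 640 * 360 * 4;                     // 2-3. 921600 bytes (4 bytes per RGBA pixel)
 *   const capacity = Math.floor(vramBudget / avgTextureBytes); // 4. => 72 textures
 */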
  7885. /**
  7886. * @typedef {number} TextureBucketIndex index of a bucket in a pool
  7887. */
  7888. /**
  7889. * A bucket
  7890. */
  7891. class TextureBucket
  7892. {
  7893. /**
  7894. * Constructor
  7895. * @param {SpeedyDrawableTexture} texture managed texture
  7896. * @param {TextureBucketIndex} index index of this bucket
  7897. * @param {TextureBucketIndex} next index of the next bucket
  7898. */
  7899. constructor(texture, index, next)
  7900. {
  7901. /** @type {SpeedyDrawableTexture} managed texture */
  7902. this.texture = texture;
  7903. /** @type {TextureBucketIndex} index of this bucket */
  7904. this.index = index;
  7905. /** @type {TextureBucketIndex} index of the next bucket */
  7906. this.next = next;
  7907. /** @type {boolean} whether the texture is available or not */
  7908. this.free = true;
  7909. }
  7910. }
  7911. /**
  7912. * Texture pool
  7913. */
  7914. class SpeedyTexturePool
  7915. {
  7916. /**
  7917. * Constructor
  7918. * @param {SpeedyGPU} gpu
  7919. * @param {number} [capacity] number of textures in the pool
  7920. */
  7921. constructor(gpu, capacity = DEFAULT_CAPACITY)
  7922. {
  7923. utils/* Utils.assert */.c.assert(capacity > 0);
  7924. /** @type {TextureBucket[]} buckets */
  7925. this._bucket = Array.from({ length: capacity }, (_, i) => new TextureBucket(null, i, i - 1));
  7926. /** @type {TextureBucketIndex} index of an available bucket */
  7927. this._head = capacity - 1;
  7928. /** @type {SpeedyGPU} GPU instance */
  7929. this._gpu = gpu;
  7930. }
  7931. /**
  7932. * Get a texture from the pool
  7933. * @returns {SpeedyDrawableTexture}
  7934. */
  7935. allocate()
  7936. {
  7937. if(this._head < 0)
  7938. throw new utils_errors/* OutOfMemoryError */.Cx(`Exhausted pool (capacity: ${this._bucket.length})`);
  7939. const bucket = this._bucket[this._head];
  7940. bucket.free = false;
  7941. this._head = bucket.next;
  7942. if(bucket.texture == null) // lazy instantiation
  7943. bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
  7944. return bucket.texture;
  7945. }
  7946. /**
  7947. * Put a texture back in the pool
  7948. * @param {SpeedyDrawableTexture} texture
  7949. * @returns {null}
  7950. */
  7951. free(texture)
  7952. {
  7953. const bucket = texture[BUCKET];
  7954. utils/* Utils.assert */.c.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
  7955. bucket.next = this._head;
  7956. bucket.free = true;
  7957. this._head = bucket.index;
  7958. return null;
  7959. }
  7960. /**
  7961. * Release the texture pool
  7962. * @returns {null}
  7963. */
  7964. release()
  7965. {
  7966. for(let i = 0; i < this._bucket.length; i++) {
  7967. if(this._bucket[i].texture != null)
  7968. this._bucket[i].texture = this._bucket[i].texture.release();
  7969. }
  7970. return null;
  7971. }
  7972. /**
  7973. * Create a texture with a reference to a bucket
  7974. * @param {WebGL2RenderingContext} gl
  7975. * @param {TextureBucket} bucket
  7976. * @returns {SpeedyDrawableTexture}
  7977. */
  7978. static _createManagedTexture(gl, bucket)
  7979. {
  7980. const texture = new SpeedyDrawableTexture(gl, 1, 1);
  7981. return Object.defineProperty(texture, BUCKET, {
  7982. configurable: false,
  7983. enumerable: false,
  7984. writable: false,
  7985. value: bucket
  7986. });
  7987. }
  7988. }
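/*
 * Minimal usage sketch (illustrative, not part of the library). The buckets form
 * a free list: allocate() pops a bucket from the head, free() pushes it back.
 *
 *   const pool = new SpeedyTexturePool(gpu, 16); // gpu: a SpeedyGPU instance
 *   const texture = pool.allocate();             // the texture is created lazily on first use
 *   // ... draw to / read from the texture ...
 *   pool.free(texture);                          // return it to the pool for reuse
 *   pool.release();                              // destroy all pooled textures when done
 */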
  7989. // EXTERNAL MODULE: ./src/utils/types.js
  7990. var types = __nested_webpack_require_312600__(6731);
  7991. ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
  7992. /*
  7993. * speedy-vision.js
  7994. * GPU-accelerated Computer Vision for JavaScript
  7995. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  7996. *
  7997. * Licensed under the Apache License, Version 2.0 (the "License");
  7998. * you may not use this file except in compliance with the License.
  7999. * You may obtain a copy of the License at
  8000. *
  8001. * http://www.apache.org/licenses/LICENSE-2.0
  8002. *
  8003. * Unless required by applicable law or agreed to in writing, software
  8004. * distributed under the License is distributed on an "AS IS" BASIS,
  8005. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8006. * See the License for the specific language governing permissions and
  8007. * limitations under the License.
  8008. *
  8009. * speedy-media-source.js
  8010. * Wrappers around <img>, <video>, <canvas>, etc.
  8011. */
  8012. /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap} SpeedyMediaSourceNativeElement */
  8013. /** Internal token for protected constructors */
  8014. const PRIVATE_TOKEN = Symbol();
  8015. /**
  8016. * An abstract media source: a wrapper around native
  8017. * elements such as: HTMLImageElement, HTMLVideoElement,
  8018. * and so on
  8019. * @abstract
  8020. */
  8021. class SpeedyMediaSource
  8022. {
  8023. /**
  8024. * @protected Constructor
  8025. * @param {symbol} token
  8026. */
  8027. constructor(token)
  8028. {
  8029. // the constructor is not public
  8030. if(token !== PRIVATE_TOKEN)
  8031. throw new utils_errors/* IllegalOperationError */.js();
  8032. /** @type {SpeedyMediaSourceNativeElement} underlying media object */
  8033. this._data = null;
  8034. }
  8035. /**
  8036. * Load a media source
  8037. * @param {SpeedyMediaSourceNativeElement} wrappedObject
  8038. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8039. */
  8040. static load(wrappedObject)
  8041. {
  8042. if(wrappedObject instanceof HTMLImageElement)
  8043. return SpeedyImageMediaSource.load(wrappedObject);
  8044. else if(wrappedObject instanceof HTMLVideoElement)
  8045. return SpeedyVideoMediaSource.load(wrappedObject);
  8046. else if(wrappedObject instanceof HTMLCanvasElement)
  8047. return SpeedyCanvasMediaSource.load(wrappedObject);
  8048. else if(typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas)
  8049. return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);
  8050. else if(wrappedObject instanceof ImageBitmap)
  8051. return SpeedyBitmapMediaSource.load(wrappedObject);
  8052. else
  8053. throw new utils_errors/* IllegalArgumentError */.mG(`Unsupported media type: ${wrappedObject}`);
  8054. }
  8055. /**
  8056. * The underlying wrapped object
  8057. * @returns {SpeedyMediaSourceNativeElement}
  8058. */
  8059. get data()
  8060. {
  8061. return this._data;
  8062. }
  8063. /**
  8064. * Is the underlying media loaded?
  8065. * @returns {boolean}
  8066. */
  8067. isLoaded()
  8068. {
  8069. return this._data !== null;
  8070. }
  8071. /**
  8072. * The type of the underlying media source
  8073. * @abstract
  8074. * @returns {MediaType}
  8075. */
  8076. get type()
  8077. {
  8078. throw new utils_errors/* AbstractMethodError */.Mi();
  8079. }
  8080. /**
  8081. * Media width, in pixels
  8082. * @abstract
  8083. * @returns {number}
  8084. */
  8085. get width()
  8086. {
  8087. throw new utils_errors/* AbstractMethodError */.Mi();
  8088. }
  8089. /**
  8090. * Media height, in pixels
  8091. * @abstract
  8092. * @returns {number}
  8093. */
  8094. get height()
  8095. {
  8096. throw new utils_errors/* AbstractMethodError */.Mi();
  8097. }
  8098. /**
  8099. * Clone this media source
  8100. * @abstract
  8101. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8102. */
  8103. clone()
  8104. {
  8105. throw new utils_errors/* AbstractMethodError */.Mi();
  8106. }
  8107. /**
  8108. * Release resources associated with this object
  8109. * @returns {null}
  8110. */
  8111. release()
  8112. {
  8113. return (this._data = null);
  8114. }
  8115. /**
  8116. * Load the underlying media
  8117. * @abstract
  8118. * @param {SpeedyMediaSourceNativeElement} element
  8119. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8120. */
  8121. _load(element)
  8122. {
  8123. throw new utils_errors/* AbstractMethodError */.Mi();
  8124. }
  8125. /**
  8126. * Wait for an event to be triggered in an element
  8127. * @param {Element} element
  8128. * @param {string} eventName
  8129. * @param {number} [timeout] in ms
  8130. * @returns {SpeedyPromise<Element>}
  8131. */
  8132. static _waitUntil(element, eventName, timeout = 30000)
  8133. {
  8134. return new speedy_promise/* SpeedyPromise */.s((resolve, reject) => {
  8135. utils/* Utils.log */.c.log(`Waiting for ${eventName} to be triggered in ${element}...`);
  8136. const timer = setTimeout(() => {
  8137. clear();
  8138. reject(new utils_errors/* TimeoutError */.W5(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
  8139. }, timeout);
  8140. function clear()
  8141. {
  8142. clearTimeout(timer);
  8143. element.removeEventListener('error', handleError, false);
  8144. element.removeEventListener(eventName, handleSuccess, false);
  8145. }
  8146. function handleError()
  8147. {
  8148. const hasError = (element.error !== null && typeof element.error === 'object');
  8149. const error = hasError ? element.error : ({ code: -1, message: '' });
  8150. const info = `${error.message} (error code ${error.code})`;
  8151. clear();
  8152. reject(new utils_errors/* ResourceNotLoadedError */.tg(`Can't load ${element}. ${info}`));
  8153. }
  8154. function handleSuccess()
  8155. {
  8156. clear();
  8157. resolve(element);
  8158. }
  8159. element.addEventListener('error', handleError, false);
  8160. element.addEventListener(eventName, handleSuccess, false);
  8161. });
  8162. }
  8163. }
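/*
 * Usage sketch of the load() dispatcher above (illustrative; assumes an <img>
 * element exists on the page):
 *
 *   const img = document.querySelector('img');
 *   SpeedyMediaSource.load(img).then(source => {
 *       console.log(source.width, source.height); // natural dimensions of the image
 *   });
 */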
  8164. /**
  8165. * Image media source:
  8166. * a wrapper around HTMLImageElement
  8167. */
  8168. class SpeedyImageMediaSource extends SpeedyMediaSource
  8169. {
  8170. /**
  8171. * @private Constructor
  8172. * @param {symbol} token
  8173. */
  8174. constructor(token)
  8175. {
  8176. super(token);
  8177. /** @type {HTMLImageElement} image element */
  8178. this._data = null;
  8179. }
  8180. /**
  8181. * The underlying wrapped object
  8182. * @returns {HTMLImageElement}
  8183. */
  8184. get data()
  8185. {
  8186. return this._data;
  8187. }
  8188. /**
  8189. * The type of the underlying media source
  8190. * @returns {MediaType}
  8191. */
  8192. get type()
  8193. {
  8194. return types/* MediaType.Image */.DD.Image;
  8195. }
  8196. /**
  8197. * Media width, in pixels
  8198. * @returns {number}
  8199. */
  8200. get width()
  8201. {
  8202. return this._data ? this._data.naturalWidth : 0;
  8203. }
  8204. /**
  8205. * Media height, in pixels
  8206. * @returns {number}
  8207. */
  8208. get height()
  8209. {
  8210. return this._data ? this._data.naturalHeight : 0;
  8211. }
  8212. /**
  8213. * Clone this media source
  8214. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8215. */
  8216. clone()
  8217. {
  8218. if(this._data == null)
  8219. throw new utils_errors/* IllegalOperationError */.js(`Media not loaded`);
  8220. const newNode = /** @type {HTMLImageElement} */ ( this._data.cloneNode(true) );
  8221. return SpeedyImageMediaSource.load(newNode);
  8222. }
  8223. /**
  8224. * Load the underlying media
  8225. * @param {HTMLImageElement} image
  8226. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8227. */
  8228. _load(image)
  8229. {
  8230. if(this.isLoaded())
  8231. this.release();
  8232. if(image.complete && image.naturalWidth !== 0) { // already loaded?
  8233. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  8234. this._data = image;
  8235. resolve(this);
  8236. });
  8237. }
  8238. else {
  8239. return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
  8240. this._data = image;
  8241. return this;
  8242. });
  8243. }
  8244. }
  8245. /**
  8246. * Load the underlying media
  8247. * @param {HTMLImageElement} image
  8248. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8249. */
  8250. static load(image)
  8251. {
  8252. return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
  8253. }
  8254. }
  8255. /**
  8256. * Video media source:
  8257. * a wrapper around HTMLVideoElement
  8258. */
  8259. class SpeedyVideoMediaSource extends SpeedyMediaSource
  8260. {
  8261. /**
  8262. * @private Constructor
  8263. * @param {symbol} token
  8264. */
  8265. constructor(token)
  8266. {
  8267. super(token);
  8268. /** @type {HTMLVideoElement} video element */
  8269. this._data = null;
  8270. }
  8271. /**
  8272. * The underlying wrapped object
  8273. * @returns {HTMLVideoElement}
  8274. */
  8275. get data()
  8276. {
  8277. return this._data;
  8278. }
  8279. /**
  8280. * The type of the underlying media source
  8281. * @returns {MediaType}
  8282. */
  8283. get type()
  8284. {
  8285. return types/* MediaType.Video */.DD.Video;
  8286. }
  8287. /**
  8288. * Media width, in pixels
  8289. * @returns {number}
  8290. */
  8291. get width()
  8292. {
  8293. // Warning: videoWidth & videoHeight may change at any time !!!
  8294. // so you can't cache these dimensions
  8295. return this._data ? this._data.videoWidth : 0;
  8296. }
  8297. /**
  8298. * Media height, in pixels
  8299. * @returns {number}
  8300. */
  8301. get height()
  8302. {
  8303. return this._data ? this._data.videoHeight : 0;
  8304. }
  8305. /**
  8306. * Clone this media source
  8307. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8308. */
  8309. clone()
  8310. {
  8311. if(this._data == null)
  8312. throw new utils_errors/* IllegalOperationError */.js(`Media not loaded`);
  8313. const newNode = /** @type {HTMLVideoElement} */ ( this._data.cloneNode(true) );
  8314. return SpeedyVideoMediaSource.load(newNode);
  8315. }
  8316. /**
  8317. * Load the underlying media
  8318. * @param {HTMLVideoElement} video
  8319. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8320. */
  8321. _load(video)
  8322. {
  8323. if(this.isLoaded())
  8324. this.release();
  8325. if(video.readyState >= 4) { // already loaded?
  8326. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  8327. this._data = video;
  8328. resolve(this);
  8329. });
  8330. }
  8331. else {
  8332. // waitUntil('canplay'); // use readyState >= 3
  8333. setTimeout(() => video.load());
  8334. return SpeedyMediaSource._waitUntil(video, 'canplaythrough').then(() => {
  8335. this._data = video;
  8336. return this;
  8337. });
  8338. }
  8339. }
  8340. /**
  8341. * Load the underlying media
  8342. * @param {HTMLVideoElement} video
  8343. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8344. */
  8345. static load(video)
  8346. {
  8347. return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
  8348. }
  8349. }
  8350. /**
  8351. * Canvas media source:
  8352. * a wrapper around HTMLCanvasElement
  8353. */
  8354. class SpeedyCanvasMediaSource extends SpeedyMediaSource
  8355. {
  8356. /**
  8357. * @private Constructor
  8358. * @param {symbol} token
  8359. */
  8360. constructor(token)
  8361. {
  8362. super(token);
  8363. /** @type {HTMLCanvasElement} canvas element */
  8364. this._data = null;
  8365. }
  8366. /**
  8367. * The underlying wrapped object
  8368. * @returns {HTMLCanvasElement}
  8369. */
  8370. get data()
  8371. {
  8372. return this._data;
  8373. }
  8374. /**
  8375. * The type of the underlying media source
  8376. * @returns {MediaType}
  8377. */
  8378. get type()
  8379. {
  8380. return types/* MediaType.Canvas */.DD.Canvas;
  8381. }
  8382. /**
  8383. * Media width, in pixels
  8384. * @returns {number}
  8385. */
  8386. get width()
  8387. {
  8388. return this._data ? this._data.width : 0;
  8389. }
  8390. /**
  8391. * Media height, in pixels
  8392. * @returns {number}
  8393. */
  8394. get height()
  8395. {
  8396. return this._data ? this._data.height : 0;
  8397. }
  8398. /**
  8399. * Clone this media source
  8400. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8401. */
  8402. clone()
  8403. {
  8404. if(this._data == null)
  8405. throw new utils_errors/* IllegalOperationError */.js(`Media not loaded`);
  8406. const newCanvas = utils/* Utils.createCanvas */.c.createCanvas(this.width, this.height);
  8407. const newContext = newCanvas.getContext('2d');
  8408. newContext.drawImage(this._data, 0, 0);
  8409. return SpeedyCanvasMediaSource.load(newCanvas);
  8410. }
  8411. /**
  8412. * Load the underlying media
  8413. * @param {HTMLCanvasElement} canvas
  8414. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8415. */
  8416. _load(canvas)
  8417. {
  8418. if(this.isLoaded())
  8419. this.release();
  8420. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  8421. this._data = canvas;
  8422. resolve(this);
  8423. });
  8424. }
  8425. /**
  8426. * Load the underlying media
  8427. * @param {HTMLCanvasElement} canvas
  8428. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8429. */
  8430. static load(canvas)
  8431. {
  8432. return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
  8433. }
  8434. }
  8435. /**
  8436. * OffscreenCanvas media source:
  8437. * a wrapper around OffscreenCanvas
  8438. */
  8439. class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource
  8440. {
  8441. /**
  8442. * @private Constructor
  8443. * @param {symbol} token
  8444. */
  8445. constructor(token)
  8446. {
  8447. super(token);
  8448. /** @type {OffscreenCanvas} offscreen canvas element */
  8449. this._data = null;
  8450. }
  8451. /**
  8452. * The underlying wrapped object
  8453. * @returns {OffscreenCanvas}
  8454. */
  8455. get data()
  8456. {
  8457. return this._data;
  8458. }
  8459. /**
  8460. * The type of the underlying media source
  8461. * @returns {MediaType}
  8462. */
  8463. get type()
  8464. {
  8465. return types/* MediaType.Canvas */.DD.Canvas; // or a new MediaType for OffscreenCanvas if necessary
  8466. }
  8467. /**
  8468. * Media width, in pixels
  8469. * @returns {number}
  8470. */
  8471. get width()
  8472. {
  8473. return this._data ? this._data.width : 0;
  8474. }
  8475. /**
  8476. * Media height, in pixels
  8477. * @returns {number}
  8478. */
  8479. get height()
  8480. {
  8481. return this._data ? this._data.height : 0;
  8482. }
  8483. /**
  8484. * Clone this media source
  8485. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8486. */
  8487. clone()
  8488. {
  8489. if(this._data == null)
  8490. throw new utils_errors/* IllegalOperationError */.js(`Media not loaded`);
  8491. const newCanvas = new OffscreenCanvas(this.width, this.height);
  8492. const newContext = newCanvas.getContext('2d');
  8493. newContext.drawImage(this._data, 0, 0);
  8494. return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
  8495. }
  8496. /**
  8497. * Load the underlying media
  8498. * @param {OffscreenCanvas} offscreenCanvas
  8499. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8500. */
  8501. _load(offscreenCanvas)
  8502. {
  8503. if(this.isLoaded())
  8504. this.release();
  8505. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  8506. this._data = offscreenCanvas;
  8507. resolve(this);
  8508. });
  8509. }
  8510. /**
  8511. * Load the underlying media
  8512. * @param {OffscreenCanvas} offscreenCanvas
  8513. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8514. */
  8515. static load(offscreenCanvas)
  8516. {
  8517. return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
  8518. }
  8519. }
  8520. /**
  8521. * Bitmap media source:
  8522. * a wrapper around ImageBitmap
  8523. */
  8524. class SpeedyBitmapMediaSource extends SpeedyMediaSource
  8525. {
  8526. /**
  8527. * @private Constructor
  8528. * @param {symbol} token
  8529. */
  8530. constructor(token)
  8531. {
  8532. super(token);
  8533. /** @type {ImageBitmap} image bitmap */
  8534. this._data = null;
  8535. }
  8536. /**
  8537. * The underlying wrapped object
  8538. * @returns {ImageBitmap}
  8539. */
  8540. get data()
  8541. {
  8542. return this._data;
  8543. }
  8544. /**
  8545. * The type of the underlying media source
  8546. * @returns {MediaType}
  8547. */
  8548. get type()
  8549. {
  8550. return types/* MediaType.Bitmap */.DD.Bitmap;
  8551. }
  8552. /**
  8553. * Media width, in pixels
  8554. * @returns {number}
  8555. */
  8556. get width()
  8557. {
  8558. return this._data ? this._data.width : 0;
  8559. }
  8560. /**
  8561. * Media height, in pixels
  8562. * @returns {number}
  8563. */
  8564. get height()
  8565. {
  8566. return this._data ? this._data.height : 0;
  8567. }
  8568. /**
  8569. * Clone this media source
  8570. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8571. */
  8572. clone()
  8573. {
  8574. if(this._data == null)
  8575. throw new utils_errors/* IllegalOperationError */.js(`Media not loaded`);
  8576. return new speedy_promise/* SpeedyPromise */.s((resolve, reject) => {
  8577. createImageBitmap(this._data).then(
  8578. newBitmap => {
  8579. const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
  8580. newSource._load(newBitmap).then(resolve, reject);
  8581. },
  8582. reject
  8583. );
  8584. });
  8585. }
  8586. /**
  8587. * Release resources associated with this object
  8588. * @returns {null}
  8589. */
  8590. release()
  8591. {
  8592. if(this._data != null)
  8593. this._data.close();
  8594. return super.release();
  8595. }
  8596. /**
  8597. * Load the underlying media
  8598. * @param {ImageBitmap} bitmap
  8599. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8600. */
  8601. _load(bitmap)
  8602. {
  8603. if(this.isLoaded())
  8604. this.release();
  8605. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  8606. this._data = bitmap;
  8607. resolve(this);
  8608. });
  8609. }
  8610. /**
  8611. * Load the underlying media
  8612. * @param {ImageBitmap} bitmap
  8613. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8614. */
  8615. static load(bitmap)
  8616. {
  8617. return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
  8618. }
  8619. }
  8620. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-uploader.js
  8621. /*
  8622. * speedy-vision.js
  8623. * GPU-accelerated Computer Vision for JavaScript
  8624. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  8625. *
  8626. * Licensed under the Apache License, Version 2.0 (the "License");
  8627. * you may not use this file except in compliance with the License.
  8628. * You may obtain a copy of the License at
  8629. *
  8630. * http://www.apache.org/licenses/LICENSE-2.0
  8631. *
  8632. * Unless required by applicable law or agreed to in writing, software
  8633. * distributed under the License is distributed on an "AS IS" BASIS,
  8634. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8635. * See the License for the specific language governing permissions and
  8636. * limitations under the License.
  8637. *
  8638. * speedy-texture-uploader.js
  8639. * A utility that helps uploading data to textures
  8640. */
  8641. /**
  8642. * A utility that helps uploading data to textures
  8643. */
  8644. class SpeedyTextureUploader
  8645. {
  8646. /**
  8647. * Constructor
  8648. * @param {SpeedyGPU} gpu
  8649. */
  8650. constructor(gpu)
  8651. {
  8652. /** @type {SpeedyGPU} GPU instance */
  8653. this._gpu = gpu;
  8654. }
  8655. /**
  8656. * Upload an image to the GPU
  8657. * @param {SpeedyMediaSource} source
  8658. * @param {SpeedyTexture} outputTexture
  8659. * @returns {SpeedyTexture} output texture
  8660. */
  8661. upload(source, outputTexture)
  8662. {
  8663. const data = source.data;
  8664. // bugfix: if the media is a video, we can't really
  8665. // upload it to the GPU unless it's ready
  8666. //if(data.constructor.name == 'HTMLVideoElement') {
  8667. if(data instanceof HTMLVideoElement) {
  8668. if(data.readyState < 2) {
  8669. // this may happen when the video loops (Firefox)
  8670. // return the previously uploaded texture
  8671. //Utils.warning(`Trying to process a video that isn't ready yet`);
  8672. return outputTexture;
  8673. }
  8674. }
  8675. // upload to the output texture
  8676. return outputTexture.upload(data, source.width, source.height);
  8677. }
  8678. /**
  8679. * Release the texture uploader
  8680. * @returns {null}
  8681. */
  8682. release()
  8683. {
  8684. return null;
  8685. }
  8686. }
  8687. // EXTERNAL MODULE: ./src/utils/observable.js
  8688. var observable = __nested_webpack_require_312600__(9845);
  8689. ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
  8690. /*
  8691. * speedy-vision.js
  8692. * GPU-accelerated Computer Vision for JavaScript
  8693. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  8694. *
  8695. * Licensed under the Apache License, Version 2.0 (the "License");
  8696. * you may not use this file except in compliance with the License.
  8697. * You may obtain a copy of the License at
  8698. *
  8699. * http://www.apache.org/licenses/LICENSE-2.0
  8700. *
  8701. * Unless required by applicable law or agreed to in writing, software
  8702. * distributed under the License is distributed on an "AS IS" BASIS,
  8703. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8704. * See the License for the specific language governing permissions and
  8705. * limitations under the License.
  8706. *
  8707. * speedy-gpu.js
  8708. * GPU-accelerated routines for Computer Vision
  8709. */
  8710. /**
  8711. * GPU-accelerated routines for Computer Vision
  8712. */
  8713. class SpeedyGPU extends observable/* Observable */.y
  8714. {
  8715. /**
  8716. * Constructor
  8717. */
  8718. constructor()
  8719. {
  8720. super();
  8721. /** @type {SpeedyGL} cached reference */
  8722. this._speedyGL = speedy_gl/* SpeedyGL.instance */.$.instance;
  8723. /** @type {SpeedyProgramCenter} GPU-based programs */
  8724. this._programs = new SpeedyProgramCenter(this);
  8725. /** @type {SpeedyTexturePool} texture pool */
  8726. this._texturePool = new SpeedyTexturePool(this);
  8727. /** @type {SpeedyTextureUploader} texture uploader */
  8728. this._textureUploader = new SpeedyTextureUploader(this);
  8729. // recreate the state if necessary
  8730. this._speedyGL.subscribe(this._reset, this);
  8731. }
  8732. /**
  8733. * Access point to all GPU programs
  8734. * @returns {SpeedyProgramCenter}
  8735. */
  8736. get programs()
  8737. {
  8738. return this._programs;
  8739. }
  8740. /**
  8741. * The WebGL Rendering Context
  8742. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  8743. * @returns {WebGL2RenderingContext}
  8744. */
  8745. get gl()
  8746. {
  8747. return this._speedyGL.gl;
  8748. }
  8749. /**
  8750. * Internal canvas
  8751. * @returns {HTMLCanvasElement}
  8752. */
  8753. get canvas()
  8754. {
  8755. return this._speedyGL.canvas;
  8756. }
  8757. /**
  8758. * Texture pool
  8759. * @returns {SpeedyTexturePool}
  8760. */
  8761. get texturePool()
  8762. {
  8763. return this._texturePool;
  8764. }
  8765. /**
  8766. * Renders a texture to the canvas
  8767. * @param {SpeedyTexture} texture
  8768. * @returns {HTMLCanvasElement} returned for convenience
  8769. */
  8770. renderToCanvas(texture)
  8771. {
  8772. const width = texture.width;
  8773. const height = texture.height;
  8774. const canvas = this.canvas;
  8775. // do we need to resize the canvas?
  8776. if(width > canvas.width || height > canvas.height) {
  8777. utils/* Utils.warning */.c.warning(`Resizing the canvas to ${width} x ${height}`);
  8778. canvas.width = width;
  8779. canvas.height = height;
  8780. }
  8781. // render
  8782. this.programs.utils.renderToCanvas.outputs(width, height, null);
  8783. this.programs.utils.renderToCanvas(texture);
  8784. // done!
  8785. return canvas;
  8786. }
  8787. /**
  8788. * Upload an image to the GPU
  8789. * @param {SpeedyMediaSource} source
  8790. * @param {SpeedyTexture} outputTexture
  8791. * @returns {SpeedyTexture} outputTexture
  8792. */
  8793. upload(source, outputTexture)
  8794. {
  8795. return this._textureUploader.upload(source, outputTexture);
  8796. }
  8797. /**
  8798. * Releases resources
  8799. * @returns {null}
  8800. */
  8801. release()
  8802. {
  8803. utils/* Utils.assert */.c.assert(!this.isReleased());
  8804. // release internal components
  8805. this._programs = this._programs.release();
  8806. this._texturePool = this._texturePool.release();
  8807. this._textureUploader = this._textureUploader.release();
  8808. // unsubscribe
  8809. this._speedyGL.unsubscribe(this._reset);
  8810. return null;
  8811. }
  8812. /**
  8813. * Has this SpeedyGPU been released?
  8814. * @returns {boolean}
  8815. */
  8816. isReleased()
  8817. {
  8818. return this._programs == null;
  8819. }
  8820. /**
  8821. * Lose & restore the WebGL context (useful for testing purposes)
8822. * @returns {SpeedyPromise<void>} resolves as soon as the context is restored
  8823. */
  8824. loseAndRestoreWebGLContext()
  8825. {
  8826. return this._speedyGL.loseAndRestoreContext().then(() => void(0));
  8827. }
  8828. /**
  8829. * Reset the internal state
  8830. * (called on context reset)
  8831. */
  8832. _reset()
  8833. {
  8834. if(this.isReleased())
  8835. return;
  8836. this._programs = new SpeedyProgramCenter(this);
  8837. this._texturePool = new SpeedyTexturePool(this);
  8838. this._textureUploader = new SpeedyTextureUploader(this);
  8839. this._notify();
  8840. }
  8841. }
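// Illustrative usage sketch for SpeedyGPU (an assumption, not taken from the source):
// `mediaSource` is a loaded SpeedyMediaSource and `texture` a SpeedyTexture obtained elsewhere.
//
//   const gpu = new SpeedyGPU();
//   gpu.upload(mediaSource, texture);   // copy the media to the GPU texture
//   gpu.renderToCanvas(texture);        // draw the texture on the internal canvas
//   gpu.release();                      // free programs, texture pool and uploader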
  8842. ;// CONCATENATED MODULE: ./src/core/speedy-size.js
  8843. /*
  8844. * speedy-vision.js
  8845. * GPU-accelerated Computer Vision for JavaScript
  8846. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  8847. *
  8848. * Licensed under the Apache License, Version 2.0 (the "License");
  8849. * you may not use this file except in compliance with the License.
  8850. * You may obtain a copy of the License at
  8851. *
  8852. * http://www.apache.org/licenses/LICENSE-2.0
  8853. *
  8854. * Unless required by applicable law or agreed to in writing, software
  8855. * distributed under the License is distributed on an "AS IS" BASIS,
  8856. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8857. * See the License for the specific language governing permissions and
  8858. * limitations under the License.
  8859. *
  8860. * speedy-size.js
  8861. * Size of a rectangle
  8862. */
  8863. /**
  8864. * Size of a rectangle
  8865. */
  8866. class SpeedySize
  8867. {
  8868. /**
  8869. * Constructor
  8870. * @param {number} width non-negative number
  8871. * @param {number} height non-negative number
  8872. */
  8873. constructor(width, height)
  8874. {
  8875. /** @type {number} width */
  8876. this._width = Math.max(0, +width);
  8877. /** @type {number} height */
  8878. this._height = Math.max(0, +height);
  8879. }
  8880. //
  8881. // ===== METHODS =====
  8882. //
  8883. /**
  8884. * Width
  8885. * @returns {number}
  8886. */
  8887. get width()
  8888. {
  8889. return this._width;
  8890. }
  8891. /**
  8892. * Width
  8893. * @param {number} value
  8894. */
  8895. set width(value)
  8896. {
  8897. this._width = Math.max(0, +value);
  8898. }
  8899. /**
  8900. * Height
  8901. * @returns {number}
  8902. */
  8903. get height()
  8904. {
  8905. return this._height;
  8906. }
  8907. /**
  8908. * Height
  8909. * @param {number} value
  8910. */
  8911. set height(value)
  8912. {
  8913. this._height = Math.max(0, +value);
  8914. }
  8915. /**
  8916. * Convert to string
  8917. * @returns {string}
  8918. */
  8919. toString()
  8920. {
  8921. return `SpeedySize(${this.width}, ${this.height})`;
  8922. }
  8923. /**
  8924. * Is this size equal to anotherSize?
  8925. * @param {SpeedySize} anotherSize
  8926. * @returns {boolean}
  8927. */
  8928. equals(anotherSize)
  8929. {
  8930. return this.width === anotherSize.width && this.height === anotherSize.height;
  8931. }
  8932. /**
  8933. * The area of the rectangle
  8934. * @returns {number}
  8935. */
  8936. area()
  8937. {
  8938. return this.width * this.height;
  8939. }
  8940. }
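// Usage sketch for SpeedySize (illustrative only; values follow the methods above):
//
//   const size = new SpeedySize(640, 480);
//   size.width;                              // 640
//   size.area();                             // 307200
//   size.equals(new SpeedySize(640, 480));   // true
//   String(size);                            // "SpeedySize(640, 480)"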
  8941. ;// CONCATENATED MODULE: ./src/core/speedy-media.js
  8942. /*
  8943. * speedy-vision.js
  8944. * GPU-accelerated Computer Vision for JavaScript
  8945. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  8946. *
  8947. * Licensed under the Apache License, Version 2.0 (the "License");
  8948. * you may not use this file except in compliance with the License.
  8949. * You may obtain a copy of the License at
  8950. *
  8951. * http://www.apache.org/licenses/LICENSE-2.0
  8952. *
  8953. * Unless required by applicable law or agreed to in writing, software
  8954. * distributed under the License is distributed on an "AS IS" BASIS,
  8955. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8956. * See the License for the specific language governing permissions and
  8957. * limitations under the License.
  8958. *
  8959. * speedy-media.js
  8960. * SpeedyMedia implementation
  8961. */
  8962. /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  8963. /**
  8964. * @typedef {object} SpeedyMediaOptions
  8965. * @property {ImageFormat} [format] default is RGBA
  8966. */
  8967. /** A helper used to keep the constructor of SpeedyMedia private */
  8968. const speedy_media_PRIVATE_TOKEN = Symbol();
  8969. /**
  8970. * SpeedyMedia encapsulates a media element
  8971. * (e.g., image, video, canvas)
  8972. */
  8973. class SpeedyMedia
  8974. {
  8975. /**
  8976. * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
  8977. * @param {symbol} token
  8978. * @param {SpeedyMediaSource} source
  8979. * @param {SpeedyMediaOptions} [options] options object
  8980. */
  8981. constructor(token, source, options = {})
  8982. {
  8983. // private constructor
  8984. if(token !== speedy_media_PRIVATE_TOKEN)
  8985. throw new utils_errors/* IllegalOperationError */.js();
  8986. /** @type {SpeedyMediaSource} media source */
  8987. this._source = source;
  8988. /** @type {ImageFormat} format */
  8989. this._format = options.format !== undefined ? options.format : types/* ImageFormat.RGBA */.D3.RGBA;
  8990. /** @type {SpeedyMediaOptions} options */
  8991. this._options = Object.freeze({ ...options, format: this._format });
  8992. // validate
  8993. if(!source.isLoaded())
  8994. throw new utils_errors/* IllegalOperationError */.js(`Source not loaded: ${source}`);
  8995. else if(this._format !== types/* ImageFormat.RGBA */.D3.RGBA && this._format !== types/* ImageFormat.GREY */.D3.GREY)
  8996. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid format: ${this._format}`);
  8997. }
  8998. /**
  8999. * Load a media source
  9000. * Will wait until the HTML media source is loaded
  9001. * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
  9002. * @param {SpeedyMediaOptions} [options] options object
  9003. * @param {boolean} [log] show log message?
  9004. * @returns {SpeedyPromise<SpeedyMedia>}
  9005. */
  9006. static load(mediaSource, options = {}, log = true)
  9007. {
  9008. return SpeedyMediaSource.load(mediaSource).then(source => {
  9009. utils/* Utils.assert */.c.assert(source.width !== 0 && source.height !== 0);
  9010. // FIXME user could pass an invalid format in options if ImageFormat is made public
  9011. const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
  9012. // show log message
  9013. if(log)
  9014. utils/* Utils.log */.c.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
  9015. // done!
  9016. return media;
  9017. });
  9018. }
  9019. /**
  9020. * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
  9021. * @returns {SpeedyMediaSourceNativeElement} the media element
  9022. */
  9023. get source()
  9024. {
  9025. return this._source ? this._source.data : null;
  9026. }
  9027. /**
  9028. * The type of the media attached to this SpeedyMedia object
  9029. * @returns {"image" | "video" | "canvas" | "bitmap" | "unknown"}
  9030. */
  9031. get type()
  9032. {
  9033. if(this.isReleased())
  9034. return 'unknown';
  9035. switch(this._source.type) {
  9036. case types/* MediaType.Image */.DD.Image:
  9037. return 'image';
  9038. case types/* MediaType.Video */.DD.Video:
  9039. return 'video';
  9040. case types/* MediaType.Canvas */.DD.Canvas:
  9041. return 'canvas';
  9042. case types/* MediaType.Bitmap */.DD.Bitmap:
  9043. return 'bitmap';
  9044. default: // this shouldn't happen
  9045. return 'unknown';
  9046. }
  9047. }
  9048. /**
  9049. * Gets the width of the media
  9050. * @returns {number} media width
  9051. */
  9052. get width()
  9053. {
  9054. return this._source ? this._source.width : 0;
  9055. }
  9056. /**
  9057. * Gets the height of the media
  9058. * @returns {number} media height
  9059. */
  9060. get height()
  9061. {
  9062. return this._source ? this._source.height : 0;
  9063. }
  9064. /**
  9065. * The size of this media, in pixels
  9066. * @returns {SpeedySize}
  9067. */
  9068. get size()
  9069. {
  9070. return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
  9071. }
  9072. /**
  9073. * Returns a read-only object featuring advanced options
  9074. * related to this SpeedyMedia object
  9075. * @returns {SpeedyMediaOptions}
  9076. */
  9077. get options()
  9078. {
  9079. return this._options;
  9080. }
  9081. /**
  9082. * Releases resources associated with this media
  9083. * @returns {null}
  9084. */
  9085. release()
  9086. {
  9087. if(!this.isReleased()) {
  9088. utils/* Utils.log */.c.log('Releasing SpeedyMedia object...');
  9089. this._source = this._source.release();
  9090. }
  9091. return null;
  9092. }
  9093. /**
  9094. * Has this media been released?
  9095. * @returns {boolean}
  9096. */
  9097. isReleased()
  9098. {
  9099. return this._source == null;
  9100. }
  9101. /**
  9102. * Clones the SpeedyMedia object
  9103. * @returns {SpeedyPromise<SpeedyMedia>} a clone object
  9104. */
  9105. clone()
  9106. {
  9107. // has the media been released?
  9108. if(this.isReleased())
  9109. throw new utils_errors/* IllegalOperationError */.js(`Can't clone a SpeedyMedia that has been released`);
  9110. // clone the object
  9111. const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
  9112. // done!
  9113. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(clone);
  9114. }
  9115. /**
  9116. * Converts the media to an ImageBitmap
  9117. * @returns {SpeedyPromise<ImageBitmap>}
  9118. */
  9119. toBitmap()
  9120. {
  9121. if(this.isReleased())
  9122. throw new utils_errors/* IllegalOperationError */.js('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');
  9123. else if(!this._source.isLoaded())
  9124. throw new utils_errors/* IllegalOperationError */.js('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');
  9125. else if(this._source.type == types/* MediaType.Bitmap */.DD.Bitmap)
  9126. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(this._source.data);
  9127. else
  9128. return new speedy_promise/* SpeedyPromise */.s((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
  9129. }
  9130. }
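// Usage sketch for SpeedyMedia (illustrative; assumes an <img> element that has already loaded):
//
//   const img = document.querySelector('img');
//   SpeedyMedia.load(img).then(media => {
//       console.log(media.type, media.width, media.height);   // "image", width, height
//       return media.toBitmap().then(bitmap => {
//           media.release();   // release the media when it's no longer needed
//           return bitmap;
//       });
//   });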
  9131. ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
  9132. /*
  9133. * speedy-vision.js
  9134. * GPU-accelerated Computer Vision for JavaScript
  9135. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  9136. *
  9137. * Licensed under the Apache License, Version 2.0 (the "License");
  9138. * you may not use this file except in compliance with the License.
  9139. * You may obtain a copy of the License at
  9140. *
  9141. * http://www.apache.org/licenses/LICENSE-2.0
  9142. *
  9143. * Unless required by applicable law or agreed to in writing, software
  9144. * distributed under the License is distributed on an "AS IS" BASIS,
  9145. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9146. * See the License for the specific language governing permissions and
  9147. * limitations under the License.
  9148. *
  9149. * fps-counter.js
9150. * An FPS counter
  9151. */
  9152. /** @const {number} update interval in milliseconds */
  9153. const UPDATE_INTERVAL = 500;
  9154. /** @type {FPSCounter|null} Singleton */
  9155. let instance = null;
  9156. /**
  9157. * FPS counter
  9158. */
  9159. class FPSCounter
  9160. {
  9161. /**
  9162. * Creates a new FPSCounter
  9163. * @private
  9164. */
  9165. constructor()
  9166. {
  9167. /** @type {number} current FPS rate */
  9168. this._fps = 60;
  9169. /** @type {number} frame counter */
  9170. this._frames = 0;
  9171. /** @type {number} update interval in milliseconds */
  9172. this._updateInterval = UPDATE_INTERVAL;
  9173. /** @type {number} time of the last update */
  9174. this._lastUpdate = performance.now();
  9175. /** @type {function(): void} bound update function */
  9176. this._boundUpdate = this._update.bind(this);
  9177. // this should never happen...
  9178. if(instance !== null)
  9179. throw new utils_errors/* IllegalOperationError */.js(`Can't have multiple instances of FPSCounter`);
  9180. // start FPS counter
  9181. this._boundUpdate();
  9182. }
  9183. /**
  9184. * Gets an instance of the FPS counter.
  9185. * We use lazy loading, i.e., we will not
9186. * create an FPS counter unless we need to!
  9187. * @returns {FPSCounter}
  9188. */
  9189. static get instance()
  9190. {
  9191. if(instance === null)
  9192. instance = new FPSCounter();
  9193. return instance;
  9194. }
  9195. /**
  9196. * Get the FPS rate
  9197. * @returns {number} frames per second
  9198. */
  9199. get fps()
  9200. {
  9201. return this._fps;
  9202. }
  9203. /**
  9204. * Updates the FPS counter
  9205. */
  9206. _update()
  9207. {
  9208. const now = performance.now();
  9209. const deltaTime = now - this._lastUpdate;
  9210. if(deltaTime >= this._updateInterval) {
  9211. this._fps = Math.round(this._frames / (deltaTime * 0.001));
  9212. this._frames = 0;
  9213. this._lastUpdate = now;
  9214. }
  9215. this._frames++;
  9216. requestAnimationFrame(this._boundUpdate);
  9217. }
  9218. }
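// Usage sketch for FPSCounter (illustrative): it is a lazily created singleton that
// refreshes itself via requestAnimationFrame every UPDATE_INTERVAL milliseconds.
//
//   const counter = FPSCounter.instance;                         // created on first access
//   setInterval(() => console.log(`${counter.fps} fps`), 1000);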
  9219. ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
  9220. /*
  9221. * speedy-vision.js
  9222. * GPU-accelerated Computer Vision for JavaScript
  9223. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  9224. *
  9225. * Licensed under the Apache License, Version 2.0 (the "License");
  9226. * you may not use this file except in compliance with the License.
  9227. * You may obtain a copy of the License at
  9228. *
  9229. * http://www.apache.org/licenses/LICENSE-2.0
  9230. *
  9231. * Unless required by applicable law or agreed to in writing, software
  9232. * distributed under the License is distributed on an "AS IS" BASIS,
  9233. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9234. * See the License for the specific language governing permissions and
  9235. * limitations under the License.
  9236. *
  9237. * speedy-vector.js
  9238. * Vectors
  9239. */
  9240. /**
  9241. * 2D vector of floating-point numbers
  9242. */
  9243. class SpeedyVector2
  9244. {
  9245. /**
  9246. * Create a 2D vector
  9247. * @param {number} x
  9248. * @param {number} y
  9249. */
  9250. constructor(x, y)
  9251. {
  9252. /** @type {number} x coordinate */
  9253. this._x = +x;
  9254. /** @type {number} y coordinate */
  9255. this._y = +y;
  9256. }
  9257. //
  9258. // ===== METHODS =====
  9259. //
  9260. /**
  9261. * x-coordinate
  9262. * @returns {number}
  9263. */
  9264. get x()
  9265. {
  9266. return this._x;
  9267. }
  9268. /**
  9269. * x-coordinate
  9270. * @param {number} value
  9271. */
  9272. set x(value)
  9273. {
  9274. this._x = +value;
  9275. }
  9276. /**
  9277. * y-coordinate
  9278. * @returns {number}
  9279. */
  9280. get y()
  9281. {
  9282. return this._y;
  9283. }
  9284. /**
  9285. * y-coordinate
  9286. * @param {number} value
  9287. */
  9288. set y(value)
  9289. {
  9290. this._y = +value;
  9291. }
  9292. /**
  9293. * Convert to string
  9294. * @returns {string}
  9295. */
  9296. toString()
  9297. {
  9298. return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  9299. }
  9300. /**
  9301. * Is this vector equal to v?
  9302. * @param {SpeedyVector2} v
  9303. * @returns {boolean}
  9304. */
  9305. equals(v)
  9306. {
  9307. return this.x === v.x && this.y === v.y;
  9308. }
  9309. /**
  9310. * Dot product between this vector and another vector
  9311. * @param {SpeedyVector2} v another vector
  9312. * @returns {number}
  9313. */
  9314. dot(v)
  9315. {
  9316. return this.x * v.x + this.y * v.y;
  9317. }
  9318. /**
  9319. * The distance between this vector and another vector
  9320. * @param {SpeedyVector2} v another vector
  9321. * @returns {number}
  9322. */
  9323. distanceTo(v)
  9324. {
  9325. const dx = this.x - v.x;
  9326. const dy = this.y - v.y;
  9327. return Math.sqrt(dx * dx + dy * dy);
  9328. }
  9329. /**
  9330. * Euclidean norm
  9331. * @returns {number}
  9332. */
  9333. length()
  9334. {
  9335. return Math.sqrt(this.x * this.x + this.y * this.y);
  9336. }
  9337. /**
  9338. * Returns a normalized version of this vector
  9339. * @returns {SpeedyVector2}
  9340. */
  9341. normalized()
  9342. {
  9343. const len = this.length();
  9344. if(len > 0.0)
  9345. return new SpeedyVector2(this.x / len, this.y / len);
  9346. else
  9347. return new SpeedyVector2(0.0, 0.0);
  9348. }
  9349. /**
  9350. * Returns a copy of this vector translated by offset
  9351. * @param {SpeedyVector2} offset
  9352. * @returns {SpeedyVector2}
  9353. */
  9354. plus(offset)
  9355. {
  9356. return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
  9357. }
  9358. /**
  9359. * Returns a copy of this vector translated by -offset
  9360. * @param {SpeedyVector2} offset
  9361. * @returns {SpeedyVector2}
  9362. */
  9363. minus(offset)
  9364. {
  9365. return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
  9366. }
  9367. /**
  9368. * Returns a copy of this vector scaled by a scalar
  9369. * @param {number} scalar
  9370. * @returns {SpeedyVector2}
  9371. */
  9372. times(scalar)
  9373. {
  9374. return new SpeedyVector2(this.x * scalar, this.y * scalar);
  9375. }
  9376. }
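// Usage sketch for SpeedyVector2 (illustrative only):
//
//   const u = new SpeedyVector2(3, 4);
//   u.length();                               // 5
//   u.normalized();                           // SpeedyVector2(0.6, 0.8)
//   u.plus(new SpeedyVector2(1, 1));          // SpeedyVector2(4, 5)
//   u.dot(new SpeedyVector2(1, 0));           // 3
//   u.distanceTo(new SpeedyVector2(0, 0));    // 5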
  9377. ;// CONCATENATED MODULE: ./src/core/speedy-point.js
  9378. /*
  9379. * speedy-vision.js
  9380. * GPU-accelerated Computer Vision for JavaScript
  9381. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  9382. *
  9383. * Licensed under the Apache License, Version 2.0 (the "License");
  9384. * you may not use this file except in compliance with the License.
  9385. * You may obtain a copy of the License at
  9386. *
  9387. * http://www.apache.org/licenses/LICENSE-2.0
  9388. *
  9389. * Unless required by applicable law or agreed to in writing, software
  9390. * distributed under the License is distributed on an "AS IS" BASIS,
  9391. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9392. * See the License for the specific language governing permissions and
  9393. * limitations under the License.
  9394. *
  9395. * speedy-point.js
  9396. * Points in space
  9397. */
  9398. /**
  9399. * 2D point
  9400. */
  9401. class SpeedyPoint2
  9402. {
  9403. /**
  9404. * Create a 2D point
  9405. * @param {number} x
  9406. * @param {number} y
  9407. */
  9408. constructor(x, y)
  9409. {
  9410. /** @type {number} x coordinate */
  9411. this._x = +x;
  9412. /** @type {number} y coordinate */
  9413. this._y = +y;
  9414. }
  9415. //
  9416. // ===== METHODS =====
  9417. //
  9418. /**
  9419. * x-coordinate
  9420. * @returns {number}
  9421. */
  9422. get x()
  9423. {
  9424. return this._x;
  9425. }
  9426. /**
  9427. * x-coordinate
  9428. * @param {number} value
  9429. */
  9430. set x(value)
  9431. {
  9432. this._x = +value;
  9433. }
  9434. /**
  9435. * y-coordinate
  9436. * @returns {number}
  9437. */
  9438. get y()
  9439. {
  9440. return this._y;
  9441. }
  9442. /**
  9443. * y-coordinate
  9444. * @param {number} value
  9445. */
  9446. set y(value)
  9447. {
  9448. this._y = +value;
  9449. }
  9450. /**
  9451. * Convert to string
  9452. * @returns {string}
  9453. */
  9454. toString()
  9455. {
  9456. return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  9457. }
  9458. /**
  9459. * Add a vector to this point
  9460. * @param {SpeedyVector2} v
  9461. * @returns {SpeedyPoint2}
  9462. */
  9463. plus(v)
  9464. {
  9465. return new SpeedyPoint2(this.x + v.x, this.y + v.y);
  9466. }
  9467. /**
  9468. * Subtracts a point p from this point
  9469. * @param {SpeedyPoint2} p
  9470. * @returns {SpeedyVector2}
  9471. */
  9472. minus(p)
  9473. {
  9474. return new SpeedyVector2(this.x - p.x, this.y - p.y);
  9475. }
  9476. /**
  9477. * Is this point equal to p?
  9478. * @param {SpeedyPoint2} p
  9479. * @returns {boolean}
  9480. */
  9481. equals(p)
  9482. {
  9483. return this.x === p.x && this.y === p.y;
  9484. }
  9485. }
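// Usage sketch for SpeedyPoint2 (illustrative only): points and vectors interoperate,
// i.e., point + vector = point and point - point = vector.
//
//   const p = new SpeedyPoint2(10, 20);
//   const q = p.plus(new SpeedyVector2(5, -5));   // SpeedyPoint2(15, 15)
//   q.minus(p);                                   // SpeedyVector2(5, -5)
//   p.equals(new SpeedyPoint2(10, 20));           // true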
  9486. // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
  9487. var speedy_matrix_expr = __nested_webpack_require_312600__(5137);
  9488. // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
  9489. var speedy_matrix_wasm = __nested_webpack_require_312600__(4368);
  9490. // EXTERNAL MODULE: ./src/core/speedy-matrix.js
  9491. var speedy_matrix = __nested_webpack_require_312600__(8007);
  9492. ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
  9493. /*
  9494. * speedy-vision.js
  9495. * GPU-accelerated Computer Vision for JavaScript
  9496. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  9497. *
  9498. * Licensed under the Apache License, Version 2.0 (the "License");
  9499. * you may not use this file except in compliance with the License.
  9500. * You may obtain a copy of the License at
  9501. *
  9502. * http://www.apache.org/licenses/LICENSE-2.0
  9503. *
  9504. * Unless required by applicable law or agreed to in writing, software
  9505. * distributed under the License is distributed on an "AS IS" BASIS,
  9506. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9507. * See the License for the specific language governing permissions and
  9508. * limitations under the License.
  9509. *
  9510. * speedy-matrix-factory.js
  9511. * A factory of matrices
  9512. */
  9513. /**
  9514. * Matrix routines
  9515. */
  9516. class SpeedyMatrixFactory extends Function
  9517. {
  9518. /**
  9519. * Constructor
  9520. */
  9521. constructor()
  9522. {
  9523. // This factory can be invoked as a function
  9524. super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
  9525. return this.bind(this);
  9526. }
  9527. /**
  9528. * @private
  9529. *
9530. * Create a new matrix with the specified size and entries
  9531. * @param {number} rows
  9532. * @param {number} [columns]
  9533. * @param {number[]} [entries] in column-major format
  9534. * @returns {SpeedyMatrix}
  9535. */
  9536. _create(rows, columns = rows, entries = [])
  9537. {
  9538. return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
  9539. }
  9540. /**
  9541. * @private
  9542. *
  9543. * Evaluate an expression synchronously and store the result in a new matrix
  9544. * @param {SpeedyMatrixExpr} expr matrix expression
  9545. * @returns {SpeedyMatrix}
  9546. */
  9547. _from(expr)
  9548. {
  9549. return speedy_matrix.SpeedyMatrix.From(expr);
  9550. }
  9551. /**
  9552. * Create a new matrix filled with zeros with the specified size
  9553. * @param {number} rows
  9554. * @param {number} [columns]
  9555. * @returns {SpeedyMatrix}
  9556. */
  9557. Zeros(rows, columns = rows)
  9558. {
  9559. return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
  9560. }
  9561. /**
  9562. * Create a new matrix filled with ones with the specified size
  9563. * @param {number} rows
  9564. * @param {number} [columns]
  9565. * @returns {SpeedyMatrix}
  9566. */
  9567. Ones(rows, columns = rows)
  9568. {
  9569. return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
  9570. }
  9571. /**
  9572. * Create an identity matrix with the specified size
  9573. * @param {number} rows
  9574. * @param {number} [columns]
  9575. * @returns {SpeedyMatrix}
  9576. */
  9577. Eye(rows, columns = rows)
  9578. {
  9579. return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
  9580. }
  9581. /**
  9582. * Returns a promise that resolves immediately if the WebAssembly routines
9583. * are ready to be used, or as soon as they become ready
  9584. * @returns {SpeedyPromise<void>}
  9585. */
  9586. ready()
  9587. {
  9588. return speedy_matrix.SpeedyMatrix.ready();
  9589. }
  9590. /**
  9591. * QR decomposition
  9592. * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
  9593. * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
  9594. * @param {SpeedyMatrix} mat is m x n, input
  9595. * @param {object} [options]
  9596. * @param {'reduced'|'full'} [options.mode]
  9597. * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
  9598. */
  9599. qr(Q, R, mat, { mode = 'reduced' } = {})
  9600. {
  9601. const A = mat, m = mat.rows, n = mat.columns;
  9602. // validate shapes & mode
  9603. if(mode == 'reduced') {
  9604. if(Q.rows != m || Q.columns != n || R.rows != n || R.columns != n)
  9605. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shape for reduced QR`);
  9606. }
  9607. else if(mode == 'full') {
  9608. if(Q.rows != m || Q.columns != m || R.rows != m || R.columns != n)
  9609. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shape for full QR`);
  9610. }
  9611. else
  9612. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid mode for QR: "${mode}"`);
  9613. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9614. // allocate matrices
  9615. const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, Q);
  9616. const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, R);
  9617. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, A);
  9618. // copy input matrices to WASM memory
  9619. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, Aptr, A);
  9620. // run the WASM routine
  9621. if(mode == 'reduced')
  9622. wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);
  9623. else
  9624. wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
  9625. // copy output matrices from WASM memory
  9626. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, Qptr, Q);
  9627. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, Rptr, R);
  9628. // deallocate matrices
  9629. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, Aptr);
  9630. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, Rptr);
  9631. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, Qptr);
  9632. // done!
  9633. return [Q, R];
  9634. });
  9635. }
  9636. /**
  9637. * Solve a possibly overdetermined system of linear
  9638. * equations Ax = b for x using ordinary least squares
  9639. * @param {SpeedyMatrix} solution n x 1, output
  9640. * @param {SpeedyMatrix} A m x n, m >= n, input
9641. * @param {SpeedyMatrix} b m x 1, input
  9642. * @param {object} [options]
  9643. * @param {'qr'} [options.method] method of resolution
  9644. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9645. */
  9646. ols(solution, A, b, { method = 'qr' } = {})
  9647. {
  9648. const m = A.rows, n = A.columns;
  9649. const x = solution;
  9650. // validate shapes
  9651. if(m < n || n == 0)
  9652. throw new utils_errors/* IllegalArgumentError */.mG(`Can't solve an underdetermined system of equations`);
  9653. else if(b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1)
  9654. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shapes`);
  9655. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9656. // allocate matrices
  9657. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, A);
  9658. const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, b);
  9659. const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, x);
  9660. // copy input matrices to WASM memory
  9661. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, Aptr, A);
  9662. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, bptr, b);
  9663. // run the WASM routine
  9664. switch(method) {
  9665. case 'qr':
  9666. wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
  9667. break;
  9668. default:
  9669. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid method: "${method}"`);
  9670. }
  9671. // copy output matrix from WASM memory
  9672. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, xptr, x);
  9673. // deallocate matrices
  9674. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, xptr);
  9675. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, bptr);
  9676. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, Aptr);
  9677. // done!
  9678. return solution;
  9679. });
  9680. }
  9681. /**
  9682. * Solve a system of linear equations Ax = b for x
  9683. * @param {SpeedyMatrix} solution m x 1, output
  9684. * @param {SpeedyMatrix} A m x m, input
9685. * @param {SpeedyMatrix} b m x 1, input
  9686. * @param {object} [options]
  9687. * @param {'qr'} [options.method] method of resolution
  9688. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9689. */
  9690. solve(solution, A, b, { method = 'qr' } = {})
  9691. {
  9692. const m = A.rows, n = A.columns;
  9693. const x = solution;
  9694. // validate shapes
  9695. if(m != n)
  9696. throw new utils_errors/* IllegalArgumentError */.mG(`Can't solve an over or underdetermined system of equations`);
  9697. else if(b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1)
  9698. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shapes`);
  9699. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9700. // select method
  9701. switch(method) {
  9702. case 'qr':
  9703. return this.ols(x, A, b, { method });
  9704. /*case 'lu':
  9705. break;*/
  9706. default:
  9707. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid method: "${method}"`);
  9708. }
  9709. });
  9710. }
  9711. /**
  9712. * Compute a perspective transformation using 4 correspondences of points
  9713. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9714. * @param {SpeedyMatrix} src 2x4 input points - source coordinates
  9715. * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
  9716. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9717. */
  9718. perspective(homography, src, dest)
  9719. {
  9720. // validate shapes
  9721. if(src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4)
  9722. throw new utils_errors/* IllegalArgumentError */.mG(`You need two 2x4 input matrices to compute a perspective transformation`);
  9723. else if(homography.rows != 3 || homography.columns != 3)
  9724. throw new utils_errors/* IllegalArgumentError */.mG(`The output of perspective() is a 3x3 homography`);
  9725. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9726. // allocate matrices
  9727. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, homography);
  9728. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9729. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9730. // copy input matrices to WASM memory
  9731. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9732. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, destptr, dest);
  9733. // run the WASM routine
  9734. wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
  9735. // copy output matrix from WASM memory
  9736. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, homptr, homography);
  9737. // deallocate matrices
  9738. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9739. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9740. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, homptr);
  9741. // done!
  9742. return homography;
  9743. });
  9744. }
  9745. /**
  9746. * Compute a perspective transformation using n >= 4 correspondences of points
  9747. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9748. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9749. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9750. * @param {object} [options]
  9751. * @param {'default'|'pransac'} [options.method] method of computation
  9752. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9753. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9754. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9755. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9756. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9757. */
  9758. findHomography(homography, src, dest, {
  9759. method = 'default',
  9760. mask = null,
  9761. reprojectionError = 3,
  9762. numberOfHypotheses = 512,
  9763. bundleSize = 128,
  9764. } = {})
  9765. {
  9766. // validate shapes
  9767. if(src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns)
  9768. throw new utils_errors/* IllegalArgumentError */.mG(`You need two 2 x n (n >= 4) input matrices to compute a homography`);
  9769. else if(homography.rows != 3 || homography.columns != 3)
  9770. throw new utils_errors/* IllegalArgumentError */.mG(`The output of findHomography() is a 3x3 homography`);
  9771. else if(mask != null && (mask.rows != 1 || mask.columns != src.columns))
  9772. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shape of the inliers mask`);
  9773. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9774. // allocate matrices
  9775. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, homography);
  9776. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9777. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9778. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, mask) : 0;
  9779. // copy input matrices to WASM memory
  9780. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9781. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, destptr, dest);
  9782. // run the WASM routine
  9783. switch(method) {
  9784. case 'pransac':
  9785. utils/* Utils.assert */.c.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9786. wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9787. break;
  9788. case 'default':
  9789. case 'dlt': // obsolete
  9790. wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
  9791. break;
  9792. default:
  9793. throw new utils_errors/* IllegalArgumentError */.mG(`Illegal method for findHomography(): "${method}"`);
  9794. }
  9795. // copy output matrices from WASM memory
  9796. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, homptr, homography);
  9797. if(mask != null)
  9798. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, maskptr, mask);
  9799. // deallocate matrices
  9800. if(mask != null)
  9801. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, maskptr);
  9802. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9803. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9804. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, homptr);
  9805. // done!
  9806. return homography;
  9807. });
  9808. }
  9809. /**
  9810. * Apply a perspective transformation to a set of 2D points
  9811. * @param {SpeedyMatrix} dest 2 x n output matrix
  9812. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9813. * @param {SpeedyMatrix} transform 3x3 homography matrix
  9814. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9815. */
  9816. applyPerspectiveTransform(dest, src, transform)
  9817. {
  9818. // validate shapes
  9819. if(src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
  9820. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shapes`);
  9821. else if(transform.rows != 3 || transform.columns != 3)
  9822. throw new utils_errors/* IllegalArgumentError */.mG(`The perspective transformation must be a 3x3 matrix`);
  9823. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9824. // allocate matrices
  9825. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, transform);
  9826. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9827. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9828. // copy input matrices to WASM memory
  9829. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9830. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, matptr, transform);
  9831. // run the WASM routine
  9832. wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
  9833. // copy output matrix from WASM memory
  9834. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, destptr, dest);
  9835. // deallocate matrices
  9836. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9837. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9838. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, matptr);
  9839. // done!
  9840. return dest;
  9841. });
  9842. }
  9843. /**
  9844. * Compute an affine transform using 3 correspondences of points
  9845. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9846. * @param {SpeedyMatrix} src 2x3 input points - source coordinates
  9847. * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
9848. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to the affine transform
  9849. */
  9850. affine(transform, src, dest)
  9851. {
  9852. // validate shapes
  9853. if(src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3)
  9854. throw new utils_errors/* IllegalArgumentError */.mG(`You need two 2x3 input matrices to compute an affine transform`);
  9855. else if(transform.rows != 2 || transform.columns != 3)
  9856. throw new utils_errors/* IllegalArgumentError */.mG(`The output of affine() is a 2x3 matrix`);
  9857. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9858. // allocate matrices
  9859. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, transform);
  9860. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9861. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9862. // copy input matrices to WASM memory
  9863. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9864. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, destptr, dest);
  9865. // run the WASM routine
  9866. wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
  9867. // copy output matrix from WASM memory
  9868. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, matptr, transform);
  9869. // deallocate matrices
  9870. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9871. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9872. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, matptr);
  9873. // done!
  9874. return transform;
  9875. });
  9876. }
  9877. /**
  9878. * Compute an affine transformation using n >= 3 correspondences of points
  9879. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9880. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9881. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9882. * @param {object} [options]
  9883. * @param {'default'|'pransac'} [options.method] method of computation
  9884. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9885. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9886. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9887. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9888. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
  9889. */
  9890. findAffineTransform(transform, src, dest, {
  9891. method = 'default',
  9892. mask = null,
  9893. reprojectionError = 3,
  9894. numberOfHypotheses = 512,
  9895. bundleSize = 128,
  9896. } = {})
  9897. {
  9898. // validate shapes
  9899. if(src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns)
  9900. throw new utils_errors/* IllegalArgumentError */.mG(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);
  9901. else if(transform.rows != 2 || transform.columns != 3)
  9902. throw new utils_errors/* IllegalArgumentError */.mG(`The output of findAffineTransform() is a 2x3 matrix`);
  9903. else if(mask != null && (mask.rows != 1 || mask.columns != src.columns))
  9904. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shape of the inliers mask`);
  9905. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9906. // allocate matrices
  9907. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, transform);
  9908. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9909. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9910. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, mask) : 0;
  9911. // copy input matrices to WASM memory
  9912. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9913. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, destptr, dest);
  9914. // run the WASM routine
  9915. switch(method) {
  9916. case 'pransac':
  9917. utils/* Utils.assert */.c.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9918. wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9919. break;
  9920. case 'default':
  9921. wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
  9922. break;
  9923. default:
  9924. throw new utils_errors/* IllegalArgumentError */.mG(`Illegal method for findAffineTransform(): "${method}"`);
  9925. }
  9926. // copy output matrices from WASM memory
  9927. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, matptr, transform);
  9928. if(mask != null)
  9929. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, maskptr, mask);
  9930. // deallocate matrices
  9931. if(mask != null)
  9932. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, maskptr);
  9933. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9934. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9935. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, matptr);
  9936. // done!
  9937. return transform;
  9938. });
  9939. }
  9940. /**
  9941. * Apply an affine transformation to a set of 2D points
  9942. * @param {SpeedyMatrix} dest 2 x n output matrix
  9943. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9944. * @param {SpeedyMatrix} transform 2x3 affine transform
  9945. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9946. */
  9947. applyAffineTransform(dest, src, transform)
  9948. {
  9949. // validate shapes
  9950. if(src.rows != 2 || dest.rows != 2 || src.columns != dest.columns)
  9951. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid shapes`);
  9952. else if(transform.rows != 2 || transform.columns != 3)
  9953. throw new utils_errors/* IllegalArgumentError */.mG(`The affine transformation must be a 2x3 matrix`);
  9954. return speedy_matrix_wasm/* SpeedyMatrixWASM.ready */.r.ready().then(({wasm, memory}) => {
  9955. // allocate matrices
  9956. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, transform);
  9957. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, src);
  9958. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM.allocateMat32 */.r.allocateMat32(wasm, memory, dest);
  9959. // copy input matrices to WASM memory
  9960. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, srcptr, src);
  9961. speedy_matrix_wasm/* SpeedyMatrixWASM.copyToMat32 */.r.copyToMat32(wasm, memory, matptr, transform);
  9962. // run the WASM routine
  9963. wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
  9964. // copy output matrix from WASM memory
  9965. speedy_matrix_wasm/* SpeedyMatrixWASM.copyFromMat32 */.r.copyFromMat32(wasm, memory, destptr, dest);
  9966. // deallocate matrices
  9967. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, destptr);
  9968. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, srcptr);
  9969. speedy_matrix_wasm/* SpeedyMatrixWASM.deallocateMat32 */.r.deallocateMat32(wasm, memory, matptr);
  9970. // done!
  9971. return dest;
  9972. });
  9973. }
  9974. }
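// Usage sketch for SpeedyMatrixFactory (illustrative). The factory is callable and also
// exposes named helpers; whether it is published as `Speedy.Matrix` is an assumption, so
// this sketch only uses methods defined in the class above.
//
//   const factory = new SpeedyMatrixFactory();
//   const A = factory(3, 3, [1,0,0, 0,1,0, 0,0,1]);   // 3x3 matrix, column-major entries
//   const b = factory.Ones(3, 1);
//   const x = factory.Zeros(3, 1);
//   factory.ready()
//       .then(() => factory.solve(x, A, b))           // solve Ax = b (QR by default)
//       .then(solution => console.log(solution));     // resolves to x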
  9975. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
  9976. /*
  9977. * speedy-vision.js
  9978. * GPU-accelerated Computer Vision for JavaScript
  9979. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  9980. *
  9981. * Licensed under the Apache License, Version 2.0 (the "License");
  9982. * you may not use this file except in compliance with the License.
  9983. * You may obtain a copy of the License at
  9984. *
  9985. * http://www.apache.org/licenses/LICENSE-2.0
  9986. *
  9987. * Unless required by applicable law or agreed to in writing, software
  9988. * distributed under the License is distributed on an "AS IS" BASIS,
  9989. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9990. * See the License for the specific language governing permissions and
  9991. * limitations under the License.
  9992. *
  9993. * pipeline-message.js
  9994. * A message that is shared between nodes of a pipeline
  9995. */
  9996. /**
  9997. * Types of messages
  9998. * @enum {Symbol}
  9999. */
  10000. const SpeedyPipelineMessageType = Object.freeze({
  10001. Nothing: Symbol('Nothing'),
  10002. Image: Symbol('Image'),
  10003. Keypoints: Symbol('Keypoints'),
  10004. Vector2: Symbol('Vector2'),
  10005. LSHTables: Symbol('LSHTables'),
  10006. KeypointMatches: Symbol('KeypointMatches'),
  10007. });
  10008. /**
  10009. * Diagnostic data
  10010. * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
  10011. */
  10012. /**
  10013. * A message that is shared between nodes of a pipeline
  10014. * @abstract
  10015. */
  10016. class SpeedyPipelineMessage
  10017. {
  10018. /**
  10019. * Constructor
  10020. * @param {SpeedyPipelineMessageType} type message type
  10021. */
  10022. constructor(type)
  10023. {
  10024. /** @type {SpeedyPipelineMessageType} message type */
  10025. this._type = type;
  10026. }
  10027. /**
  10028. * Message type
  10029. * @returns {SpeedyPipelineMessageType}
  10030. */
  10031. get type()
  10032. {
  10033. return this._type;
  10034. }
  10035. /**
  10036. * Checks if the type of this message is equal to parameter type
  10037. * @param {SpeedyPipelineMessageType} type
  10038. * @returns {boolean}
  10039. */
  10040. hasType(type)
  10041. {
  10042. return this._type === type;
  10043. }
  10044. /**
  10045. * Is this an empty message?
  10046. * @returns {boolean}
  10047. */
  10048. isEmpty()
  10049. {
  10050. return this.hasType(SpeedyPipelineMessageType.Nothing);
  10051. }
  10052. /**
  10053. * Convert to string
  10054. * @returns {string}
  10055. */
  10056. toString()
  10057. {
  10058. const type = Object.keys(SpeedyPipelineMessageType).find(
  10059. type => SpeedyPipelineMessageType[type] === this.type
  10060. );
  10061. return `message of type ${type}`;
  10062. }
  10063. /**
  10064. * Inspect this message for debugging purposes
  10065. * @param {SpeedyGPU} gpu
  10066. * @returns {SpeedyPipelineMessageDiagnosticData}
  10067. */
  10068. inspect(gpu)
  10069. {
  10070. throw new utils_errors/* AbstractMethodError */.Mi();
  10071. }
  10072. /**
  10073. * Set parameters
  10074. * @abstract
  10075. * @param {...any} args
  10076. * @returns {SpeedyPipelineMessage} this message
  10077. */
  10078. set(...args)
  10079. {
  10080. throw new utils_errors/* AbstractMethodError */.Mi();
  10081. }
  10082. /**
  10083. * Create a message of the specified type
  10084. * @param {SpeedyPipelineMessageType} type
  10085. * @returns {SpeedyPipelineMessage}
  10086. */
  10087. static create(type)
  10088. {
  10089. return createMessage(type);
  10090. }
  10091. }
  10092. /**
  10093. * An empty message carrying nothing
  10094. */
  10095. class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage
  10096. {
  10097. /**
  10098. * Constructor
  10099. */
  10100. constructor()
  10101. {
  10102. super(SpeedyPipelineMessageType.Nothing);
  10103. }
  10104. /**
  10105. * Set parameters
  10106. * @returns {SpeedyPipelineMessage} this message
  10107. */
  10108. set()
  10109. {
  10110. return this;
  10111. }
  10112. /**
  10113. * Inspect this message for debugging purposes
  10114. * @param {SpeedyGPU} gpu
  10115. * @returns {SpeedyPipelineMessageDiagnosticData}
  10116. */
  10117. inspect(gpu)
  10118. {
  10119. return {
  10120. type: this.constructor.name
  10121. };
  10122. }
  10123. }
  10124. /**
  10125. * A message transporting an image
  10126. */
  10127. class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage
  10128. {
  10129. /**
  10130. * Constructor
  10131. */
  10132. constructor()
  10133. {
  10134. super(SpeedyPipelineMessageType.Image);
  10135. /** @type {SpeedyDrawableTexture} the image we carry */
  10136. this._image = null;
  10137. /** @type {ImageFormat} image format */
  10138. this._format = types/* ImageFormat.RGBA */.D3.RGBA;
  10139. }
  10140. /**
  10141. * Set parameters
  10142. * @param {SpeedyDrawableTexture} image the image we carry
  10143. * @param {ImageFormat} [format] image format
  10144. * @returns {SpeedyPipelineMessage} this message
  10145. */
  10146. set(image, format = types/* ImageFormat.RGBA */.D3.RGBA)
  10147. {
  10148. // set parameters
  10149. this._image = image;
  10150. this._format = format;
  10151. // done!
  10152. return this;
  10153. }
  10154. /**
  10155. * Inspect this message for debugging purposes
  10156. * @param {SpeedyGPU} gpu
  10157. * @returns {SpeedyPipelineMessageDiagnosticData}
  10158. */
  10159. inspect(gpu)
  10160. {
  10161. const formatName = Object.keys(types/* ImageFormat */.D3).find(
  10162. format => types/* ImageFormat */.D3[format] === this.format
  10163. );
  10164. return {
  10165. type: this.constructor.name,
  10166. format: String(formatName),
  10167. imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
  10168. image: this.image ? '<image data>' /* possibly MBs of data */ : '',
  10169. hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
  10170. };
  10171. }
  10172. /**
  10173. * The image we carry
  10174. * @returns {SpeedyDrawableTexture}
  10175. */
  10176. get image()
  10177. {
  10178. return this._image;
  10179. }
  10180. /**
  10181. * Image format
  10182. * @returns {ImageFormat}
  10183. */
  10184. get format()
  10185. {
  10186. return this._format;
  10187. }
  10188. }
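// Illustrative sketch (assumptions noted): pipeline nodes exchange data through these
// messages. A node producing an image would do something along these lines, where
// `texture` is an assumed SpeedyDrawableTexture:
//
//   const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image);
//   msg.set(texture);                                // format defaults to ImageFormat.RGBA
//   msg.hasType(SpeedyPipelineMessageType.Image);    // true
//   msg.isEmpty();                                   // false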
  10189. /**
  10190. * A message transporting keypoints
  10191. */
  10192. class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage
  10193. {
  10194. /**
  10195. * Constructor
  10196. */
  10197. constructor()
  10198. {
  10199. super(SpeedyPipelineMessageType.Keypoints);
  10200. /** @type {SpeedyDrawableTexture} encoded keypoints */
  10201. this._encodedKeypoints = null;
  10202. /** @type {number} descriptor size in bytes */
  10203. this._descriptorSize = 0;
  10204. /** @type {number} extra size in bytes */
  10205. this._extraSize = 0;
  10206. /** @type {number} encoder length */
  10207. this._encoderLength = 1;
  10208. }
  10209. /**
  10210. * Set parameters
  10211. * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
  10212. * @param {number} descriptorSize in bytes
  10213. * @param {number} extraSize in bytes
  10214. * @param {number} encoderLength positive integer
  10215. * @returns {SpeedyPipelineMessage} this message
  10216. */
  10217. set(encodedKeypoints, descriptorSize, extraSize, encoderLength)
  10218. {
  10219. // set parameters
  10220. this._encodedKeypoints = encodedKeypoints;
  10221. this._descriptorSize = descriptorSize | 0;
  10222. this._extraSize = extraSize | 0;
  10223. this._encoderLength = encoderLength | 0;
  10224. // validate
  10225. utils/* Utils.assert */.c.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
  10226. utils/* Utils.assert */.c.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
  10227. utils/* Utils.assert */.c.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
  10228. // done!
  10229. return this;
  10230. }
  10231. /**
  10232. * Inspect this message for debugging purposes
  10233. * @param {SpeedyGPU} gpu
  10234. * @returns {SpeedyPipelineMessageDiagnosticData}
  10235. */
  10236. inspect(gpu)
  10237. {
  10238. return {
  10239. type: this.constructor.name,
  10240. descriptorSize: this.descriptorSize,
  10241. extraSize: this.extraSize,
  10242. encoderLength: this.encoderLength,
  10243. encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
  10244. encodedKeypoints: this.encodedKeypoints ? utils/* Utils.formatBinaryData */.c.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : '',
  10245. };
  10246. }
  10247. /**
  10248. * Encoded keypoints
  10249. * @returns {SpeedyDrawableTexture}
  10250. */
  10251. get encodedKeypoints()
  10252. {
  10253. return this._encodedKeypoints;
  10254. }
  10255. /**
  10256. * Descriptor size, in bytes
  10257. * @returns {number}
  10258. */
  10259. get descriptorSize()
  10260. {
  10261. return this._descriptorSize;
  10262. }
  10263. /**
  10264. * Extra size, in bytes
  10265. * @returns {number}
  10266. */
  10267. get extraSize()
  10268. {
  10269. return this._extraSize;
  10270. }
  10271. /**
  10272. * Encoder length
  10273. * @returns {number}
  10274. */
  10275. get encoderLength()
  10276. {
  10277. return this._encoderLength;
  10278. }
  10279. }
  10280. /**
  10281. * A message transporting a set of 2D vectors
  10282. */
  10283. class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage
  10284. {
  10285. /**
  10286. * Constructor
  10287. */
  10288. constructor()
  10289. {
  10290. super(SpeedyPipelineMessageType.Vector2);
  10291. /** @type {SpeedyDrawableTexture} the set of vectors */
  10292. this._vectors = null;
  10293. }
  10294. /**
  10295. * Set parameters
  10296. * @param {SpeedyDrawableTexture} vectors the set of vectors
  10297. * @returns {SpeedyPipelineMessage} this message
  10298. */
  10299. set(vectors)
  10300. {
  10301. // set parameters
  10302. this._vectors = vectors;
  10303. // done!
  10304. return this;
  10305. }
  10306. /**
  10307. * Inspect this message for debugging purposes
  10308. * @param {SpeedyGPU} gpu
  10309. * @returns {SpeedyPipelineMessageDiagnosticData}
  10310. */
  10311. inspect(gpu)
  10312. {
  10313. return {
  10314. type: this.constructor.name,
  10315. vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
  10316. vectors: this.vectors ? utils/* Utils.formatBinaryData */.c.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
  10317. };
  10318. }
  10319. /**
  10320. * The set of vectors
  10321. * @returns {SpeedyDrawableTexture}
  10322. */
  10323. get vectors()
  10324. {
  10325. return this._vectors;
  10326. }
  10327. }
  10328. /**
  10329. * A message transporting LSH tables
  10330. */
  10331. class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage
  10332. {
  10333. /**
  10334. * Constructor
  10335. */
  10336. constructor()
  10337. {
  10338. super(SpeedyPipelineMessageType.LSHTables);
  10339. /** @type {SpeedyLSH} LSH data structure */
  10340. this._lsh = null;
  10341. }
  10342. /**
  10343. * Set parameters
  10344. * @param {SpeedyLSH} lsh
  10345. * @returns {SpeedyPipelineMessage} this message
  10346. */
  10347. set(lsh)
  10348. {
  10349. // set parameters
  10350. this._lsh = lsh;
  10351. // done!
  10352. return this;
  10353. }
  10354. /**
  10355. * Inspect this message for debugging purposes
  10356. * @param {SpeedyGPU} gpu
  10357. * @returns {SpeedyPipelineMessageDiagnosticData}
  10358. */
  10359. inspect(gpu)
  10360. {
  10361. return {
  10362. type: this.constructor.name,
  10363. lsh: '<LSH tables>'
  10364. };
  10365. }
  10366. /**
  10367. * LSH data structure
  10368. * @returns {SpeedyLSH}
  10369. */
  10370. get lsh()
  10371. {
  10372. return this._lsh;
  10373. }
  10374. }
  10375. /**
  10376. * A message transporting a set of keypoint matches
  10377. */
  10378. class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage
  10379. {
  10380. /**
  10381. * Constructor
  10382. */
  10383. constructor()
  10384. {
  10385. super(SpeedyPipelineMessageType.KeypointMatches);
  10386. /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
  10387. this._encodedMatches = null;
  10388. /** @type {number} number of matches per keypoint */
  10389. this._matchesPerKeypoint = 1;
  10390. }
  10391. /**
  10392. * Set parameters
  10393. * @param {SpeedyDrawableTexture} encodedMatches
  10394. * @param {number} matchesPerKeypoint
  10395. * @returns {SpeedyPipelineMessage} this message
  10396. */
  10397. set(encodedMatches, matchesPerKeypoint)
  10398. {
  10399. // set parameters
  10400. this._encodedMatches = encodedMatches;
  10401. this._matchesPerKeypoint = matchesPerKeypoint | 0;
  10402. // validate
  10403. utils/* Utils.assert */.c.assert(this._matchesPerKeypoint > 0);
  10404. // done!
  10405. return this;
  10406. }
  10407. /**
  10408. * Inspect this message for debugging purposes
  10409. * @param {SpeedyGPU} gpu
  10410. * @returns {SpeedyPipelineMessageDiagnosticData}
  10411. */
  10412. inspect(gpu)
  10413. {
  10414. return {
  10415. type: this.constructor.name,
  10416. matchesPerKeypoint: this.matchesPerKeypoint,
  10417. encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
  10418. encodedMatches: this.encodedMatches ? utils/* Utils.formatBinaryData */.c.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
  10419. };
  10420. }
  10421. /**
  10422. * The matches
  10423. * @returns {SpeedyDrawableTexture}
  10424. */
  10425. get encodedMatches()
  10426. {
  10427. return this._encodedMatches;
  10428. }
  10429. /**
  10430. * Number of matches per keypoint
  10431. * @returns {number}
  10432. */
  10433. get matchesPerKeypoint()
  10434. {
  10435. return this._matchesPerKeypoint;
  10436. }
  10437. }
  10438. //
  10439. // Utilities
  10440. //
  10441. /** Map message type to message class */
  10442. const MESSAGE_CLASS = Object.freeze({
  10443. [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
  10444. [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
  10445. [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
  10446. [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
  10447. [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
  10448. [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches,
  10449. });
  10450. /**
  10451. * Create a message of the specified type
  10452. * @param {SpeedyPipelineMessageType} type
  10453. * @returns {SpeedyPipelineMessage}
  10454. */
  10455. function createMessage(type)
  10456. {
  10457. //return Reflect.construct(MESSAGE_CLASS[type], []);
  10458. return new MESSAGE_CLASS[
  10459. // cast to any to work around error TS2538:
  10460. // "Type 'Symbol' cannot be used as an index type."
  10461. /** @type {any} */ ( type )
  10462. ]();
  10463. }
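/*
 * Usage sketch (for reference only, not part of the original bundle): creating
 * and filling a message via the factory above. `texture` stands for a
 * hypothetical SpeedyDrawableTexture; everything else is defined in this module.
 *
 *   const msg = createMessage(SpeedyPipelineMessageType.Image);
 *   msg.set(texture);                              // format defaults to RGBA
 *   msg.hasType(SpeedyPipelineMessageType.Image);  // true
 *   msg.isEmpty();                                 // false
 */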
  10464. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
  10465. /*
  10466. * speedy-vision.js
  10467. * GPU-accelerated Computer Vision for JavaScript
  10468. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  10469. *
  10470. * Licensed under the Apache License, Version 2.0 (the "License");
  10471. * you may not use this file except in compliance with the License.
  10472. * You may obtain a copy of the License at
  10473. *
  10474. * http://www.apache.org/licenses/LICENSE-2.0
  10475. *
  10476. * Unless required by applicable law or agreed to in writing, software
  10477. * distributed under the License is distributed on an "AS IS" BASIS,
  10478. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10479. * See the License for the specific language governing permissions and
  10480. * limitations under the License.
  10481. *
  10482. * pipeline-portspec.js
  10483. * Specification (requirements) of a port of a node of a pipeline
  10484. */
  10485. /**
  10486. * A message constraint is a message validation predicate
  10487. * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
  10488. */
  10489. /**
  10490. * A validation predicate that validates all messages
  10491. * @type {SpeedyPipelineMessageConstraint}
  10492. */
  10493. const none = message => true;
  10494. /**
  10495. * Specification (requirements) of a port of a node of a pipeline
  10496. */
  10497. class SpeedyPipelinePortSpec
  10498. {
  10499. /**
  10500. * Constructor
  10501. * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
  10502. * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
  10503. */
  10504. constructor(expectedMessageType, messageConstraint = none)
  10505. {
  10506. /** @type {SpeedyPipelineMessageType} expected message type */
  10507. this._expectedMessageType = expectedMessageType;
  10508. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10509. this._isValidMessage = (typeof messageConstraint === 'function') ? messageConstraint : none;
  10510. // expect a valid type
  10511. utils/* Utils.assert */.c.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
  10512. }
  10513. /**
  10514. * Checks if two specs have the same expected type
  10515. * @param {SpeedyPipelinePortSpec} spec
  10516. * @returns {boolean}
  10517. */
  10518. isCompatibleWith(spec)
  10519. {
  10520. return this._expectedMessageType == spec._expectedMessageType;
  10521. }
  10522. /**
  10523. * Is the given message accepted by a port that abides by this specification?
  10524. * @param {SpeedyPipelineMessage} message
  10525. * @returns {boolean}
  10526. */
  10527. accepts(message)
  10528. {
  10529. return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
  10530. }
  10531. /**
  10532. * Convert to string
  10533. * @returns {string}
  10534. */
  10535. toString()
  10536. {
  10537. const type = Object.keys(SpeedyPipelineMessageType).find(
  10538. type => SpeedyPipelineMessageType[type] === this._expectedMessageType
  10539. );
  10540. return `Port expects ${type} satisfying ${this._isValidMessage}`;
  10541. }
  10542. /**
  10543. * Expected message type
  10544. * @returns {SpeedyPipelineMessageType}
  10545. */
  10546. get expectedMessageType()
  10547. {
  10548. return this._expectedMessageType;
  10549. }
  10550. }
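/*
 * Usage sketch (for reference only): a port specification that accepts keypoint
 * messages whose descriptorSize is a positive multiple of 4. The constraint is
 * just a predicate over the message; `someMessage` is hypothetical.
 *
 *   const spec = new SpeedyPipelinePortSpec(
 *       SpeedyPipelineMessageType.Keypoints,
 *       msg => msg.descriptorSize > 0 && msg.descriptorSize % 4 == 0
 *   );
 *   spec.accepts(someMessage); // true only if both the type and the constraint match
 */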
  10551. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
  10552. /*
  10553. * speedy-vision.js
  10554. * GPU-accelerated Computer Vision for JavaScript
  10555. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  10556. *
  10557. * Licensed under the Apache License, Version 2.0 (the "License");
  10558. * you may not use this file except in compliance with the License.
  10559. * You may obtain a copy of the License at
  10560. *
  10561. * http://www.apache.org/licenses/LICENSE-2.0
  10562. *
  10563. * Unless required by applicable law or agreed to in writing, software
  10564. * distributed under the License is distributed on an "AS IS" BASIS,
  10565. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10566. * See the License for the specific language governing permissions and
  10567. * limitations under the License.
  10568. *
  10569. * pipeline-port.js
  10570. * Port of a node of a pipeline
  10571. */
  10572. // Constants
  10573. const DEFAULT_INPUT_PORT_NAME = 'in';
  10574. const DEFAULT_OUTPUT_PORT_NAME = 'out';
  10575. const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
  10576. const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
  10577. /**
  10578. * Diagnostic data
  10579. * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
  10580. */
  10581. /**
  10582. * Port of a node of a pipeline
  10583. * @abstract
  10584. */
  10585. class SpeedyPipelinePort
  10586. {
  10587. /**
  10588. * Constructor
  10589. * @param {string} name the name of this port
  10590. * @param {SpeedyPipelinePortSpec} spec port specification
  10591. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10592. */
  10593. constructor(name, spec, node)
  10594. {
  10595. /** @type {string} the name of this port */
  10596. this._name = String(name);
  10597. /** @type {SpeedyPipelinePortSpec} the specification of this port */
  10598. this._spec = spec;
  10599. /** @type {SpeedyPipelineNode} the node to which this port belongs */
  10600. this._node = node;
  10601. /** @type {SpeedyPipelineMessage} the message located in this port */
  10602. this._message = EMPTY_MESSAGE;
  10603. // check if we've got an acceptable port name
  10604. utils/* Utils.assert */.c.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
  10605. }
  10606. /**
  10607. * The name of this port
  10608. * @returns {string}
  10609. */
  10610. get name()
  10611. {
  10612. return this._name;
  10613. }
  10614. /**
  10615. * The node to which this port belongs
  10616. * @returns {SpeedyPipelineNode}
  10617. */
  10618. get node()
  10619. {
  10620. return this._node;
  10621. }
  10622. /**
  10623. * Connect this port to another
  10624. * @abstract
  10625. * @param {SpeedyPipelinePort} port
  10626. */
  10627. connectTo(port)
  10628. {
  10629. throw new utils_errors/* AbstractMethodError */.Mi();
  10630. }
  10631. /**
  10632. * Is this an input port?
  10633. * @abstract
  10634. * @returns {boolean}
  10635. */
  10636. isInputPort()
  10637. {
  10638. throw new utils_errors/* AbstractMethodError */.Mi();
  10639. }
  10640. /**
  10641. * Is this an output port?
  10642. * @returns {boolean}
  10643. */
  10644. isOutputPort()
  10645. {
  10646. return !this.isInputPort();
  10647. }
  10648. /**
  10649. * Clear the message stored in this port
  10650. */
  10651. clearMessage()
  10652. {
  10653. this._message = EMPTY_MESSAGE;
  10654. }
  10655. /**
  10656. * Is there a valid message located in this port?
  10657. * @returns {boolean}
  10658. */
  10659. hasMessage()
  10660. {
  10661. return !this._message.isEmpty();
  10662. }
  10663. /**
  10664. * Read the message that is in this port
  10665. * @returns {SpeedyPipelineMessage}
  10666. */
  10667. read()
  10668. {
  10669. if(this._message.isEmpty())
  10670. throw new utils_errors/* IllegalOperationError */.js(`Can't read from port ${this.name}: nothing to read`);
  10671. return this._message;
  10672. }
  10673. /**
  10674. * Write a message to this port
  10675. * @param {SpeedyPipelineMessage} message
  10676. */
  10677. write(message)
  10678. {
  10679. throw new utils_errors/* NotSupportedError */.B8(`Can't write ${message} to port ${this.name}: unsupported operation`);
  10680. }
  10681. /**
  10682. * Inspect this port for debugging purposes
  10683. * @param {SpeedyGPU} gpu
  10684. * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
  10685. */
  10686. inspect(gpu)
  10687. {
  10688. return this._message.inspect(gpu);
  10689. }
  10690. /**
  10691. * Default port name
  10692. * @abstract
  10693. * @returns {string}
  10694. */
  10695. static get DEFAULT_NAME()
  10696. {
  10697. throw new utils_errors/* AbstractMethodError */.Mi();
  10698. }
  10699. }
  10700. /**
  10701. * Output port
  10702. */
  10703. class SpeedyPipelineOutputPort extends SpeedyPipelinePort
  10704. {
  10705. /**
  10706. * Constructor
  10707. * @param {string} name the name of this port
  10708. * @param {SpeedyPipelinePortSpec} spec port specification
  10709. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10710. */
  10711. constructor(name, spec, node)
  10712. {
  10713. super(name, spec, node);
  10714. /** @type {SpeedyPipelineMessage} cached message */
  10715. this._cachedMessage = null;
  10716. }
  10717. /**
  10718. * Connect this port to another
  10719. * @param {SpeedyPipelineInputPort} port
  10720. */
  10721. connectTo(port)
  10722. {
  10723. if(!port.isInputPort())
  10724. throw new utils_errors/* IllegalArgumentError */.mG(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
  10725. port.connectTo(this);
  10726. }
  10727. /**
  10728. * Is this an input port?
  10729. * @returns {boolean}
  10730. */
  10731. isInputPort()
  10732. {
  10733. return false;
  10734. }
  10735. /**
  10736. * Write a message to this port
  10737. * @param {SpeedyPipelineMessage} message
  10738. */
  10739. write(message)
  10740. {
  10741. if(!this._spec.accepts(message))
  10742. throw new utils_errors/* IllegalArgumentError */.mG(`Can't write ${message} to port ${this.name}. ${this._spec}`);
  10743. this._message = message;
  10744. }
  10745. /**
  10746. * Write a message to this port using a cached message object
  10747. * @param {...any} args to be passed to SpeedyPipelineMessage.set()
  10748. */
  10749. swrite(...args)
  10750. {
  10751. if(this._cachedMessage == null)
  10752. this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
  10753. this.write(this._cachedMessage.set(...args));
  10754. }
  10755. /**
  10756. * Default port name
  10757. * @returns {string}
  10758. */
  10759. static get DEFAULT_NAME()
  10760. {
  10761. return DEFAULT_OUTPUT_PORT_NAME;
  10762. }
  10763. }
  10764. /**
  10765. * Input port
  10766. */
  10767. class SpeedyPipelineInputPort extends SpeedyPipelinePort
  10768. {
  10769. /**
  10770. * Constructor
  10771. * @param {string} name the name of this port
  10772. * @param {SpeedyPipelinePortSpec} spec port specification
  10773. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10774. */
  10775. constructor(name, spec, node)
  10776. {
  10777. super(name, spec, node);
  10778. /** @type {SpeedyPipelineOutputPort|null} incoming link */
  10779. this._incomingLink = null;
  10780. }
  10781. /**
  10782. * Incoming link
  10783. * @returns {SpeedyPipelineOutputPort|null}
  10784. */
  10785. get incomingLink()
  10786. {
  10787. return this._incomingLink;
  10788. }
  10789. /**
  10790. * Connect this port to another
  10791. * @param {SpeedyPipelineOutputPort} port
  10792. */
  10793. connectTo(port)
  10794. {
  10795. if(!port.isOutputPort())
  10796. throw new utils_errors/* IllegalArgumentError */.mG(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);
  10797. else if(!this._spec.isCompatibleWith(port._spec))
  10798. throw new utils_errors/* IllegalArgumentError */.mG(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
  10799. this._incomingLink = port;
  10800. }
  10801. /**
  10802. * Unlink this port
  10803. */
  10804. disconnect()
  10805. {
  10806. this._incomingLink = null;
  10807. }
  10808. /**
  10809. * Is this an input port?
  10810. * @returns {boolean}
  10811. */
  10812. isInputPort()
  10813. {
  10814. return true;
  10815. }
  10816. /**
  10817. * Receive a message using the incoming link
  10818. * @param {string} [nodeName]
  10819. * @returns {SpeedyPipelineMessage}
  10820. */
  10821. pullMessage(nodeName = '')
  10822. {
  10823. const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
  10824. if(this._incomingLink == null)
  10825. throw new utils_errors/* IllegalOperationError */.js(`No incoming link for input port ${name}`);
  10826. const message = this._incomingLink.read();
  10827. if(!this._spec.accepts(message))
  10828. throw new utils_errors/* IllegalArgumentError */.mG(`Can't receive ${message} at port ${name}: ${this._spec}`);
  10829. return (this._message = message);
  10830. }
  10831. /**
  10832. * Default port name
  10833. * @returns {string}
  10834. */
  10835. static get DEFAULT_NAME()
  10836. {
  10837. return DEFAULT_INPUT_PORT_NAME;
  10838. }
  10839. }
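/*
 * Data-flow sketch (for reference only): an output port writes a message that a
 * connected input port later pulls. `outPort`, `inPort` and `texture` are
 * hypothetical; the two ports are assumed to have compatible Image specs.
 *
 *   outPort.connectTo(inPort);        // delegates to inPort.connectTo(outPort)
 *   outPort.swrite(texture);          // reuses a cached message object
 *   const msg = inPort.pullMessage(); // reads from the incoming link and validates
 */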
  10840. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
  10841. /*
  10842. * speedy-vision.js
  10843. * GPU-accelerated Computer Vision for JavaScript
  10844. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  10845. *
  10846. * Licensed under the Apache License, Version 2.0 (the "License");
  10847. * you may not use this file except in compliance with the License.
  10848. * You may obtain a copy of the License at
  10849. *
  10850. * http://www.apache.org/licenses/LICENSE-2.0
  10851. *
  10852. * Unless required by applicable law or agreed to in writing, software
  10853. * distributed under the License is distributed on an "AS IS" BASIS,
  10854. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10855. * See the License for the specific language governing permissions and
  10856. * limitations under the License.
  10857. *
  10858. * pipeline-portbuilder.js
  10859. * Builder of a port of a node of a pipeline
  10860. */
  10861. /**
  10862. * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
  10863. */
  10864. /**
  10865. * Builder of a port of a node of a pipeline
  10866. */
  10867. class SpeedyPipelinePortBuilder
  10868. {
  10869. /**
  10870. * Constructor
  10871. * @param {typeof SpeedyPipelinePort} portClass input or output?
  10872. * @param {string} portName
  10873. */
  10874. constructor(portClass, portName)
  10875. {
  10876. /** @type {typeof SpeedyPipelinePort} input or output? */
  10877. this._class = portClass;
  10878. /** @type {string} port name */
  10879. this._name = String(portName);
  10880. /** @type {SpeedyPipelineMessageType} accepted message type */
  10881. this._type = SpeedyPipelineMessageType.Nothing;
  10882. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10883. this._messageConstraint = undefined;
  10884. }
  10885. /**
  10886. * Declare that the new port expects a certain type of message
  10887. * @param {SpeedyPipelineMessageType} type expected type
  10888. * @returns {SpeedyPipelinePortBuilder} this builder
  10889. */
  10890. expects(type)
  10891. {
  10892. utils/* Utils.assert */.c.assert(this._type == SpeedyPipelineMessageType.Nothing);
  10893. utils/* Utils.assert */.c.assert(type != SpeedyPipelineMessageType.Nothing);
  10894. this._type = type;
  10895. return this;
  10896. }
  10897. /**
  10898. * Declare that the new port expects messages satisfying a constraint
  10899. * @param {SpeedyPipelineMessageConstraint} constraint
  10900. * @returns {SpeedyPipelinePortBuilder} this builder
  10901. */
  10902. satisfying(constraint)
  10903. {
  10904. utils/* Utils.assert */.c.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
  10905. utils/* Utils.assert */.c.assert(this._messageConstraint === undefined);
  10906. utils/* Utils.assert */.c.assert(typeof constraint === 'function');
  10907. this._messageConstraint = constraint;
  10908. return this;
  10909. }
  10910. /**
  10911. * Build a port
  10912. * @param {SpeedyPipelineNode} node the node to which the new port will belong
  10913. * @returns {SpeedyPipelinePort}
  10914. */
  10915. build(node)
  10916. {
  10917. const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
  10918. return Reflect.construct(this._class, [this._name, spec, node]);
  10919. }
  10920. }
  10921. /**
  10922. * Creates a builder for an input port
  10923. * @param {string} [portName]
  10924. * @returns {SpeedyPipelinePortBuilder}
  10925. */
  10926. function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME)
  10927. {
  10928. return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
  10929. }
  10930. /**
  10931. * Creates a builder for an output port
  10932. * @param {string} [portName]
  10933. * @returns {SpeedyPipelinePortBuilder}
  10934. */
  10935. function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME)
  10936. {
  10937. return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
  10938. }
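/*
 * Usage sketch (for reference only): declaring the ports of a node with the
 * builders above, mirroring how the nodes later in this bundle do it.
 *
 *   const portBuilders = [
 *       InputPort().expects(SpeedyPipelineMessageType.Image),
 *       OutputPort().expects(SpeedyPipelineMessageType.Image)
 *   ];
 *   // each builder.build(node) call is performed by the SpeedyPipelineNode constructor
 */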
  10939. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
  10940. /*
  10941. * speedy-vision.js
  10942. * GPU-accelerated Computer Vision for JavaScript
  10943. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  10944. *
  10945. * Licensed under the Apache License, Version 2.0 (the "License");
  10946. * you may not use this file except in compliance with the License.
  10947. * You may obtain a copy of the License at
  10948. *
  10949. * http://www.apache.org/licenses/LICENSE-2.0
  10950. *
  10951. * Unless required by applicable law or agreed to in writing, software
  10952. * distributed under the License is distributed on an "AS IS" BASIS,
  10953. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10954. * See the License for the specific language governing permissions and
  10955. * limitations under the License.
  10956. *
  10957. * pipeline-node.js
  10958. * Node of a pipeline
  10959. */
  10960. /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
  10961. /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
  10962. /** Generate a random name for a node */
  10963. const generateRandomName = () => Math.random().toString(16).slice(2);
  10964. /** Create an empty input port dictionary */
  10965. const createInputPortDictionary = () => /** @type {InputPortDictionary} */ ( Object.create(null) );
  10966. /** Create an empty output port dictionary */
  10967. const createOutputPortDictionary = () => /** @type {OutputPortDictionary} */ ( Object.create(null) );
  10968. /**
  10969. * Map an array of input ports to an InputPortDictionary whose keys are their names
  10970. * @param {SpeedyPipelineInputPort[]} ports
  10971. * @returns {InputPortDictionary}
  10972. */
  10973. function InputPortDictionary(ports)
  10974. {
  10975. return ports.reduce((dict, port) => ((dict[port.name] = port), dict), createInputPortDictionary());
  10976. }
  10977. /**
  10978. * Map an array of output ports to an OutputPortDictionary whose keys are their names
  10979. * @param {SpeedyPipelineOutputPort[]} ports
  10980. * @returns {OutputPortDictionary}
  10981. */
  10982. function OutputPortDictionary(ports)
  10983. {
  10984. return ports.reduce((dict, port) => ((dict[port.name] = port), dict), createOutputPortDictionary());
  10985. }
  10986. /** A flag used for debugging purposes */
  10987. let _texView = false;
  10988. /**
  10989. * Node of a pipeline
  10990. * @abstract
  10991. */
  10992. class SpeedyPipelineNode
  10993. {
  10994. /**
  10995. * Constructor
  10996. * @param {string} [name] the name of this node
  10997. * @param {number} [texCount] number of work textures
  10998. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10999. */
  11000. constructor(name = generateRandomName(), texCount = 0, portBuilders = [])
  11001. {
  11002. /** @type {string} the name of this node */
  11003. this._name = String(name);
  11004. /** @type {SpeedyDrawableTexture[]} work texture(s) */
  11005. this._tex = (new Array(texCount)).fill(null);
  11006. // build the ports
  11007. const ports = portBuilders.map(builder => builder.build(this));
  11008. const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ ( ports.filter(port => port.isInputPort()) );
  11009. const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ ( ports.filter(port => port.isOutputPort()) );
  11010. /** @type {InputPortDictionary} input ports */
  11011. this._inputPorts = InputPortDictionary(inputPorts);
  11012. /** @type {OutputPortDictionary} output ports */
  11013. this._outputPorts = OutputPortDictionary(outputPorts);
  11014. // validate
  11015. if(this._name.length == 0)
  11016. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid name "${this._name}" for node ${this.fullName}`);
  11017. else if(portBuilders.length == 0)
  11018. throw new utils_errors/* IllegalArgumentError */.mG(`No ports have been found in node ${this.fullName}`);
  11019. }
  11020. /**
  11021. * The name of this node
  11022. * @returns {string}
  11023. */
  11024. get name()
  11025. {
  11026. return this._name;
  11027. }
  11028. /**
  11029. * Name and type of this node
  11030. * @returns {string}
  11031. */
  11032. get fullName()
  11033. {
  11034. return `${this.constructor.name}[${this.name}]`;
  11035. }
  11036. /**
  11037. * Find input port by name
  11038. * @param {string} [portName]
  11039. * @returns {SpeedyPipelineInputPort}
  11040. */
  11041. input(portName = SpeedyPipelineInputPort.DEFAULT_NAME)
  11042. {
  11043. if(portName in this._inputPorts)
  11044. return this._inputPorts[portName];
  11045. throw new utils_errors/* IllegalArgumentError */.mG(`Can't find input port ${portName} in node ${this.fullName}`);
  11046. }
  11047. /**
  11048. * Find output port by name
  11049. * @param {string} [portName]
  11050. * @returns {SpeedyPipelineOutputPort}
  11051. */
  11052. output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME)
  11053. {
  11054. if(portName in this._outputPorts)
  11055. return this._outputPorts[portName];
  11056. throw new utils_errors/* IllegalArgumentError */.mG(`Can't find output port ${portName} in node ${this.fullName}`);
  11057. }
  11058. /**
  11059. * Get data from the input ports and execute
  11060. * the task that this node is supposed to!
  11061. * @param {SpeedyGPU} gpu
  11062. * @returns {void|SpeedyPromise<void>}
  11063. */
  11064. execute(gpu)
  11065. {
  11066. let portName;
  11067. // clear output ports
  11068. for(portName in this._outputPorts)
  11069. this._outputPorts[portName].clearMessage();
  11070. // let the input ports receive what is due
  11071. for(portName in this._inputPorts)
  11072. this._inputPorts[portName].pullMessage(this.fullName);
  11073. // run the task
  11074. const runTask = this._run(gpu);
  11075. if(typeof runTask === 'undefined')
  11076. return void(this._finishExecution(gpu));
  11077. else
  11078. return runTask.then(() => this._finishExecution(gpu));
  11079. }
  11080. /**
  11081. * Finish the execution of this node;
  11082. * to be called after execute()
  11083. * @param {SpeedyGPU} gpu
  11084. */
  11085. _finishExecution(gpu)
  11086. {
  11087. // ensure that no output ports are empty
  11088. for(const portName in this._outputPorts) {
  11089. utils/* Utils.assert */.c.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
  11090. }
  11091. // run diagnostics on the node / pipeline
  11092. if(settings/* Settings.logging */.Z.logging === 'diagnostic') {
  11093. utils/* Utils.log */.c.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
  11094. // Inspecting the data has performance implications.
  11095. // It is for diagnostic purposes only, not meant to be done in production!
  11096. for(const portName in this._inputPorts)
  11097. utils/* Utils.log */.c.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
  11098. for(const portName in this._outputPorts)
  11099. utils/* Utils.log */.c.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
  11100. }
  11101. }
  11102. /**
  11103. * Run the specific task of this node
  11104. * @abstract
  11105. * @param {SpeedyGPU} gpu
  11106. * @returns {void|SpeedyPromise<void>}
  11107. */
  11108. _run(gpu)
  11109. {
  11110. throw new utils_errors/* AbstractMethodError */.Mi();
  11111. }
  11112. /**
  11113. * Initializes this node
  11114. * @param {SpeedyGPU} gpu
  11115. */
  11116. init(gpu)
  11117. {
  11118. gpu.subscribe(this._allocateWorkTextures, this, gpu);
  11119. this._allocateWorkTextures(gpu);
  11120. }
  11121. /**
  11122. * Releases this node
  11123. * @param {SpeedyGPU} gpu
  11124. */
  11125. release(gpu)
  11126. {
  11127. this._deallocateWorkTextures(gpu);
  11128. gpu.unsubscribe(this._allocateWorkTextures, this);
  11129. }
  11130. /**
  11131. * Clear all ports
  11132. */
  11133. clearPorts()
  11134. {
  11135. let portName;
  11136. for(portName in this._inputPorts)
  11137. this._inputPorts[portName].clearMessage();
  11138. for(portName in this._outputPorts)
  11139. this._outputPorts[portName].clearMessage();
  11140. }
  11141. /**
  11142. * Find all nodes that feed input to this node
  11143. * @returns {SpeedyPipelineNode[]}
  11144. */
  11145. inputNodes()
  11146. {
  11147. const nodes = [];
  11148. for(const portName in this._inputPorts) {
  11149. const port = this._inputPorts[portName];
  11150. if(port.incomingLink != null)
  11151. nodes.push(port.incomingLink.node);
  11152. }
  11153. return nodes;
  11154. }
  11155. /**
  11156. * Is this a source of the pipeline?
  11157. * @returns {boolean}
  11158. */
  11159. isSource()
  11160. {
  11161. return false;
  11162. }
  11163. /**
  11164. * Is this a sink of the pipeline?
  11165. * @returns {boolean}
  11166. */
  11167. isSink()
  11168. {
  11169. return false;
  11170. // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
  11171. //return Object.keys(this._outputPorts).length == 0;
  11172. }
  11173. /**
  11174. * Allocate work texture(s)
  11175. * @param {SpeedyGPU} gpu
  11176. */
  11177. _allocateWorkTextures(gpu)
  11178. {
  11179. for(let j = 0; j < this._tex.length; j++)
  11180. this._tex[j] = gpu.texturePool.allocate();
  11181. }
  11182. /**
  11183. * Deallocate work texture(s)
  11184. * @param {SpeedyGPU} gpu
  11185. */
  11186. _deallocateWorkTextures(gpu)
  11187. {
  11188. for(let j = this._tex.length - 1; j >= 0; j--)
  11189. this._tex[j] = gpu.texturePool.free(this._tex[j]);
  11190. }
  11191. /**
  11192. * Visually inspect a texture for debugging purposes
  11193. * @param {SpeedyGPU} gpu
  11194. * @param {SpeedyDrawableTexture} texture
  11195. */
  11196. _visualize(gpu, texture)
  11197. {
  11198. const canvas = gpu.renderToCanvas(texture);
  11199. if(!_texView) {
  11200. document.body.appendChild(canvas);
  11201. _texView = true;
  11202. }
  11203. }
  11204. }
  11205. /**
  11206. * Source node (a node with no input ports)
  11207. * @abstract
  11208. */
  11209. class SpeedyPipelineSourceNode extends SpeedyPipelineNode
  11210. {
  11211. /**
  11212. * Constructor
  11213. * @param {string} [name] the name of this node
  11214. * @param {number} [texCount] number of work textures
  11215. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  11216. */
  11217. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  11218. {
  11219. super(name, texCount, portBuilders);
  11220. utils/* Utils.assert */.c.assert(Object.keys(this._inputPorts).length == 0);
  11221. }
  11222. /**
  11223. * Is this a source of the pipeline?
  11224. * @returns {boolean}
  11225. */
  11226. isSource()
  11227. {
  11228. return true;
  11229. }
  11230. }
  11231. /**
  11232. * Sink node (a node with no output ports)
  11233. * @abstract
  11234. */
  11235. class SpeedyPipelineSinkNode extends SpeedyPipelineNode
  11236. {
  11237. /**
  11238. * Constructor
  11239. * @param {string} [name] the name of this node
  11240. * @param {number} [texCount] number of work textures
  11241. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  11242. */
  11243. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  11244. {
  11245. super(name, texCount, portBuilders);
  11246. utils/* Utils.assert */.c.assert(Object.keys(this._outputPorts).length == 0);
  11247. }
  11248. /**
  11249. * Export data from this node to the user
  11250. * @abstract
  11251. * @returns {SpeedyPromise<any>}
  11252. */
  11253. export()
  11254. {
  11255. throw new utils_errors/* AbstractMethodError */.Mi();
  11256. }
  11257. /**
  11258. * Is this a sink of the pipeline?
  11259. * @returns {boolean}
  11260. */
  11261. isSink()
  11262. {
  11263. return true;
  11264. }
  11265. }
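/*
 * Subclassing sketch (for reference only; the class below is illustrative and
 * not part of the library): a pass-through node with one image input and one
 * image output.
 *
 *   class MyPassThroughNode extends SpeedyPipelineNode
 *   {
 *       constructor(name = undefined)
 *       {
 *           super(name, 0, [
 *               InputPort().expects(SpeedyPipelineMessageType.Image),
 *               OutputPort().expects(SpeedyPipelineMessageType.Image)
 *           ]);
 *       }
 *
 *       _run(gpu)
 *       {
 *           const { image, format } = this.input().read();
 *           this.output().swrite(image, format);
 *       }
 *   }
 */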
  11266. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
  11267. /*
  11268. * speedy-vision.js
  11269. * GPU-accelerated Computer Vision for JavaScript
  11270. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11271. *
  11272. * Licensed under the Apache License, Version 2.0 (the "License");
  11273. * you may not use this file except in compliance with the License.
  11274. * You may obtain a copy of the License at
  11275. *
  11276. * http://www.apache.org/licenses/LICENSE-2.0
  11277. *
  11278. * Unless required by applicable law or agreed to in writing, software
  11279. * distributed under the License is distributed on an "AS IS" BASIS,
  11280. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11281. * See the License for the specific language governing permissions and
  11282. * limitations under the License.
  11283. *
  11284. * speedy-keypoint-match.js
  11285. * A match between two keypoint descriptors
  11286. */
  11287. // Constants
  11288. const MATCH_NOT_FOUND = -1;
  11289. /**
  11290. * A match between two keypoint descriptors
  11291. */
  11292. class SpeedyKeypointMatch
  11293. {
  11294. /**
  11295. * Constructor
  11296. * @param {number} index index of the stored keypoint, a non-negative integer
  11297. * @param {number} distance a measure of the quality of the match, a non-negative number
  11298. */
  11299. constructor(index, distance)
  11300. {
  11301. const isValid = distance < globals.MATCH_MAX_DISTANCE;
  11302. /** @type {number} index of the stored keypoint */
  11303. this._index = isValid ? (index | 0) : MATCH_NOT_FOUND;
  11304. /** @type {number} a measure of the quality of the match */
  11305. this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
  11306. // done!
  11307. return Object.freeze(this);
  11308. }
  11309. /**
  11310. * The index of the stored keypoint
  11311. * @returns {number}
  11312. */
  11313. get index()
  11314. {
  11315. return this._index;
  11316. }
  11317. /**
  11318. * A measure of the quality of the match (lower values indicate better matches)
  11319. * @returns {number}
  11320. */
  11321. get distance()
  11322. {
  11323. return this._distance;
  11324. }
  11325. /**
  11326. * A string representation of the keypoint match
  11327. * @returns {string}
  11328. */
  11329. toString()
  11330. {
  11331. return `SpeedyKeypointMatch(${this.index},${this.distance})`;
  11332. }
  11333. }
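/*
 * Usage sketch (for reference only): the constructor above invalidates a match
 * whose distance is not below MATCH_MAX_DISTANCE.
 *
 *   const match = new SpeedyKeypointMatch(7, 32);
 *   match.index;    // 7 if 32 < MATCH_MAX_DISTANCE, otherwise -1 (not found)
 *   match.distance; // 32 in the first case, +Infinity in the second
 */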
  11334. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
  11335. /*
  11336. * speedy-vision.js
  11337. * GPU-accelerated Computer Vision for JavaScript
  11338. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11339. *
  11340. * Licensed under the Apache License, Version 2.0 (the "License");
  11341. * you may not use this file except in compliance with the License.
  11342. * You may obtain a copy of the License at
  11343. *
  11344. * http://www.apache.org/licenses/LICENSE-2.0
  11345. *
  11346. * Unless required by applicable law or agreed to in writing, software
  11347. * distributed under the License is distributed on an "AS IS" BASIS,
  11348. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11349. * See the License for the specific language governing permissions and
  11350. * limitations under the License.
  11351. *
  11352. * speedy-keypoint.js
  11353. * Keypoint class
  11354. */
  11355. /**
  11356. * Represents a keypoint
  11357. */
  11358. class SpeedyKeypoint
  11359. {
  11360. /**
  11361. * Constructor
  11362. * @param {number} x X position
  11363. * @param {number} y Y position
  11364. * @param {number} [lod] Level-of-detail
  11365. * @param {number} [rotation] Rotation in radians
  11366. * @param {number} [score] Cornerness measure
  11367. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11368. */
  11369. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null)
  11370. {
  11371. /** @type {SpeedyPoint2} keypoint position */
  11372. this._position = new SpeedyPoint2(+x, +y);
  11373. /** @type {number} level of detail */
  11374. this._lod = +lod;
  11375. /** @type {number} rotation in radians */
  11376. this._rotation = +rotation;
  11377. /** @type {number} a cornerness measure */
  11378. this._score = +score;
  11379. /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
  11380. this._descriptor = descriptor;
  11381. }
  11382. /**
  11383. * Converts this keypoint to a descriptive string
  11384. * @returns {string}
  11385. */
  11386. toString()
  11387. {
  11388. return `SpeedyKeypoint(${this.x},${this.y})`;
  11389. }
  11390. /**
  11391. * The position of this keypoint
  11392. * @returns {SpeedyPoint2}
  11393. */
  11394. get position()
  11395. {
  11396. return this._position;
  11397. }
  11398. /**
  11399. * The x-position of this keypoint
  11400. * @returns {number}
  11401. */
  11402. get x()
  11403. {
  11404. return this._position.x;
  11405. }
  11406. /**
  11407. * The x-position of this keypoint
  11408. * @param {number} value
  11409. */
  11410. set x(value)
  11411. {
  11412. this._position.x = +value;
  11413. }
  11414. /**
  11415. * The y-position of this keypoint
  11416. * @returns {number}
  11417. */
  11418. get y()
  11419. {
  11420. return this._position.y;
  11421. }
  11422. /**
  11423. * The y-position of this keypoint
  11424. * @param {number} value
  11425. */
  11426. set y(value)
  11427. {
  11428. this._position.y = +value;
  11429. }
  11430. /**
  11431. * The pyramid level-of-detail from which this keypoint was extracted
  11432. * @returns {number}
  11433. */
  11434. get lod()
  11435. {
  11436. return this._lod;
  11437. }
  11438. /**
  11439. * Scale: 2^lod
  11440. * @returns {number}
  11441. */
  11442. get scale()
  11443. {
  11444. return Math.pow(2, this._lod);
  11445. }
  11446. /**
  11447. * The orientation of the keypoint, in radians
  11448. * @returns {number} Angle in radians
  11449. */
  11450. get rotation()
  11451. {
  11452. return this._rotation;
  11453. }
  11454. /**
  11455. * Score: a cornerness measure
  11456. * @returns {number} Score
  11457. */
  11458. get score()
  11459. {
  11460. return this._score;
  11461. }
  11462. /**
  11463. * Keypoint descriptor
  11464. * @returns {SpeedyKeypointDescriptor|null}
  11465. */
  11466. get descriptor()
  11467. {
  11468. return this._descriptor;
  11469. }
  11470. }
  11471. /**
  11472. * Represents a tracked keypoint
  11473. */
  11474. class SpeedyTrackedKeypoint extends SpeedyKeypoint
  11475. {
  11476. /**
  11477. * Constructor
  11478. * @param {number} x X position
  11479. * @param {number} y Y position
  11480. * @param {number} [lod] Level-of-detail
  11481. * @param {number} [rotation] Rotation in radians
  11482. * @param {number} [score] Cornerness measure
  11483. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11484. * @param {SpeedyVector2} [flow] flow vector
  11485. */
  11486. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0,0))
  11487. {
  11488. super(x, y, lod, rotation, score, descriptor);
  11489. /** @type {SpeedyVector2} flow vector */
  11490. this._flow = flow;
  11491. }
  11492. /**
  11493. * Flow vector
  11494. * @returns {SpeedyVector2}
  11495. */
  11496. get flow()
  11497. {
  11498. return this._flow;
  11499. }
  11500. }
  11501. /**
  11502. * Represents a matched keypoint
  11503. */
  11504. class SpeedyMatchedKeypoint extends SpeedyKeypoint
  11505. {
  11506. /**
  11507. * Constructor
  11508. * @param {number} x X position
  11509. * @param {number} y Y position
  11510. * @param {number} [lod] Level-of-detail
  11511. * @param {number} [rotation] Rotation in radians
  11512. * @param {number} [score] Cornerness measure
  11513. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  11514. * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
  11515. */
  11516. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = [])
  11517. {
  11518. super(x, y, lod, rotation, score, descriptor);
  11519. /** @type {SpeedyKeypointMatch[]} keypoint matches */
  11520. this._matches = matches;
  11521. }
  11522. /**
  11523. * Keypoint matches
  11524. * @returns {SpeedyKeypointMatch[]}
  11525. */
  11526. get matches()
  11527. {
  11528. return this._matches;
  11529. }
  11530. }
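/*
 * Usage sketch (for reference only): keypoints are plain value objects.
 *
 *   const keypoint = new SpeedyKeypoint(320, 240, 1, 0, 80);
 *   keypoint.scale;      // 2^lod = 2
 *   keypoint.toString(); // "SpeedyKeypoint(320,240)"
 */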
  11531. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
  11532. /*
  11533. * speedy-vision.js
  11534. * GPU-accelerated Computer Vision for JavaScript
  11535. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11536. *
  11537. * Licensed under the Apache License, Version 2.0 (the "License");
  11538. * you may not use this file except in compliance with the License.
  11539. * You may obtain a copy of the License at
  11540. *
  11541. * http://www.apache.org/licenses/LICENSE-2.0
  11542. *
  11543. * Unless required by applicable law or agreed to in writing, software
  11544. * distributed under the License is distributed on an "AS IS" BASIS,
  11545. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11546. * See the License for the specific language governing permissions and
  11547. * limitations under the License.
  11548. *
  11549. * pipeline.js
  11550. * A pipeline is a network of nodes in which data flows to a sink
  11551. */
  11552. /**
  11553. * A dictionary indexed by the names of the sink nodes
  11554. * @typedef {Object<string,any>} SpeedyPipelineOutput
  11555. */
  11556. /** @type {SpeedyGPU} shared GPU programs & textures */
  11557. let gpu = null;
  11558. /** @type {number} gpu reference count */
  11559. let referenceCount = 0;
  11560. /**
  11561. * A pipeline is a network of nodes in which data flows to a sink
  11562. */
  11563. class SpeedyPipeline
  11564. {
  11565. /**
  11566. * Constructor
  11567. */
  11568. constructor()
  11569. {
  11570. /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
  11571. this._nodes = [];
  11572. /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
  11573. this._sequence = [];
  11574. /** @type {boolean} are we running the pipeline at this moment? */
  11575. this._busy = false;
  11576. }
  11577. /**
  11578. * Find a node by its name
  11579. * @template T extends SpeedyPipelineNode
  11580. * @param {string} name
  11581. * @returns {T|null}
  11582. */
  11583. node(name)
  11584. {
  11585. for(let i = 0, n = this._nodes.length; i < n; i++) {
  11586. if(this._nodes[i].name === name)
  11587. return this._nodes[i];
  11588. }
  11589. return null;
  11590. }
  11591. /**
  11592. * Initialize the pipeline
  11593. * @param {...SpeedyPipelineNode} nodes
  11594. * @returns {SpeedyPipeline} this pipeline
  11595. */
  11596. init(...nodes)
  11597. {
  11598. // validate
  11599. if(this._nodes.length > 0)
  11600. throw new utils_errors/* IllegalOperationError */.js(`The pipeline has already been initialized`);
  11601. else if(nodes.length == 0)
  11602. throw new utils_errors/* IllegalArgumentError */.mG(`Can't initialize the pipeline. Please specify its nodes`);
  11603. // create a GPU instance and increase the reference count
  11604. if(0 == referenceCount++) {
  11605. utils/* Utils.assert */.c.assert(!gpu, 'Duplicate SpeedyGPU instance');
  11606. gpu = new SpeedyGPU();
  11607. }
  11608. // add nodes to the network
  11609. for(let i = 0; i < nodes.length; i++) {
  11610. const node = nodes[i];
  11611. if(!this._nodes.includes(node))
  11612. this._nodes.push(node);
  11613. }
  11614. // generate the sequence of nodes
  11615. this._sequence = SpeedyPipeline._tsort(this._nodes);
  11616. SpeedyPipeline._validateSequence(this._sequence);
  11617. // initialize nodes
  11618. for(let i = 0; i < this._sequence.length; i++)
  11619. this._sequence[i].init(gpu);
  11620. // done!
  11621. return this;
  11622. }
  11623. /**
  11624. * Release the resources associated with this pipeline
  11625. * @returns {null}
  11626. */
  11627. release()
  11628. {
  11629. if(this._nodes.length == 0)
  11630. throw new utils_errors/* IllegalOperationError */.js(`The pipeline has already been released or has never been initialized`);
  11631. // release nodes
  11632. for(let i = this._sequence.length - 1; i >= 0; i--)
  11633. this._sequence[i].release(gpu);
  11634. this._sequence.length = 0;
  11635. this._nodes.length = 0;
  11636. // decrease reference count and release GPU if necessary
  11637. if(0 == --referenceCount)
  11638. gpu = gpu.release();
  11639. // done!
  11640. return null;
  11641. }
  11642. /**
  11643. * Run the pipeline
  11644. * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
  11645. */
  11646. run()
  11647. {
  11648. utils/* Utils.assert */.c.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
  11649. // is the pipeline busy?
  11650. if(this._busy) {
  11651. // if so, we need to wait 'til it finishes
  11652. return new speedy_promise/* SpeedyPromise */.s((resolve, reject) => {
  11653. setTimeout(() => this.run().then(resolve, reject), 0);
  11654. });
  11655. }
  11656. else {
  11657. // the pipeline is now busy and won't accept concurrent tasks
  11658. // (we allocate textures using a single pool)
  11659. this._busy = true;
  11660. }
  11661. // find the sinks
  11662. const sinks = /** @type {SpeedyPipelineSinkNode[]} */ ( this._sequence.filter(node => node.isSink()) );
  11663. // create output template
  11664. const template = SpeedyPipeline._createOutputTemplate(sinks);
  11665. // diagnostic log
  11666. if(settings/* Settings.logging */.Z.logging === 'diagnostic')
  11667. utils/* Utils.log */.c.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
  11668. // run the pipeline
  11669. return SpeedyPipeline._runSequence(this._sequence).then(() =>
  11670. // export results
  11671. speedy_promise/* SpeedyPromise.all */.s.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
  11672. // aggregate results by the names of the sinks
  11673. results.reduce((obj, val, idx) => ((obj[sinks[idx].name] = val), obj), template)
  11674. )
  11675. ).finally(() => {
  11676. // clear all ports
  11677. for(let i = this._sequence.length - 1; i >= 0; i--)
  11678. this._sequence[i].clearPorts();
  11679. // the pipeline is no longer busy
  11680. this._busy = false;
  11681. // diagnostic log
  11682. if(settings/* Settings.logging */.Z.logging === 'diagnostic') {
  11683. utils/* Utils.log */.c.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
  11684. Object.keys(template).forEach(entry => {
  11685. utils/* Utils.log */.c.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
  11686. });
  11687. }
  11688. }).turbocharge();
  11689. }
  11690. /**
  11691. * @internal
  11692. *
  11693. * GPU instance
  11694. * @returns {SpeedyGPU}
  11695. */
  11696. get _gpu()
  11697. {
  11698. return gpu;
  11699. }
  11700. /**
  11701. * Execute the tasks of a sequence of nodes
  11702. * @param {SpeedyPipelineNode[]} sequence sequence of nodes
  11703. * @param {number} [i] in [0,n)
  11704. * @param {number} [n] number of nodes
  11705. * @returns {SpeedyPromise<void>}
  11706. */
  11707. static _runSequence(sequence, i = 0, n = sequence.length)
  11708. {
  11709. for(; i < n; i++) {
  11710. const runTask = sequence[i].execute(gpu);
  11711. // this call greatly improves performance when downloading pixel data using PBOs
  11712. gpu.gl.flush();
  11713. if(typeof runTask !== 'undefined')
  11714. return runTask.then(() => SpeedyPipeline._runSequence(sequence, i+1, n));
  11715. }
  11716. return speedy_promise/* SpeedyPromise.resolve */.s.resolve();
  11717. }
  11718. /**
  11719. * Topological sorting
  11720. * @param {SpeedyPipelineNode[]} nodes
  11721. * @returns {SpeedyPipelineNode[]}
  11722. */
  11723. static _tsort(nodes)
  11724. {
  11725. /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
  11726. const outlinks = SpeedyPipeline._outlinks(nodes);
  11727. const stack = nodes.map(node => /** @type {StackNode} */ ([ node, false ]) );
  11728. const trash = new Set();
  11729. const sorted = new Array(nodes.length);
  11730. let j = sorted.length;
  11731. while(stack.length > 0) {
  11732. const [ node, done ] = stack.pop();
  11733. if(!done) {
  11734. if(!trash.has(node)) {
  11735. const outnodes = outlinks.get(node);
  11736. trash.add(node);
  11737. stack.push([ node, true ]);
  11738. stack.push(...(outnodes.map(node => /** @type {StackNode} */ ([ node, false ]) )));
  11739. if(outnodes.some(node => trash.has(node) && !sorted.includes(node)))
  11740. throw new utils_errors/* IllegalOperationError */.js(`Pipeline networks cannot have cycles!`);
  11741. }
  11742. }
  11743. else
  11744. sorted[--j] = node;
  11745. }
  11746. return sorted;
  11747. }
  11748. /**
  11749. * Figure out the outgoing links of all nodes
  11750. * @param {SpeedyPipelineNode[]} nodes
  11751. * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
  11752. */
  11753. static _outlinks(nodes)
  11754. {
  11755. const outlinks = new Map();
  11756. for(let k = 0; k < nodes.length; k++)
  11757. outlinks.set(nodes[k], []);
  11758. for(let i = 0; i < nodes.length; i++) {
  11759. const to = nodes[i];
  11760. const inputs = to.inputNodes();
  11761. for(let j = 0; j < inputs.length; j++) {
  11762. const from = inputs[j];
  11763. const links = outlinks.get(from);
  11764. if(!links)
  11765. throw new utils_errors/* IllegalOperationError */.js(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
  11766. if(!links.includes(to))
  11767. links.push(to);
  11768. }
  11769. }
  11770. return outlinks;
  11771. }
  11772. /**
  11773. * Generate the output template by aggregating the names of the sinks
  11774. * @param {SpeedyPipelineNode[]} [sinks]
  11775. * @returns {SpeedyPipelineOutput}
  11776. */
  11777. static _createOutputTemplate(sinks = [])
  11778. {
  11779. const template = Object.create(null);
  11780. for(let i = sinks.length - 1; i >= 0; i--)
  11781. template[sinks[i].name] = null;
  11782. return template;
  11783. }
  11784. /**
  11785. * Validate a sequence of nodes
  11786. * @param {SpeedyPipelineNode[]} sequence
  11787. */
  11788. static _validateSequence(sequence)
  11789. {
  11790. if(sequence.length == 0)
  11791. throw new utils_errors/* IllegalOperationError */.js(`Pipeline doesn't have nodes`);
  11792. else if(!sequence[0].isSource())
  11793. throw new utils_errors/* IllegalOperationError */.js(`Pipeline doesn't have a source`);
  11794. else if(!sequence.find(node => node.isSink()))
  11795. throw new utils_errors/* IllegalOperationError */.js(`Pipeline doesn't have a sink`);
  11796. }
  11797. }
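/*
 * Usage sketch (for reference only): wiring and running a pipeline. `source`
 * and `sink` stand for hypothetical nodes created elsewhere; the name of the
 * sink indexes the output dictionary.
 *
 *   const pipeline = new SpeedyPipeline();
 *   source.output().connectTo(sink.input());
 *   pipeline.init(source, sink);
 *   pipeline.run().then(result => {
 *       console.log(result[sink.name]); // data exported by the sink
 *       pipeline.release();             // free GPU resources when done
 *   });
 */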
  11798. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
  11799. /*
  11800. * speedy-vision.js
  11801. * GPU-accelerated Computer Vision for JavaScript
  11802. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11803. *
  11804. * Licensed under the Apache License, Version 2.0 (the "License");
  11805. * you may not use this file except in compliance with the License.
  11806. * You may obtain a copy of the License at
  11807. *
  11808. * http://www.apache.org/licenses/LICENSE-2.0
  11809. *
  11810. * Unless required by applicable law or agreed to in writing, software
  11811. * distributed under the License is distributed on an "AS IS" BASIS,
  11812. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11813. * See the License for the specific language governing permissions and
  11814. * limitations under the License.
  11815. *
  11816. * image-input.js
  11817. * Gets an image into a pipeline
  11818. */
  11819. // Constants
  11820. const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
  11821. /**
  11822. * Gets an image into a pipeline
  11823. */
  11824. class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode
  11825. {
  11826. /**
  11827. * Constructor
  11828. * @param {string} [name] name of the node
  11829. */
  11830. constructor(name = undefined)
  11831. {
  11832. super(name, UPLOAD_BUFFER_SIZE, [
  11833. OutputPort().expects(SpeedyPipelineMessageType.Image)
  11834. ]);
  11835. /** @type {SpeedyMedia|null} source media */
  11836. this._media = null;
  11837. /** @type {number} texture index */
  11838. this._textureIndex = 0;
  11839. }
  11840. /**
  11841. * Source media
  11842. * @returns {SpeedyMedia|null}
  11843. */
  11844. get media()
  11845. {
  11846. return this._media;
  11847. }
  11848. /**
  11849. * Source media
  11850. * @param {SpeedyMedia|null} media
  11851. */
  11852. set media(media)
  11853. {
  11854. if(media !== null && !(media instanceof SpeedyMedia))
  11855. throw new utils_errors/* IllegalArgumentError */.mG(`Not a SpeedyMedia: ${media}`);
  11856. this._media = media;
  11857. }
  11858. /**
  11859. * Run the specific task of this node
  11860. * @param {SpeedyGPU} gpu
  11861. * @returns {void|SpeedyPromise<void>}
  11862. */
  11863. _run(gpu)
  11864. {
  11865. if(this._media == null)
  11866. throw new utils_errors/* IllegalOperationError */.js(`Did you forget to set the media of ${this.fullName}?`);
  11867. // use round-robin to mitigate WebGL's implicit synchronization
  11868. // and maybe minimize texture upload times
  11869. this._textureIndex = (this._textureIndex + 1) % this._tex.length;
  11870. // upload texture
  11871. const outputTexture = this._tex[this._textureIndex];
  11872. gpu.upload(this._media._source, outputTexture);
  11873. this.output().swrite(outputTexture, this._media._format);
  11874. }
  11875. }
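/*
 * Usage sketch (illustrative, not executed by the bundle): an image source is
 * fed a SpeedyMedia before the pipeline runs. Speedy.load and the factory name
 * are assumptions about the public API; the round-robin upload buffer
 * (UPLOAD_BUFFER_SIZE = 2) is internal and needs no user action.
 *
 *   const media = await Speedy.load(document.querySelector('video'));
 *   const source = Speedy.Image.Source();
 *   source.media = media; // validated by the setter above
 */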
  11876. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
  11877. /*
  11878. * speedy-vision.js
  11879. * GPU-accelerated Computer Vision for JavaScript
  11880. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11881. *
  11882. * Licensed under the Apache License, Version 2.0 (the "License");
  11883. * you may not use this file except in compliance with the License.
  11884. * You may obtain a copy of the License at
  11885. *
  11886. * http://www.apache.org/licenses/LICENSE-2.0
  11887. *
  11888. * Unless required by applicable law or agreed to in writing, software
  11889. * distributed under the License is distributed on an "AS IS" BASIS,
  11890. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11891. * See the License for the specific language governing permissions and
  11892. * limitations under the License.
  11893. *
  11894. * image-output.js
  11895. * Gets an image out of a pipeline
  11896. */
  11897. /**
  11898. * Gets an image out of a pipeline
  11899. */
  11900. class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode
  11901. {
  11902. /**
  11903. * Constructor
  11904. * @param {string} [name] name of the node
  11905. */
  11906. constructor(name = 'image')
  11907. {
  11908. super(name, 0, [
  11909. InputPort().expects(SpeedyPipelineMessageType.Image)
  11910. ]);
11911. /** @type {ImageBitmap|null} output bitmap */
  11912. this._bitmap = null;
  11913. /** @type {ImageFormat} output format */
  11914. this._format = types/* ImageFormat.RGBA */.D3.RGBA;
  11915. }
  11916. /**
  11917. * Export data from this node to the user
  11918. * @returns {SpeedyPromise<SpeedyMedia>}
  11919. */
  11920. export()
  11921. {
  11922. utils/* Utils.assert */.c.assert(this._bitmap != null);
  11923. return SpeedyMedia.load(this._bitmap, { format: this._format }, false);
  11924. }
  11925. /**
  11926. * Run the specific task of this node
  11927. * @param {SpeedyGPU} gpu
  11928. * @returns {void|SpeedyPromise<void>}
  11929. */
  11930. _run(gpu)
  11931. {
  11932. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  11933. return new speedy_promise/* SpeedyPromise */.s(resolve => {
  11934. const canvas = gpu.renderToCanvas(image);
  11935. createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
  11936. this._bitmap = bitmap;
  11937. this._format = format;
  11938. resolve();
  11939. });
  11940. });
  11941. }
  11942. }
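/*
 * Usage sketch (illustrative): the sink converts the output texture to an
 * ImageBitmap and exposes it as a SpeedyMedia keyed by the sink's name.
 * Factory names are assumptions about the public API.
 *
 *   const sink = Speedy.Image.Sink('image'); // key in the pipeline output
 *   // ... connect some node to sink.input(), init the pipeline ...
 *   const { image } = await pipeline.run(); // image is a SpeedyMedia
 */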
  11943. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
  11944. /*
  11945. * speedy-vision.js
  11946. * GPU-accelerated Computer Vision for JavaScript
  11947. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  11948. *
  11949. * Licensed under the Apache License, Version 2.0 (the "License");
  11950. * you may not use this file except in compliance with the License.
  11951. * You may obtain a copy of the License at
  11952. *
  11953. * http://www.apache.org/licenses/LICENSE-2.0
  11954. *
  11955. * Unless required by applicable law or agreed to in writing, software
  11956. * distributed under the License is distributed on an "AS IS" BASIS,
  11957. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11958. * See the License for the specific language governing permissions and
  11959. * limitations under the License.
  11960. *
  11961. * multiplexer.js
  11962. * Image multiplexer
  11963. */
  11964. /** @type {string[]} the names of the input ports indexed by their number */
  11965. const INPUT_PORT = [ 'in0', 'in1' ];
  11966. /**
  11967. * Image multiplexer
  11968. */
  11969. class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode
  11970. {
  11971. /**
  11972. * Constructor
  11973. * @param {string} [name] name of the node
  11974. */
  11975. constructor(name = undefined)
  11976. {
  11977. super(name, 0, [
  11978. ...(INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image))),
  11979. OutputPort().expects(SpeedyPipelineMessageType.Image),
  11980. ]);
  11981. /** @type {number} which port should be linked to the output? */
  11982. this._port = 0;
  11983. }
  11984. /**
  11985. * The number of the port that should be linked to the output
  11986. * @returns {number}
  11987. */
  11988. get port()
  11989. {
  11990. return this._port;
  11991. }
  11992. /**
  11993. * The number of the port that should be linked to the output
  11994. * @param {number} port
  11995. */
  11996. set port(port)
  11997. {
  11998. if(port < 0 || port >= INPUT_PORT.length)
  11999. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid port: ${port}`);
  12000. this._port = port | 0;
  12001. }
  12002. /**
  12003. * Run the specific task of this node
  12004. * @param {SpeedyGPU} gpu
  12005. * @returns {void|SpeedyPromise<void>}
  12006. */
  12007. _run(gpu)
  12008. {
  12009. const message = this.input(INPUT_PORT[this._port]).read();
  12010. this.output().write(message);
  12011. }
  12012. }
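/*
 * Usage sketch (illustrative): a multiplexer forwards either 'in0' or 'in1' to
 * its output depending on the port property. Factory name assumed.
 *
 *   const mux = Speedy.Image.Multiplexer();
 *   nodeA.output().connectTo(mux.input('in0'));
 *   nodeB.output().connectTo(mux.input('in1'));
 *   mux.port = 1; // forward nodeB on the next run
 */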
  12013. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
  12014. /*
  12015. * speedy-vision.js
  12016. * GPU-accelerated Computer Vision for JavaScript
  12017. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12018. *
  12019. * Licensed under the Apache License, Version 2.0 (the "License");
  12020. * you may not use this file except in compliance with the License.
  12021. * You may obtain a copy of the License at
  12022. *
  12023. * http://www.apache.org/licenses/LICENSE-2.0
  12024. *
  12025. * Unless required by applicable law or agreed to in writing, software
  12026. * distributed under the License is distributed on an "AS IS" BASIS,
  12027. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12028. * See the License for the specific language governing permissions and
  12029. * limitations under the License.
  12030. *
  12031. * buffer.js
  12032. * Image Buffer
  12033. */
  12034. /**
  12035. * Image Buffer: a node with memory.
  12036. * At time t, it outputs the image received at time t-1
  12037. */
  12038. class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode
  12039. {
  12040. /**
  12041. * Constructor
  12042. * @param {string} [name] name of the node
  12043. */
  12044. constructor(name = undefined)
  12045. {
  12046. super(name, 2, [
  12047. InputPort().expects(SpeedyPipelineMessageType.Image),
  12048. OutputPort().expects(SpeedyPipelineMessageType.Image)
  12049. ]);
  12050. /** @type {number} current page: 0 or 1 */
  12051. this._pageIndex = 0;
  12052. /** @type {boolean} first run? */
  12053. this._initialized = false;
  12054. /** @type {ImageFormat} previous image format */
  12055. this._previousFormat = types/* ImageFormat.RGBA */.D3.RGBA;
  12056. /** @type {boolean} frozen buffer? */
  12057. this._frozen = false;
  12058. }
  12059. /**
  12060. * A frozen buffer discards the input, effectively increasing the buffering time
  12061. * @returns {boolean}
  12062. */
  12063. get frozen()
  12064. {
  12065. return this._frozen;
  12066. }
  12067. /**
  12068. * A frozen buffer discards the input, effectively increasing the buffering time
  12069. * @param {boolean} value
  12070. */
  12071. set frozen(value)
  12072. {
  12073. this._frozen = Boolean(value);
  12074. }
  12075. /**
  12076. * Releases this node
  12077. * @param {SpeedyGPU} gpu
  12078. */
  12079. release(gpu)
  12080. {
  12081. this._initialized = false;
  12082. super.release(gpu);
  12083. }
  12084. /**
  12085. * Run the specific task of this node
  12086. * @param {SpeedyGPU} gpu
  12087. * @returns {void|SpeedyPromise<void>}
  12088. */
  12089. _run(gpu)
  12090. {
  12091. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12092. const previousFormat = this._previousFormat;
  12093. const page = this._tex;
  12094. const previousInputTexture = page[1 - this._pageIndex];
  12095. const outputTexture = page[this._pageIndex];
  12096. // can't store pyramids
  12097. if(image.hasMipmaps())
  12098. throw new utils_errors/* NotSupportedError */.B8(`${this.fullName} can't bufferize a pyramid`);
  12099. // bufferize
  12100. if(!this._frozen || !this._initialized) {
  12101. // store input
  12102. this._previousFormat = format;
  12103. previousInputTexture.resize(image.width, image.height);
  12104. image.copyTo(previousInputTexture);
  12105. // page flipping
  12106. this._pageIndex = 1 - this._pageIndex;
  12107. }
  12108. // first run?
  12109. if(!this._initialized) {
  12110. this._initialized = true;
  12111. this.output().swrite(previousInputTexture, format);
  12112. return;
  12113. }
  12114. // done!
  12115. this.output().swrite(outputTexture, previousFormat);
  12116. }
  12117. }
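/*
 * Usage sketch (illustrative): a buffer introduces a one-frame delay, which is
 * handy for temporal operations such as frame differencing. Factory names and
 * the mixer wiring below are assumptions about the public API.
 *
 *   const buffer = Speedy.Image.Buffer();
 *   source.output().connectTo(buffer.input());     // current frame in
 *   buffer.output().connectTo(mixer.input('in1')); // previous frame out
 *   buffer.frozen = true; // optionally hold the stored frame across runs
 */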
  12118. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
  12119. /*
  12120. * speedy-vision.js
  12121. * GPU-accelerated Computer Vision for JavaScript
  12122. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12123. *
  12124. * Licensed under the Apache License, Version 2.0 (the "License");
  12125. * you may not use this file except in compliance with the License.
  12126. * You may obtain a copy of the License at
  12127. *
  12128. * http://www.apache.org/licenses/LICENSE-2.0
  12129. *
  12130. * Unless required by applicable law or agreed to in writing, software
  12131. * distributed under the License is distributed on an "AS IS" BASIS,
  12132. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12133. * See the License for the specific language governing permissions and
  12134. * limitations under the License.
  12135. *
  12136. * pyramid.js
  12137. * Generate pyramid
  12138. */
  12139. // Constants
12140. const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; // typically 14, supposing image size <= 8K = 2^13 (down to 1)
12141. const MAX_TEXTURES = 2 * MAX_LEVELS;
  12142. /**
  12143. * Generate pyramid
  12144. */
  12145. class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode
  12146. {
  12147. /**
  12148. * Constructor
  12149. * @param {string} [name] name of the node
  12150. */
  12151. constructor(name = undefined)
  12152. {
  12153. super(name, MAX_TEXTURES + 1, [
  12154. InputPort().expects(SpeedyPipelineMessageType.Image),
  12155. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12156. ]);
  12157. }
  12158. /**
  12159. * Run the specific task of this node
  12160. * @param {SpeedyGPU} gpu
  12161. * @returns {void|SpeedyPromise<void>}
  12162. */
  12163. _run(gpu)
  12164. {
  12165. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12166. const outputTexture = this._tex[0];
  12167. const pyramids = gpu.programs.pyramids;
  12168. let width = image.width, height = image.height;
  12169. // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  12170. const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
  12171. // get work textures
  12172. const mip = new Array(MAX_TEXTURES + 1);
  12173. for(let i = MAX_TEXTURES; i >= 1; i--)
  12174. mip[i-1] = this._tex[i];
  12175. // get a copy of the input image
  12176. mip[0].resize(width, height);
  12177. image.copyTo(mip[0]);
  12178. // generate gaussian pyramid
  12179. const numLevels = Math.min(mipLevels, MAX_LEVELS);
  12180. for(let level = 1; level < numLevels; level++) {
12181. // use max(1, floor(size / 2^lod)), in accordance with
  12182. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  12183. const halfWidth = Math.max(1, width >>> 1);
  12184. const halfHeight = Math.max(1, height >>> 1);
  12185. // reduce operation
  12186. const tmp = (level - 1) + MAX_LEVELS;
  12187. (pyramids.smoothX.outputs(width, height, mip[tmp]))(mip[level-1]);
  12188. (pyramids.smoothY.outputs(width, height, mip[level-1]))(mip[tmp]);
  12189. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[level-1]);
  12190. /*
  12191. (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
  12192. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
  12193. */
  12194. // flush
  12195. gpu.gl.flush();
  12196. // next level
  12197. width = halfWidth;
  12198. height = halfHeight;
  12199. /*
  12200. // debug: view pyramid
  12201. const view = mip[level-1];
  12202. const canvas = gpu.renderToCanvas(view);
  12203. if(!window._ww) document.body.appendChild(canvas);
  12204. window._ww = 1;
  12205. */
  12206. }
  12207. // copy to output & set mipmap
  12208. outputTexture.resize(image.width, image.height);
  12209. outputTexture.clear();
  12210. image.copyTo(outputTexture);
  12211. outputTexture.generateMipmaps(mip.slice(0, numLevels));
  12212. // done!
  12213. this.output().swrite(outputTexture, format);
  12214. }
  12215. }
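/*
 * Usage sketch (illustrative): the pyramid node attaches a Gaussian mipmap
 * chain to its output texture, which downstream multiscale nodes can sample.
 * For a 640x480 input, mipLevels = 1 + floor(log2(640)) = 10, capped at
 * MAX_LEVELS. Factory name assumed.
 *
 *   const pyramid = Speedy.Image.Pyramid();
 *   greyscale.output().connectTo(pyramid.input());
 */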
  12216. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
  12217. /*
  12218. * speedy-vision.js
  12219. * GPU-accelerated Computer Vision for JavaScript
  12220. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12221. *
  12222. * Licensed under the Apache License, Version 2.0 (the "License");
  12223. * you may not use this file except in compliance with the License.
  12224. * You may obtain a copy of the License at
  12225. *
  12226. * http://www.apache.org/licenses/LICENSE-2.0
  12227. *
  12228. * Unless required by applicable law or agreed to in writing, software
  12229. * distributed under the License is distributed on an "AS IS" BASIS,
  12230. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12231. * See the License for the specific language governing permissions and
  12232. * limitations under the License.
  12233. *
  12234. * mixer.js
  12235. * Image Mixer
  12236. */
  12237. /**
  12238. * Image Mixer
  12239. */
  12240. class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode
  12241. {
  12242. /**
  12243. * Constructor
  12244. * @param {string} [name] name of the node
  12245. */
  12246. constructor(name = undefined)
  12247. {
  12248. super(name, 1, [
  12249. InputPort('in0').expects(SpeedyPipelineMessageType.Image),
  12250. InputPort('in1').expects(SpeedyPipelineMessageType.Image),
  12251. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12252. ]);
  12253. /** @type {number} alpha coefficient (applied to image0) */
  12254. this._alpha = 0.5;
  12255. /** @type {number} beta coefficient (applied to image1) */
  12256. this._beta = 0.5;
  12257. /** @type {number} gamma coefficient (brightness control) */
  12258. this._gamma = 0.0;
  12259. }
  12260. /**
  12261. * Alpha coefficient (applied to image0)
  12262. * @returns {number}
  12263. */
  12264. get alpha()
  12265. {
  12266. return this._alpha;
  12267. }
  12268. /**
  12269. * Alpha coefficient (applied to image0)
  12270. * @param {number} value
  12271. */
  12272. set alpha(value)
  12273. {
  12274. this._alpha = +value;
  12275. }
  12276. /**
  12277. * Beta coefficient (applied to image1)
  12278. * @returns {number}
  12279. */
  12280. get beta()
  12281. {
  12282. return this._beta;
  12283. }
  12284. /**
  12285. * Beta coefficient (applied to image1)
  12286. * @param {number} value
  12287. */
  12288. set beta(value)
  12289. {
  12290. this._beta = +value;
  12291. }
  12292. /**
  12293. * Gamma coefficient (brightness control)
  12294. * @returns {number}
  12295. */
  12296. get gamma()
  12297. {
  12298. return this._gamma;
  12299. }
  12300. /**
  12301. * Gamma coefficient (brightness control)
  12302. * @param {number} value
  12303. */
  12304. set gamma(value)
  12305. {
  12306. this._gamma = +value;
  12307. }
  12308. /**
  12309. * Run the specific task of this node
  12310. * @param {SpeedyGPU} gpu
  12311. * @returns {void|SpeedyPromise<void>}
  12312. */
  12313. _run(gpu)
  12314. {
  12315. const in0 = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('in0').read() );
  12316. const in1 = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('in1').read() );
  12317. const image0 = in0.image, image1 = in1.image;
  12318. const format0 = in0.format, format1 = in1.format;
  12319. const width = Math.max(image0.width, image1.width);
  12320. const height = Math.max(image0.height, image1.height);
  12321. const alpha = this._alpha, beta = this._beta, gamma = this._gamma;
  12322. const outputTexture = this._tex[0];
  12323. if(format0 != format1)
  12324. throw new utils_errors/* NotSupportedError */.B8(`Can't mix images of different formats`);
  12325. gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
  12326. gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
  12327. this.output().swrite(outputTexture, format0);
  12328. }
  12329. }
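/*
 * Usage sketch (illustrative): judging by the additiveMix program and the
 * coefficient docs above, the mixer blends its inputs as roughly
 * alpha * in0 + beta * in1 + gamma per pixel, so alpha = beta = 0.5 and
 * gamma = 0 give a plain 50/50 blend. Factory name assumed.
 *
 *   const mixer = Speedy.Image.Mixer();
 *   mixer.alpha = 0.7;
 *   mixer.beta = 0.3;
 *   mixer.gamma = 0.0;
 *   imageA.output().connectTo(mixer.input('in0'));
 *   imageB.output().connectTo(mixer.input('in1'));
 */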
  12330. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
  12331. /*
  12332. * speedy-vision.js
  12333. * GPU-accelerated Computer Vision for JavaScript
  12334. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12335. *
  12336. * Licensed under the Apache License, Version 2.0 (the "License");
  12337. * you may not use this file except in compliance with the License.
  12338. * You may obtain a copy of the License at
  12339. *
  12340. * http://www.apache.org/licenses/LICENSE-2.0
  12341. *
  12342. * Unless required by applicable law or agreed to in writing, software
  12343. * distributed under the License is distributed on an "AS IS" BASIS,
  12344. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12345. * See the License for the specific language governing permissions and
  12346. * limitations under the License.
  12347. *
  12348. * portal.js
  12349. * Image Portals
  12350. */
  12351. /**
  12352. * A sink of an Image Portal
  12353. * This is not a pipeline sink - it doesn't export any data!
  12354. */
  12355. class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode
  12356. {
  12357. /**
  12358. * Constructor
  12359. * @param {string} [name] name of the node
  12360. */
  12361. constructor(name = undefined)
  12362. {
  12363. super(name, 1, [
  12364. InputPort().expects(SpeedyPipelineMessageType.Image),
  12365. ]);
  12366. /** @type {ImageFormat} stored image format */
  12367. this._format = types/* ImageFormat.RGBA */.D3.RGBA;
  12368. /** @type {boolean} is this node initialized? */
  12369. this._initialized = false;
  12370. }
  12371. /**
  12372. * Stored image
  12373. * @returns {SpeedyTexture}
  12374. */
  12375. get image()
  12376. {
  12377. if(!this._initialized)
  12378. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  12379. return this._tex[0];
  12380. }
  12381. /**
  12382. * Stored image format
  12383. * @returns {ImageFormat}
  12384. */
  12385. get format()
  12386. {
  12387. if(!this._initialized)
  12388. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  12389. return this._format;
  12390. }
  12391. /**
  12392. * Initializes this node
  12393. * @param {SpeedyGPU} gpu
  12394. */
  12395. init(gpu)
  12396. {
  12397. super.init(gpu);
  12398. this._tex[0].resize(1, 1).clear(); // initial texture
  12399. this._format = types/* ImageFormat.RGBA */.D3.RGBA;
  12400. this._initialized = true;
  12401. }
  12402. /**
  12403. * Releases this node
  12404. * @param {SpeedyGPU} gpu
  12405. */
  12406. release(gpu)
  12407. {
  12408. this._initialized = false;
  12409. super.release(gpu);
  12410. }
  12411. /**
  12412. * Run the specific task of this node
  12413. * @param {SpeedyGPU} gpu
  12414. * @returns {void|SpeedyPromise<void>}
  12415. */
  12416. _run(gpu)
  12417. {
  12418. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12419. const tex = this._tex[0];
  12420. // can't store pyramids
  12421. if(image.hasMipmaps())
  12422. throw new utils_errors/* NotSupportedError */.B8(`${this.fullName} can't store a pyramid`);
  12423. // copy input
  12424. this._format = format;
  12425. tex.resize(image.width, image.height);
  12426. image.copyTo(tex);
  12427. }
  12428. }
  12429. /**
  12430. * A source of an Image Portal
  12431. */
  12432. class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode
  12433. {
  12434. /**
  12435. * Constructor
  12436. * @param {string} [name] name of the node
  12437. */
  12438. constructor(name = undefined)
  12439. {
  12440. super(name, 0, [
  12441. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12442. ]);
  12443. /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
  12444. this._source = null;
  12445. }
  12446. /**
  12447. * Data source
  12448. * @returns {SpeedyPipelineNodeImagePortalSink|null}
  12449. */
  12450. get source()
  12451. {
  12452. return this._source;
  12453. }
  12454. /**
  12455. * Data source
  12456. * @param {SpeedyPipelineNodeImagePortalSink|null} node
  12457. */
  12458. set source(node)
  12459. {
  12460. if(node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink))
  12461. throw new utils_errors/* IllegalArgumentError */.mG(`Incompatible source for ${this.fullName}`);
  12462. this._source = node;
  12463. }
  12464. /**
  12465. * Run the specific task of this node
  12466. * @param {SpeedyGPU} gpu
  12467. * @returns {void|SpeedyPromise<void>}
  12468. */
  12469. _run(gpu)
  12470. {
  12471. if(this._source == null)
  12472. throw new utils_errors/* IllegalOperationError */.js(`${this.fullName} has no source`);
  12473. this.output().swrite(this._source.image, this._source.format);
  12474. }
  12475. }
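/*
 * Usage sketch (illustrative): portals carry an image across pipelines, or
 * across runs of the same pipeline, without going through a pipeline sink.
 * Factory names assumed.
 *
 *   const portalSink = Speedy.Image.Portal.Sink();     // stores an image
 *   const portalSource = Speedy.Image.Portal.Source(); // reads it back
 *   portalSource.source = portalSink; // link the two ends
 */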
  12476. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
  12477. /*
  12478. * speedy-vision.js
  12479. * GPU-accelerated Computer Vision for JavaScript
  12480. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12481. *
  12482. * Licensed under the Apache License, Version 2.0 (the "License");
  12483. * you may not use this file except in compliance with the License.
  12484. * You may obtain a copy of the License at
  12485. *
  12486. * http://www.apache.org/licenses/LICENSE-2.0
  12487. *
  12488. * Unless required by applicable law or agreed to in writing, software
  12489. * distributed under the License is distributed on an "AS IS" BASIS,
  12490. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12491. * See the License for the specific language governing permissions and
  12492. * limitations under the License.
  12493. *
  12494. * image-factory.js
  12495. * Image-related nodes
  12496. */
  12497. /**
  12498. * Portal nodes
  12499. */
  12500. class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.R
  12501. {
  12502. /**
  12503. * Create an image portal source
  12504. * @param {string} [name] name of the node
  12505. * @returns {SpeedyPipelineNodeImagePortalSource}
  12506. */
  12507. static Source(name = undefined)
  12508. {
  12509. return new SpeedyPipelineNodeImagePortalSource(name);
  12510. }
  12511. /**
  12512. * Create an image portal sink
  12513. * @param {string} [name] name of the node
  12514. * @returns {SpeedyPipelineNodeImagePortalSink}
  12515. */
  12516. static Sink(name = undefined)
  12517. {
  12518. return new SpeedyPipelineNodeImagePortalSink(name);
  12519. }
  12520. }
  12521. /**
  12522. * Image nodes
  12523. */
  12524. class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.R
  12525. {
  12526. /**
  12527. * Create an image source
  12528. * @param {string} [name] name of the node
  12529. * @returns {SpeedyPipelineNodeImageSource}
  12530. */
  12531. static Source(name = undefined)
  12532. {
  12533. return new SpeedyPipelineNodeImageSource(name);
  12534. }
  12535. /**
  12536. * Create an image sink
  12537. * @param {string} [name] name of the node
  12538. * @returns {SpeedyPipelineNodeImageSink}
  12539. */
  12540. static Sink(name = undefined)
  12541. {
  12542. return new SpeedyPipelineNodeImageSink(name);
  12543. }
  12544. /**
  12545. * Create an image multiplexer
  12546. * @param {string} [name] name of the node
  12547. * @returns {SpeedyPipelineNodeImageMultiplexer}
  12548. */
  12549. static Multiplexer(name = undefined)
  12550. {
  12551. return new SpeedyPipelineNodeImageMultiplexer(name);
  12552. }
  12553. /**
  12554. * Create an image buffer
  12555. * @param {string} [name] name of the node
  12556. * @returns {SpeedyPipelineNodeImageBuffer}
  12557. */
  12558. static Buffer(name = undefined)
  12559. {
  12560. return new SpeedyPipelineNodeImageBuffer(name);
  12561. }
  12562. /**
  12563. * Image Pyramid
  12564. * @param {string} [name] name of the node
  12565. * @returns {SpeedyPipelineNodeImagePyramid}
  12566. */
  12567. static Pyramid(name = undefined)
  12568. {
  12569. return new SpeedyPipelineNodeImagePyramid(name);
  12570. }
  12571. /**
  12572. * Image Mixer (blending)
  12573. * @param {string} [name] name of the node
  12574. * @returns {SpeedyPipelineNodeImageMixer}
  12575. */
  12576. static Mixer(name = undefined)
  12577. {
  12578. return new SpeedyPipelineNodeImageMixer(name);
  12579. }
  12580. /**
  12581. * Image Portals
  12582. * @returns {typeof SpeedyPipelineImagePortalFactory}
  12583. */
  12584. static get Portal()
  12585. {
  12586. return SpeedyPipelineImagePortalFactory;
  12587. }
  12588. }
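/*
 * Assuming the usual export wiring of this library, the factory above backs
 * the public Speedy.Image.* namespace:
 *
 *   Speedy.Image.Source()      -> SpeedyPipelineNodeImageSource
 *   Speedy.Image.Sink()        -> SpeedyPipelineNodeImageSink
 *   Speedy.Image.Multiplexer() -> SpeedyPipelineNodeImageMultiplexer
 *   Speedy.Image.Buffer()      -> SpeedyPipelineNodeImageBuffer
 *   Speedy.Image.Pyramid()     -> SpeedyPipelineNodeImagePyramid
 *   Speedy.Image.Mixer()       -> SpeedyPipelineNodeImageMixer
 *   Speedy.Image.Portal.Source() / Speedy.Image.Portal.Sink()
 */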
  12589. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
  12590. /*
  12591. * speedy-vision.js
  12592. * GPU-accelerated Computer Vision for JavaScript
  12593. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12594. *
  12595. * Licensed under the Apache License, Version 2.0 (the "License");
  12596. * you may not use this file except in compliance with the License.
  12597. * You may obtain a copy of the License at
  12598. *
  12599. * http://www.apache.org/licenses/LICENSE-2.0
  12600. *
  12601. * Unless required by applicable law or agreed to in writing, software
  12602. * distributed under the License is distributed on an "AS IS" BASIS,
  12603. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12604. * See the License for the specific language governing permissions and
  12605. * limitations under the License.
  12606. *
  12607. * greyscale.js
  12608. * Convert an image to greyscale
  12609. */
  12610. /**
  12611. * Convert an image to greyscale
  12612. */
  12613. class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode
  12614. {
  12615. /**
  12616. * Constructor
  12617. * @param {string} [name] name of the node
  12618. */
  12619. constructor(name = undefined)
  12620. {
  12621. super(name, 1, [
  12622. InputPort().expects(SpeedyPipelineMessageType.Image),
  12623. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12624. ]);
  12625. }
  12626. /**
  12627. * Run the specific task of this node
  12628. * @param {SpeedyGPU} gpu
  12629. * @returns {void|SpeedyPromise<void>}
  12630. */
  12631. _run(gpu)
  12632. {
  12633. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12634. const width = image.width, height = image.height;
  12635. const outputTexture = this._tex[0];
  12636. const filters = gpu.programs.filters;
  12637. filters.rgb2grey.outputs(width, height, outputTexture);
  12638. filters.rgb2grey(image);
  12639. this.output().swrite(outputTexture, types/* ImageFormat.GREY */.D3.GREY);
  12640. }
  12641. }
  12642. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
  12643. /*
  12644. * speedy-vision.js
  12645. * GPU-accelerated Computer Vision for JavaScript
  12646. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12647. *
  12648. * Licensed under the Apache License, Version 2.0 (the "License");
  12649. * you may not use this file except in compliance with the License.
  12650. * You may obtain a copy of the License at
  12651. *
  12652. * http://www.apache.org/licenses/LICENSE-2.0
  12653. *
  12654. * Unless required by applicable law or agreed to in writing, software
  12655. * distributed under the License is distributed on an "AS IS" BASIS,
  12656. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12657. * See the License for the specific language governing permissions and
  12658. * limitations under the License.
  12659. *
  12660. * gaussian-blur.js
  12661. * Gaussian Blur
  12662. */
  12663. /**
  12664. * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
  12665. * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
  12666. * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
  12667. * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
  12668. */
  12669. const DEFAULT_KERNEL = Object.freeze({
  12670. 3: [ 0.27901008925473514, 0.44197982149052983, 0.27901008925473514 ], // 1D convolution (sigma = 1)
  12671. 5: [ 0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021 ], // 1D convolution (separable kernel)
  12672. 7: [ 0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274 ],
  12673. 9: [ 0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988 ],
  12674. 11:[ 0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346 ],
  12675. 13:[ 0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363 ],
  12676. 15:[ 0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383 ],
  12677. //3: [ 0.25, 0.5, 0.25 ],
  12678. //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
  12679. });
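/*
 * Worked example of the heuristic above: for ksize = 9, radius = (9 - 1) / 2 = 4
 * and sigma = max(1, (9 - 1) / 4) = 2; for ksize = 3 the formula gives 0.5,
 * so sigma is clamped to 1.
 */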
  12680. /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
  12681. const DEFAULT_SIGMA = new SpeedyVector2(0,0);
  12682. /** convolution programs (x-axis) */
  12683. const CONVOLUTION_X = Object.freeze({
  12684. 3: 'convolution3x',
  12685. 5: 'convolution5x',
  12686. 7: 'convolution7x',
  12687. 9: 'convolution9x',
  12688. 11: 'convolution11x',
  12689. 13: 'convolution13x',
  12690. 15: 'convolution15x',
  12691. });
  12692. /** convolution programs (y-axis) */
  12693. const CONVOLUTION_Y = Object.freeze({
  12694. 3: 'convolution3y',
  12695. 5: 'convolution5y',
  12696. 7: 'convolution7y',
  12697. 9: 'convolution9y',
  12698. 11: 'convolution11y',
  12699. 13: 'convolution13y',
  12700. 15: 'convolution15y',
  12701. });
  12702. /**
  12703. * @typedef {object} SeparableConvolutionKernel
  12704. * @property {number[]} x
  12705. * @property {number[]} y
  12706. */
  12707. /**
  12708. * Gaussian Blur
  12709. */
  12710. class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode
  12711. {
  12712. /**
  12713. * Constructor
  12714. * @param {string} [name] name of the node
  12715. */
  12716. constructor(name = undefined)
  12717. {
  12718. super(name, 2, [
  12719. InputPort().expects(SpeedyPipelineMessageType.Image),
  12720. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12721. ]);
  12722. /** @type {SpeedySize} size of the kernel */
  12723. this._kernelSize = new SpeedySize(5,5);
  12724. /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
  12725. this._sigma = DEFAULT_SIGMA;
  12726. /** @type {SeparableConvolutionKernel} convolution kernel */
  12727. this._kernel = {
  12728. x: DEFAULT_KERNEL[this._kernelSize.width],
  12729. y: DEFAULT_KERNEL[this._kernelSize.height]
  12730. };
  12731. }
  12732. /**
  12733. * Size of the kernel
  12734. * @returns {SpeedySize}
  12735. */
  12736. get kernelSize()
  12737. {
  12738. return this._kernelSize;
  12739. }
  12740. /**
  12741. * Size of the kernel
  12742. * @param {SpeedySize} kernelSize
  12743. */
  12744. set kernelSize(kernelSize)
  12745. {
  12746. utils/* Utils.assert */.c.assert(kernelSize instanceof SpeedySize);
  12747. const kw = kernelSize.width, kh = kernelSize.height;
  12748. if(kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0)
  12749. throw new utils_errors/* NotSupportedError */.B8(`Unsupported kernel size: ${kw}x${kh}`);
  12750. this._kernelSize = kernelSize;
  12751. this._updateKernel();
  12752. }
  12753. /**
  12754. * Sigma of the Gaussian kernel
  12755. * @returns {SpeedyVector2}
  12756. */
  12757. get sigma()
  12758. {
  12759. return this._sigma;
  12760. }
  12761. /**
  12762. * Sigma of the Gaussian kernel
  12763. * @param {SpeedyVector2} sigma
  12764. */
  12765. set sigma(sigma)
  12766. {
  12767. utils/* Utils.assert */.c.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
  12768. utils/* Utils.assert */.c.assert(sigma.x >= 0 && sigma.y >= 0);
  12769. this._sigma = sigma;
  12770. this._updateKernel();
  12771. }
  12772. /**
  12773. * Run the specific task of this node
  12774. * @param {SpeedyGPU} gpu
  12775. * @returns {void|SpeedyPromise<void>}
  12776. */
  12777. _run(gpu)
  12778. {
  12779. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12780. const width = image.width, height = image.height;
  12781. const kernX = this._kernel.x;
  12782. const kernY = this._kernel.y;
  12783. const convX = CONVOLUTION_X[this._kernelSize.width];
  12784. const convY = CONVOLUTION_Y[this._kernelSize.height];
  12785. const tex = this._tex[0];
  12786. const outputTexture = this._tex[1];
  12787. (gpu.programs.filters[convX]
  12788. .outputs(width, height, tex)
  12789. )(image, kernX);
  12790. (gpu.programs.filters[convY]
  12791. .outputs(width, height, outputTexture)
  12792. )(tex, kernY);
  12793. this.output().swrite(outputTexture, format);
  12794. }
  12795. /**
  12796. * Update the internal kernel to match
  12797. * sigma and kernelSize
  12798. */
  12799. _updateKernel()
  12800. {
  12801. if(this._sigma.x == DEFAULT_SIGMA.x)
  12802. this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];
  12803. else
  12804. this._kernel.x = utils/* Utils.gaussianKernel */.c.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
  12805. if(this._sigma.y == DEFAULT_SIGMA.y)
  12806. this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];
  12807. else
  12808. this._kernel.y = utils/* Utils.gaussianKernel */.c.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
  12809. }
  12810. }
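/*
 * Usage sketch (illustrative): kernel size and sigma are configurable; a zero
 * sigma selects the precomputed default kernels above. The factory name and
 * the Speedy.Size / Speedy.Vector2 constructors are assumptions about the
 * public API.
 *
 *   const blur = Speedy.Filter.GaussianBlur();
 *   blur.kernelSize = Speedy.Size(9, 9);   // odd sizes from 3x3 to 15x15
 *   blur.sigma = Speedy.Vector2(2.0, 2.0); // or leave at (0,0) for defaults
 */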
  12811. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
  12812. /*
  12813. * speedy-vision.js
  12814. * GPU-accelerated Computer Vision for JavaScript
  12815. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12816. *
  12817. * Licensed under the Apache License, Version 2.0 (the "License");
  12818. * you may not use this file except in compliance with the License.
  12819. * You may obtain a copy of the License at
  12820. *
  12821. * http://www.apache.org/licenses/LICENSE-2.0
  12822. *
  12823. * Unless required by applicable law or agreed to in writing, software
  12824. * distributed under the License is distributed on an "AS IS" BASIS,
  12825. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12826. * See the License for the specific language governing permissions and
  12827. * limitations under the License.
  12828. *
  12829. * simple-blur.js
  12830. * Simple Blur (Box Filter)
  12831. */
  12832. /** 1D convolution filters */
  12833. const BOX_FILTER = Object.freeze({
  12834. 3: (new Array(3)).fill(1/3),
  12835. 5: (new Array(5)).fill(1/5),
  12836. 7: (new Array(7)).fill(1/7),
  12837. 9: (new Array(9)).fill(1/9),
  12838. 11: (new Array(11)).fill(1/11),
  12839. 13: (new Array(13)).fill(1/13),
  12840. 15: (new Array(15)).fill(1/15),
  12841. });
  12842. /** convolution programs (x-axis) */
  12843. const simple_blur_CONVOLUTION_X = Object.freeze({
  12844. 3: 'convolution3x',
  12845. 5: 'convolution5x',
  12846. 7: 'convolution7x',
  12847. 9: 'convolution9x',
  12848. 11: 'convolution11x',
  12849. 13: 'convolution13x',
  12850. 15: 'convolution15x',
  12851. });
  12852. /** convolution programs (y-axis) */
  12853. const simple_blur_CONVOLUTION_Y = Object.freeze({
  12854. 3: 'convolution3y',
  12855. 5: 'convolution5y',
  12856. 7: 'convolution7y',
  12857. 9: 'convolution9y',
  12858. 11: 'convolution11y',
  12859. 13: 'convolution13y',
  12860. 15: 'convolution15y',
  12861. });
  12862. /**
  12863. * @typedef {object} SeparableConvolutionKernel
  12864. * @property {number[]} x
  12865. * @property {number[]} y
  12866. */
  12867. /**
  12868. * Simple Blur (Box Filter)
  12869. */
  12870. class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode
  12871. {
  12872. /**
  12873. * Constructor
  12874. * @param {string} [name] name of the node
  12875. */
  12876. constructor(name = undefined)
  12877. {
  12878. super(name, 2, [
  12879. InputPort().expects(SpeedyPipelineMessageType.Image),
  12880. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12881. ]);
  12882. /** @type {SpeedySize} size of the kernel */
  12883. this._kernelSize = new SpeedySize(5,5);
  12884. /** @type {SeparableConvolutionKernel} convolution kernel */
  12885. this._kernel = {
  12886. x: BOX_FILTER[this._kernelSize.width],
  12887. y: BOX_FILTER[this._kernelSize.height]
  12888. };
  12889. }
  12890. /**
  12891. * Size of the kernel
  12892. * @returns {SpeedySize}
  12893. */
  12894. get kernelSize()
  12895. {
  12896. return this._kernelSize;
  12897. }
  12898. /**
  12899. * Size of the kernel
  12900. * @param {SpeedySize} kernelSize
  12901. */
  12902. set kernelSize(kernelSize)
  12903. {
  12904. utils/* Utils.assert */.c.assert(kernelSize instanceof SpeedySize);
  12905. const kw = kernelSize.width, kh = kernelSize.height;
  12906. if(kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0)
  12907. throw new utils_errors/* NotSupportedError */.B8(`Unsupported kernel size: ${kw}x${kh}`);
  12908. this._kernelSize = kernelSize;
  12909. this._kernel.x = BOX_FILTER[this._kernelSize.width];
  12910. this._kernel.y = BOX_FILTER[this._kernelSize.height];
  12911. }
  12912. /**
  12913. * Run the specific task of this node
  12914. * @param {SpeedyGPU} gpu
  12915. * @returns {void|SpeedyPromise<void>}
  12916. */
  12917. _run(gpu)
  12918. {
  12919. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  12920. const width = image.width, height = image.height;
  12921. const kernX = this._kernel.x;
  12922. const kernY = this._kernel.y;
  12923. const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
  12924. const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
  12925. const tex = this._tex[0];
  12926. const outputTexture = this._tex[1];
  12927. (gpu.programs.filters[convX]
  12928. .outputs(width, height, tex)
  12929. )(image, kernX);
  12930. (gpu.programs.filters[convY]
  12931. .outputs(width, height, outputTexture)
  12932. )(tex, kernY);
  12933. this.output().swrite(outputTexture, format);
  12934. }
  12935. }
  12936. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
  12937. /*
  12938. * speedy-vision.js
  12939. * GPU-accelerated Computer Vision for JavaScript
  12940. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  12941. *
  12942. * Licensed under the Apache License, Version 2.0 (the "License");
  12943. * you may not use this file except in compliance with the License.
  12944. * You may obtain a copy of the License at
  12945. *
  12946. * http://www.apache.org/licenses/LICENSE-2.0
  12947. *
  12948. * Unless required by applicable law or agreed to in writing, software
  12949. * distributed under the License is distributed on an "AS IS" BASIS,
  12950. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12951. * See the License for the specific language governing permissions and
  12952. * limitations under the License.
  12953. *
  12954. * median-blur.js
  12955. * Median Blur
  12956. */
  12957. // Median programs
  12958. const MEDIAN = {
  12959. 3: 'median3',
  12960. 5: 'median5',
  12961. 7: 'median7',
  12962. };
  12963. /**
  12964. * Median Blur
  12965. */
  12966. class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode
  12967. {
  12968. /**
  12969. * Constructor
  12970. * @param {string} [name] name of the node
  12971. */
  12972. constructor(name = undefined)
  12973. {
  12974. super(name, 1, [
  12975. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  12976. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  12977. msg.format === types/* ImageFormat.GREY */.D3.GREY
  12978. ),
  12979. OutputPort().expects(SpeedyPipelineMessageType.Image),
  12980. ]);
  12981. /** @type {SpeedySize} size of the kernel (assumed to be square) */
  12982. this._kernelSize = new SpeedySize(5,5);
  12983. }
  12984. /**
  12985. * Size of the kernel
  12986. * @returns {SpeedySize}
  12987. */
  12988. get kernelSize()
  12989. {
  12990. return this._kernelSize;
  12991. }
  12992. /**
  12993. * Size of the kernel
  12994. * @param {SpeedySize} kernelSize
  12995. */
  12996. set kernelSize(kernelSize)
  12997. {
  12998. utils/* Utils.assert */.c.assert(kernelSize instanceof SpeedySize);
  12999. const ksize = kernelSize.width;
  13000. if(!(ksize == 3 || ksize == 5 || ksize == 7))
  13001. throw new utils_errors/* NotSupportedError */.B8(`Supported kernel sizes: 3x3, 5x5, 7x7`);
  13002. else if(kernelSize.width != kernelSize.height)
  13003. throw new utils_errors/* NotSupportedError */.B8(`Use a square kernel`);
  13004. this._kernelSize = kernelSize;
  13005. }
  13006. /**
  13007. * Run the specific task of this node
  13008. * @param {SpeedyGPU} gpu
  13009. * @returns {void|SpeedyPromise<void>}
  13010. */
  13011. _run(gpu)
  13012. {
  13013. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13014. const width = image.width, height = image.height;
  13015. const ksize = this._kernelSize.width;
  13016. const med = MEDIAN[ksize];
  13017. const outputTexture = this._tex[0];
  13018. (gpu.programs.filters[med]
  13019. .outputs(width, height, outputTexture)
  13020. )(image);
  13021. this.output().swrite(outputTexture, format);
  13022. }
  13023. }
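/*
 * Usage sketch (illustrative): the median filter only accepts greyscale input
 * (see the input port constraint above) and square kernels of size 3, 5 or 7.
 * Factory names assumed.
 *
 *   const median = Speedy.Filter.MedianBlur();
 *   median.kernelSize = Speedy.Size(5, 5);
 *   greyscale.output().connectTo(median.input());
 */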
  13024. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
  13025. /*
  13026. * speedy-vision.js
  13027. * GPU-accelerated Computer Vision for JavaScript
  13028. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13029. *
  13030. * Licensed under the Apache License, Version 2.0 (the "License");
  13031. * you may not use this file except in compliance with the License.
  13032. * You may obtain a copy of the License at
  13033. *
  13034. * http://www.apache.org/licenses/LICENSE-2.0
  13035. *
  13036. * Unless required by applicable law or agreed to in writing, software
  13037. * distributed under the License is distributed on an "AS IS" BASIS,
  13038. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13039. * See the License for the specific language governing permissions and
  13040. * limitations under the License.
  13041. *
  13042. * convolution.js
  13043. * Image convolution
  13044. */
  13045. // 2D convolution programs
  13046. const CONVOLUTION = {
  13047. 3: 'convolution3',
  13048. 5: 'convolution5',
  13049. 7: 'convolution7',
  13050. };
  13051. /**
  13052. * Image convolution
  13053. */
  13054. class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode
  13055. {
  13056. /**
  13057. * Constructor
  13058. * @param {string} [name] name of the node
  13059. */
  13060. constructor(name = undefined)
  13061. {
  13062. super(name, 1, [
  13063. InputPort().expects(SpeedyPipelineMessageType.Image),
  13064. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13065. ]);
  13066. /** @type {SpeedyMatrix} convolution kernel (square matrix) */
  13067. this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
  13068. }
  13069. /**
  13070. * Convolution kernel
  13071. * @returns {SpeedyMatrix}
  13072. */
  13073. get kernel()
  13074. {
  13075. return this._kernel;
  13076. }
  13077. /**
  13078. * Convolution kernel
  13079. * @param {SpeedyMatrix} kernel
  13080. */
  13081. set kernel(kernel)
  13082. {
  13083. if(kernel.rows != kernel.columns)
  13084. throw new utils_errors/* NotSupportedError */.B8(`Use a square kernel`);
  13085. else if(!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7))
  13086. throw new utils_errors/* NotSupportedError */.B8(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
  13087. this._kernel = kernel;
  13088. }
  13089. /**
  13090. * Run the specific task of this node
  13091. * @param {SpeedyGPU} gpu
  13092. * @returns {void|SpeedyPromise<void>}
  13093. */
  13094. _run(gpu)
  13095. {
  13096. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13097. const width = image.width, height = image.height;
  13098. const outputTexture = this._tex[0];
  13099. const ksize = this._kernel.rows;
  13100. const conv = CONVOLUTION[ksize];
  13101. const kernel = this._kernel.read();
  13102. (gpu.programs.filters[conv]
  13103. .outputs(width, height, outputTexture)
  13104. )(image, kernel);
  13105. this.output().swrite(outputTexture, format);
  13106. }
  13107. }
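/*
 * Usage sketch (illustrative): the kernel is a square SpeedyMatrix (3x3, 5x5
 * or 7x7). A common 3x3 sharpening kernel, assuming the Speedy.Matrix
 * constructor of the public API (the ordering of entries is irrelevant here
 * because this kernel is symmetric):
 *
 *   const convolution = Speedy.Filter.Convolution();
 *   convolution.kernel = Speedy.Matrix(3, 3, [
 *       0, -1,  0,
 *      -1,  5, -1,
 *       0, -1,  0
 *   ]);
 */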
  13108. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
  13109. /*
  13110. * speedy-vision.js
  13111. * GPU-accelerated Computer Vision for JavaScript
  13112. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13113. *
  13114. * Licensed under the Apache License, Version 2.0 (the "License");
  13115. * you may not use this file except in compliance with the License.
  13116. * You may obtain a copy of the License at
  13117. *
  13118. * http://www.apache.org/licenses/LICENSE-2.0
  13119. *
  13120. * Unless required by applicable law or agreed to in writing, software
  13121. * distributed under the License is distributed on an "AS IS" BASIS,
  13122. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13123. * See the License for the specific language governing permissions and
  13124. * limitations under the License.
  13125. *
  13126. * nightvision.js
  13127. * Nightvision filter
  13128. */
  13129. /**
  13130. * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
  13131. */
  13132. /**
  13133. * Nightvision filter: "see in the dark"
  13134. */
  13135. class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode
  13136. {
  13137. /**
  13138. * Constructor
  13139. * @param {string} [name] name of the node
  13140. */
  13141. constructor(name = undefined)
  13142. {
  13143. super(name, 3, [
  13144. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  13145. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  13146. msg.format === types/* ImageFormat.RGBA */.D3.RGBA ||
  13147. msg.format === types/* ImageFormat.GREY */.D3.GREY
  13148. ),
  13149. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13150. ]);
  13151. /** @type {number} a value typically in [0,1]: larger number => higher contrast */
  13152. this._gain = 0.5;
  13153. /** @type {number} a value typically in [0,1]: controls brightness */
  13154. this._offset = 0.5;
  13155. /** @type {number} gain decay, a value in [0,1] */
  13156. this._decay = 0.0;
  13157. /** @type {NightvisionQualityLevel} quality level */
  13158. this._quality = 'medium';
  13159. }
  13160. /**
  13161. * Gain, a value typically in [0,1]: larger number => higher contrast
  13162. * @returns {number}
  13163. */
  13164. get gain()
  13165. {
  13166. return this._gain;
  13167. }
  13168. /**
  13169. * Gain, a value typically in [0,1]: larger number => higher contrast
  13170. * @param {number} gain
  13171. */
  13172. set gain(gain)
  13173. {
  13174. this._gain = +gain;
  13175. }
  13176. /**
  13177. * Offset, a value typically in [0,1] that controls the brightness
  13178. * @returns {number}
  13179. */
  13180. get offset()
  13181. {
  13182. return this._offset;
  13183. }
  13184. /**
  13185. * Offset, a value typically in [0,1] that controls the brightness
  13186. * @param {number} offset
  13187. */
  13188. set offset(offset)
  13189. {
  13190. this._offset = +offset;
  13191. }
  13192. /**
  13193. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  13194. * @returns {number}
  13195. */
  13196. get decay()
  13197. {
  13198. return this._decay;
  13199. }
  13200. /**
  13201. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  13202. * @param {number} decay
  13203. */
  13204. set decay(decay)
  13205. {
  13206. this._decay = Math.max(0.0, Math.min(+decay, 1.0));
  13207. }
  13208. /**
  13209. * Quality level of the filter
  13210. * @returns {NightvisionQualityLevel}
  13211. */
  13212. get quality()
  13213. {
  13214. return this._quality;
  13215. }
  13216. /**
  13217. * Quality level of the filter
  13218. * @param {NightvisionQualityLevel} quality
  13219. */
  13220. set quality(quality)
  13221. {
  13222. if(quality === 'high' || quality === 'medium' || quality === 'low')
  13223. this._quality = quality;
  13224. else
  13225. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid quality level for the Nightvision filter: "${quality}"`);
  13226. }
  13227. /**
  13228. * Run the specific task of this node
  13229. * @param {SpeedyGPU} gpu
  13230. * @returns {void|SpeedyPromise<void>}
  13231. */
  13232. _run(gpu)
  13233. {
  13234. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13235. const width = image.width, height = image.height;
  13236. const gain = this._gain;
  13237. const offset = this._offset;
  13238. const decay = this._decay;
  13239. const quality = this._quality;
  13240. const filters = gpu.programs.filters;
  13241. const tmp = this._tex[0];
  13242. const illuminationMap = this._tex[1];
  13243. const outputTexture = this._tex[2];
  13244. // compute illumination map
  13245. if(quality == 'medium') {
  13246. filters.illuminationMapX.outputs(width, height, tmp);
  13247. filters.illuminationMapY.outputs(width, height, illuminationMap);
  13248. filters.illuminationMapX(image);
  13249. filters.illuminationMapY(tmp);
  13250. }
  13251. else if(quality == 'high') {
  13252. filters.illuminationMapHiX.outputs(width, height, tmp);
  13253. filters.illuminationMapHiY.outputs(width, height, illuminationMap);
  13254. filters.illuminationMapHiX(image);
  13255. filters.illuminationMapHiY(tmp);
  13256. }
  13257. else if(quality == 'low') {
  13258. filters.illuminationMapLoX.outputs(width, height, tmp);
  13259. filters.illuminationMapLoY.outputs(width, height, illuminationMap);
  13260. filters.illuminationMapLoX(image);
  13261. filters.illuminationMapLoY(tmp);
  13262. }
  13263. // run nightvision
  13264. if(format === types/* ImageFormat.GREY */.D3.GREY) {
  13265. filters.nightvisionGreyscale.outputs(width, height, outputTexture);
  13266. filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
  13267. }
  13268. else if(format === types/* ImageFormat.RGBA */.D3.RGBA) {
  13269. filters.nightvision.outputs(width, height, outputTexture);
  13270. filters.nightvision(image, illuminationMap, gain, offset, decay);
  13271. }
  13272. // done!
  13273. this.output().swrite(outputTexture, format);
  13274. }
  13275. }
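/*
 * Usage sketch (illustrative): gain/offset control contrast and brightness,
 * decay attenuates the gain away from the image center, and quality selects
 * one of the three illumination-map programs used above. Factory name assumed.
 *
 *   const nightvision = Speedy.Filter.Nightvision();
 *   nightvision.gain = 0.6;
 *   nightvision.offset = 0.5;
 *   nightvision.decay = 0.1;
 *   nightvision.quality = 'medium'; // 'high' | 'medium' | 'low'
 */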
  13276. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
  13277. /*
  13278. * speedy-vision.js
  13279. * GPU-accelerated Computer Vision for JavaScript
  13280. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13281. *
  13282. * Licensed under the Apache License, Version 2.0 (the "License");
  13283. * you may not use this file except in compliance with the License.
  13284. * You may obtain a copy of the License at
  13285. *
  13286. * http://www.apache.org/licenses/LICENSE-2.0
  13287. *
  13288. * Unless required by applicable law or agreed to in writing, software
  13289. * distributed under the License is distributed on an "AS IS" BASIS,
  13290. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13291. * See the License for the specific language governing permissions and
  13292. * limitations under the License.
  13293. *
  13294. * normalize.js
  13295. * Normalize image to a range
  13296. */
  13297. /**
  13298. * Normalize image to a range
  13299. */
  13300. class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode
  13301. {
  13302. /**
  13303. * Constructor
  13304. * @param {string} [name] name of the node
  13305. */
  13306. constructor(name = undefined)
  13307. {
  13308. super(name, 4, [
  13309. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  13310. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  13311. msg.format === types/* ImageFormat.GREY */.D3.GREY
  13312. ),
  13313. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13314. ]);
  13315. /** @type {number} a value in [0,255] */
  13316. this._minValue = 0;
  13317. /** @type {number} a value in [0,255] */
  13318. this._maxValue = 255;
  13319. }
  13320. /**
  13321. * Minimum intensity in the output image, a value in [0,255]
  13322. * @returns {number}
  13323. */
  13324. get minValue()
  13325. {
  13326. return this._minValue;
  13327. }
  13328. /**
  13329. * Minimum intensity in the output image, a value in [0,255]
  13330. * @param {number} minValue
  13331. */
  13332. set minValue(minValue)
  13333. {
  13334. this._minValue = Math.max(0, Math.min(+minValue, 255));
  13335. }
  13336. /**
  13337. * Maximum intensity in the output image, a value in [0,255]
  13338. * @returns {number}
  13339. */
  13340. get maxValue()
  13341. {
  13342. return this._maxValue;
  13343. }
  13344. /**
  13345. * Maximum intensity in the output image, a value in [0,255]
  13346. * @param {number} maxValue
  13347. */
  13348. set maxValue(maxValue)
  13349. {
  13350. this._maxValue = Math.max(0, Math.min(+maxValue, 255));
  13351. }
  13352. /**
  13353. * Run the specific task of this node
  13354. * @param {SpeedyGPU} gpu
  13355. * @returns {void|SpeedyPromise<void>}
  13356. */
  13357. _run(gpu)
  13358. {
  13359. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13360. const width = image.width, height = image.height;
  13361. const outputTexture = this._tex[3];
  13362. let minValue = this._minValue;
  13363. let maxValue = this._maxValue;
  13364. if(minValue > maxValue)
  13365. minValue = maxValue = (minValue + maxValue) / 2;
  13366. const minmax = this._scanMinMax(gpu, image, types/* PixelComponent.GREEN */.hE.GREEN);
  13367. gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
  13368. gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
  13369. this.output().swrite(outputTexture, format);
  13370. }
  13371. /**
  13372. * Scan a single component in all pixels of the image and find the min & max intensities
  13373. * @param {SpeedyGPU} gpu
  13374. * @param {SpeedyTexture} image input image
  13375. * @param {PixelComponent} pixelComponent a single PixelComponent flag
  13376. * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
  13377. */
  13378. _scanMinMax(gpu, image, pixelComponent)
  13379. {
  13380. const tex = this._tex;
  13381. const program = gpu.programs.utils;
  13382. const width = image.width, height = image.height;
  13383. const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
  13384. utils/* Utils.assert */.c.assert(types/* ColorComponentId */.rY[pixelComponent] !== undefined);
  13385. program.copyComponents.outputs(width, height, tex[2]);
  13386. program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
  13387. let texture = program.copyComponents(image, image, types/* PixelComponent.ALL */.hE.ALL, types/* ColorComponentId */.rY[pixelComponent]);
  13388. for(let i = 0; i < numIterations; i++)
  13389. texture = program.scanMinMax2D(texture, i);
  13390. return texture;
  13391. }
  13392. }
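/*
// [Illustrative sketch: not part of the library]
// _scanMinMax() above runs ceil(log2(max(width, height))) passes of a GPU
// reduction; conceptually, it finds the global minimum and maximum of one
// pixel component. A plain-CPU equivalent of that result, followed by the
// usual linear remapping of intensities to [minValue, maxValue], could look
// like the hypothetical helper below (plain arrays instead of textures):

function normalizeGreyscaleCPU(pixels, minValue = 0, maxValue = 255)
{
    // reduction: find the global min & max intensities
    let lo = 255, hi = 0;
    for(let i = 0; i < pixels.length; i++) {
        lo = Math.min(lo, pixels[i]);
        hi = Math.max(hi, pixels[i]);
    }

    // remap [lo, hi] to [minValue, maxValue]; constant images map to minValue
    const scale = hi > lo ? (maxValue - minValue) / (hi - lo) : 0;
    return pixels.map(p => minValue + (p - lo) * scale);
}

// normalizeGreyscaleCPU([64, 128, 192]) -> [0, 127.5, 255]
*/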
  13393. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
  13394. /*
  13395. * speedy-vision.js
  13396. * GPU-accelerated Computer Vision for JavaScript
  13397. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13398. *
  13399. * Licensed under the Apache License, Version 2.0 (the "License");
  13400. * you may not use this file except in compliance with the License.
  13401. * You may obtain a copy of the License at
  13402. *
  13403. * http://www.apache.org/licenses/LICENSE-2.0
  13404. *
  13405. * Unless required by applicable law or agreed to in writing, software
  13406. * distributed under the License is distributed on an "AS IS" BASIS,
  13407. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13408. * See the License for the specific language governing permissions and
  13409. * limitations under the License.
  13410. *
  13411. * filter-factory.js
  13412. * Image filters
  13413. */
  13414. /**
  13415. * Image filters
  13416. */
  13417. class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.R
  13418. {
  13419. /**
  13420. * Convert image to greyscale
  13421. * @param {string} [name]
  13422. * @returns {SpeedyPipelineNodeGreyscale}
  13423. */
  13424. static Greyscale(name = undefined)
  13425. {
  13426. return new SpeedyPipelineNodeGreyscale(name);
  13427. }
  13428. /**
  13429. * Gaussian Blur
  13430. * @param {string} [name]
  13431. * @returns {SpeedyPipelineNodeGaussianBlur}
  13432. */
  13433. static GaussianBlur(name = undefined)
  13434. {
  13435. return new SpeedyPipelineNodeGaussianBlur(name);
  13436. }
  13437. /**
  13438. * Simple Blur (Box Filter)
  13439. * @param {string} [name]
  13440. * @returns {SpeedyPipelineNodeSimpleBlur}
  13441. */
  13442. static SimpleBlur(name = undefined)
  13443. {
  13444. return new SpeedyPipelineNodeSimpleBlur(name);
  13445. }
  13446. /**
  13447. * Median Blur
  13448. * @param {string} [name]
  13449. * @returns {SpeedyPipelineNodeMedianBlur}
  13450. */
  13451. static MedianBlur(name = undefined)
  13452. {
  13453. return new SpeedyPipelineNodeMedianBlur(name);
  13454. }
  13455. /**
  13456. * Image Convolution
  13457. * @param {string} [name]
  13458. * @returns {SpeedyPipelineNodeConvolution}
  13459. */
  13460. static Convolution(name = undefined)
  13461. {
  13462. return new SpeedyPipelineNodeConvolution(name);
  13463. }
  13464. /**
  13465. * Nightvision
  13466. * @param {string} [name]
  13467. * @returns {SpeedyPipelineNodeNightvision}
  13468. */
  13469. static Nightvision(name = undefined)
  13470. {
  13471. return new SpeedyPipelineNodeNightvision(name);
  13472. }
  13473. /**
  13474. * Normalize image
  13475. * @param {string} [name]
  13476. * @returns {SpeedyPipelineNodeNormalize}
  13477. */
  13478. static Normalize(name = undefined)
  13479. {
  13480. return new SpeedyPipelineNodeNormalize(name);
  13481. }
  13482. }
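/*
// [Illustrative sketch: not part of the library]
// These factory methods simply instantiate the node classes defined earlier;
// a node is then configured through its property setters before being wired
// into a pipeline (the wiring itself is outside the scope of this section).
// Using only what is defined above:

const nightvision = SpeedyPipelineFilterFactory.Nightvision('my-nightvision');
nightvision.quality = 'medium'; // validated setter: 'low' | 'medium' | 'high'
*/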
  13483. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
  13484. /*
  13485. * speedy-vision.js
  13486. * GPU-accelerated Computer Vision for JavaScript
  13487. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13488. *
  13489. * Licensed under the Apache License, Version 2.0 (the "License");
  13490. * you may not use this file except in compliance with the License.
  13491. * You may obtain a copy of the License at
  13492. *
  13493. * http://www.apache.org/licenses/LICENSE-2.0
  13494. *
  13495. * Unless required by applicable law or agreed to in writing, software
  13496. * distributed under the License is distributed on an "AS IS" BASIS,
  13497. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13498. * See the License for the specific language governing permissions and
  13499. * limitations under the License.
  13500. *
  13501. * perspective-warp.js
  13502. * Warp an image using a perspective transformation
  13503. */
  13504. // Used when an invalid matrix is provided
  13505. const SINGULAR_MATRIX = [0,0,0,0,0,0,0,0,1];
  13506. /**
  13507. * Warp an image using a perspective transformation
  13508. */
  13509. class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode
  13510. {
  13511. /**
  13512. * Constructor
  13513. * @param {string} [name] name of the node
  13514. */
  13515. constructor(name = undefined)
  13516. {
  13517. super(name, 1, [
  13518. InputPort().expects(SpeedyPipelineMessageType.Image),
  13519. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13520. ]);
  13521. /** @type {SpeedyMatrix} perspective transformation */
  13522. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  13523. }
  13524. /**
  13525. * Perspective transform, a 3x3 homography matrix
  13526. * @returns {SpeedyMatrix}
  13527. */
  13528. get transform()
  13529. {
  13530. return this._transform;
  13531. }
  13532. /**
  13533. * Perspective transform, a 3x3 homography matrix
  13534. * @param {SpeedyMatrix} transform
  13535. */
  13536. set transform(transform)
  13537. {
  13538. if(!(transform.rows == 3 && transform.columns == 3))
  13539. throw new utils_errors/* IllegalArgumentError */.mG(`Not a 3x3 transformation matrix: ${transform}`);
  13540. this._transform = transform;
  13541. }
  13542. /**
  13543. * Run the specific task of this node
  13544. * @param {SpeedyGPU} gpu
  13545. * @returns {void|SpeedyPromise<void>}
  13546. */
  13547. _run(gpu)
  13548. {
  13549. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13550. const width = image.width, height = image.height;
  13551. const outputTexture = this._tex[0];
  13552. const homography = this._transform.read();
  13553. const inverseHomography = this._inverse3(homography);
  13554. const isValidHomography = !Number.isNaN(inverseHomography[0]);
  13555. gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
  13556. gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
  13557. this.output().swrite(outputTexture, format);
  13558. }
  13559. /**
  13560. * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
  13561. * @param {number[]} mat 3x3 matrix in column-major format
  13562. * @param {number} [eps] epsilon
  13563. * @returns {number[]} 3x3 inverse matrix in column-major format
  13564. */
  13565. _inverse3(mat, eps = 1e-6)
  13566. {
  13567. // read the entries of the matrix
  13568. const a11 = mat[0];
  13569. const a21 = mat[1];
  13570. const a31 = mat[2];
  13571. const a12 = mat[3];
  13572. const a22 = mat[4];
  13573. const a32 = mat[5];
  13574. const a13 = mat[6];
  13575. const a23 = mat[7];
  13576. const a33 = mat[8];
  13577. // compute cofactors
  13578. const b1 = a33 * a22 - a32 * a23; // b11
  13579. const b2 = a33 * a12 - a32 * a13; // b21
  13580. const b3 = a23 * a12 - a22 * a13; // b31
  13581. // compute the determinant
  13582. const det = a11 * b1 - a21 * b2 + a31 * b3;
  13583. // set up the inverse
  13584. if(!(Math.abs(det) < eps)) {
  13585. const d = 1.0 / det;
  13586. mat[0] = b1 * d;
  13587. mat[1] = -(a33 * a21 - a31 * a23) * d;
  13588. mat[2] = (a32 * a21 - a31 * a22) * d;
  13589. mat[3] = -b2 * d;
  13590. mat[4] = (a33 * a11 - a31 * a13) * d;
  13591. mat[5] = -(a32 * a11 - a31 * a12) * d;
  13592. mat[6] = b3 * d;
  13593. mat[7] = -(a23 * a11 - a21 * a13) * d;
  13594. mat[8] = (a22 * a11 - a21 * a12) * d;
  13595. }
  13596. else
  13597. mat.fill(Number.NaN, 0, 9);
  13598. // done!
  13599. return mat;
  13600. }
  13601. }
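/*
// [Illustrative sketch: not part of the library]
// _inverse3() above inverts the 3x3 homography via cofactor expansion along
// the first column and writes adjugate / determinant back into the same
// array; the node then feeds the inverse to warpPerspective. The hypothetical
// helper below shows the same computation without the in-place optimization,
// using the same column-major layout:

function inverse3x3ColumnMajor(m, eps = 1e-6)
{
    const [a11, a21, a31, a12, a22, a32, a13, a23, a33] = m;
    const det = a11 * (a22 * a33 - a32 * a23)
              - a21 * (a12 * a33 - a32 * a13)
              + a31 * (a12 * a23 - a22 * a13);

    if(Math.abs(det) < eps)
        return new Array(9).fill(Number.NaN); // singular (or nearly so)

    const d = 1.0 / det;
    return [
        (a22 * a33 - a32 * a23) * d, // first column
       -(a21 * a33 - a31 * a23) * d,
        (a21 * a32 - a31 * a22) * d,
       -(a12 * a33 - a32 * a13) * d, // second column
        (a11 * a33 - a31 * a13) * d,
       -(a11 * a32 - a31 * a12) * d,
        (a12 * a23 - a22 * a13) * d, // third column
       -(a11 * a23 - a21 * a13) * d,
        (a11 * a22 - a21 * a12) * d
    ];
}

// e.g., a pure scaling homography diag(2, 4, 1) maps to diag(0.5, 0.25, 1):
// inverse3x3ColumnMajor([2,0,0, 0,4,0, 0,0,1]) -> [0.5,0,0, 0,0.25,0, 0,0,1]
*/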
  13602. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
  13603. /*
  13604. * speedy-vision.js
  13605. * GPU-accelerated Computer Vision for JavaScript
  13606. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13607. *
  13608. * Licensed under the Apache License, Version 2.0 (the "License");
  13609. * you may not use this file except in compliance with the License.
  13610. * You may obtain a copy of the License at
  13611. *
  13612. * http://www.apache.org/licenses/LICENSE-2.0
  13613. *
  13614. * Unless required by applicable law or agreed to in writing, software
  13615. * distributed under the License is distributed on an "AS IS" BASIS,
  13616. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13617. * See the License for the specific language governing permissions and
  13618. * limitations under the License.
  13619. *
  13620. * resize.js
  13621. * Resize image
  13622. */
  13623. /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
  13624. /**
  13625. * Resize image
  13626. */
  13627. class SpeedyPipelineNodeResize extends SpeedyPipelineNode
  13628. {
  13629. /**
  13630. * Constructor
  13631. * @param {string} [name] name of the node
  13632. */
  13633. constructor(name = undefined)
  13634. {
  13635. super(name, 1, [
  13636. InputPort().expects(SpeedyPipelineMessageType.Image),
  13637. OutputPort().expects(SpeedyPipelineMessageType.Image),
  13638. ]);
  13639. /** @type {SpeedySize} size of the output image, in pixels */
  13640. this._size = new SpeedySize(0, 0);
  13641. /** @type {SpeedyVector2} size of the output relative to the size of the input */
  13642. this._scale = new SpeedyVector2(1, 1);
  13643. /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
  13644. this._method = 'bilinear';
  13645. }
  13646. /**
  13647. * Size of the output image, in pixels (use 0 to use scale)
  13648. * @returns {SpeedySize}
  13649. */
  13650. get size()
  13651. {
  13652. return this._size;
  13653. }
  13654. /**
  13655. * Size of the output image, in pixels (use 0 to use scale)
  13656. * @param {SpeedySize} size
  13657. */
  13658. set size(size)
  13659. {
  13660. this._size = size;
  13661. }
  13662. /**
  13663. * Size of the output image relative to the size of the input image
  13664. * @returns {SpeedyVector2}
  13665. */
  13666. get scale()
  13667. {
  13668. return this._scale;
  13669. }
  13670. /**
  13671. * Size of the output image relative to the size of the input image
  13672. * @param {SpeedyVector2} scale
  13673. */
  13674. set scale(scale)
  13675. {
  13676. this._scale = scale;
  13677. }
  13678. /**
  13679. * Interpolation method
  13680. * @returns {SpeedyPipelineNodeResizeMethod}
  13681. */
  13682. get method()
  13683. {
  13684. return this._method;
  13685. }
  13686. /**
  13687. * Interpolation method
  13688. * @param {SpeedyPipelineNodeResizeMethod} method
  13689. */
  13690. set method(method)
  13691. {
  13692. if(method !== 'nearest' && method !== 'bilinear')
13693. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid interpolation method: "${method}"`);
  13694. this._method = method;
  13695. }
  13696. /**
  13697. * Run the specific task of this node
  13698. * @param {SpeedyGPU} gpu
  13699. * @returns {void|SpeedyPromise<void>}
  13700. */
  13701. _run(gpu)
  13702. {
  13703. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  13704. const width = image.width, height = image.height;
  13705. const outputTexture = this._tex[0];
  13706. const method = this._method;
  13707. const newWidth = this._size.width || Math.max(1, this._scale.x * width);
  13708. const newHeight = this._size.height || Math.max(1, this._scale.y * height);
  13709. if(method == 'bilinear') {
  13710. (gpu.programs.transforms.resizeBilinear
  13711. .outputs(newWidth, newHeight, outputTexture)
  13712. )(image);
  13713. }
  13714. else if(method == 'nearest') {
  13715. (gpu.programs.transforms.resizeNearest
  13716. .outputs(newWidth, newHeight, outputTexture)
  13717. )(image);
  13718. }
  13719. this.output().swrite(outputTexture, format);
  13720. }
  13721. }
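/*
// [Illustrative sketch: not part of the library]
// In _run() above, an explicit output size takes precedence and a dimension
// of 0 falls back to the relative scale. That rule in isolation, as a
// hypothetical helper with plain objects instead of SpeedySize/SpeedyVector2:

function outputDimensions(inputWidth, inputHeight, size, scale)
{
    return {
        width:  size.width  || Math.max(1, scale.x * inputWidth),
        height: size.height || Math.max(1, scale.y * inputHeight)
    };
}

// outputDimensions(640, 480, { width: 0, height: 0 }, { x: 0.5, y: 0.5 })
//   -> { width: 320, height: 240 }
// outputDimensions(640, 480, { width: 100, height: 0 }, { x: 1, y: 0.25 })
//   -> { width: 100, height: 120 }
*/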
  13722. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
  13723. /*
  13724. * speedy-vision.js
  13725. * GPU-accelerated Computer Vision for JavaScript
  13726. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13727. *
  13728. * Licensed under the Apache License, Version 2.0 (the "License");
  13729. * you may not use this file except in compliance with the License.
  13730. * You may obtain a copy of the License at
  13731. *
  13732. * http://www.apache.org/licenses/LICENSE-2.0
  13733. *
  13734. * Unless required by applicable law or agreed to in writing, software
  13735. * distributed under the License is distributed on an "AS IS" BASIS,
  13736. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13737. * See the License for the specific language governing permissions and
  13738. * limitations under the License.
  13739. *
  13740. * transform-factory.js
  13741. * Image transforms
  13742. */
  13743. /**
  13744. * Image transforms
  13745. */
  13746. class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.R
  13747. {
  13748. /**
  13749. * Resize image
  13750. * @param {string} [name]
  13751. * @returns {SpeedyPipelineNodeResize}
  13752. */
  13753. static Resize(name = undefined)
  13754. {
  13755. return new SpeedyPipelineNodeResize(name);
  13756. }
  13757. /**
  13758. * Warp an image using a perspective transformation
  13759. * @param {string} [name]
  13760. * @returns {SpeedyPipelineNodePerspectiveWarp}
  13761. */
  13762. static PerspectiveWarp(name = undefined)
  13763. {
  13764. return new SpeedyPipelineNodePerspectiveWarp(name);
  13765. }
  13766. }
  13767. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
  13768. /*
  13769. * speedy-vision.js
  13770. * GPU-accelerated Computer Vision for JavaScript
  13771. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  13772. *
  13773. * Licensed under the Apache License, Version 2.0 (the "License");
  13774. * you may not use this file except in compliance with the License.
  13775. * You may obtain a copy of the License at
  13776. *
  13777. * http://www.apache.org/licenses/LICENSE-2.0
  13778. *
  13779. * Unless required by applicable law or agreed to in writing, software
  13780. * distributed under the License is distributed on an "AS IS" BASIS,
  13781. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13782. * See the License for the specific language governing permissions and
  13783. * limitations under the License.
  13784. *
  13785. * detector.js
  13786. * Abstract keypoint detectors
  13787. */
  13788. // Constants
  13789. const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
  13790. const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
  13791. const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
  13792. const NUMBER_OF_RGBA16_TEXTURES = 2;
  13793. // legacy constants
  13794. const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
  13795. const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
  13796. const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
  13797. /**
  13798. * Abstract keypoint detector
  13799. * @abstract
  13800. */
  13801. class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode
  13802. {
  13803. /**
  13804. * Constructor
  13805. * @param {string} [name] name of the node
  13806. * @param {number} [texCount] number of work textures
  13807. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  13808. */
  13809. constructor(name = undefined, texCount = 0, portBuilders = undefined)
  13810. {
  13811. super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
  13812. /** @type {number} encoder capacity */
  13813. this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
  13814. /** @type {GLint} auxiliary storage */
  13815. this._oldWrapS = 0;
13816. /** @type {SpeedyDrawableTexture[]} textures with 8 bytes per pixel (RGBA16UI) */
  13817. this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
  13818. }
  13819. /**
  13820. * Initialize this node
  13821. * @param {SpeedyGPU} gpu
  13822. */
  13823. init(gpu)
  13824. {
  13825. // initialize
  13826. super.init(gpu);
  13827. // encodeKeypointSkipOffsets() relies on this
  13828. this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
  13829. // allocate RGBA16 textures
  13830. this._allocateTex16(gpu);
  13831. gpu.subscribe(this._allocateTex16, this, gpu);
  13832. }
  13833. /**
  13834. * Release this node
  13835. * @param {SpeedyGPU} gpu
  13836. */
  13837. release(gpu)
  13838. {
  13839. // deallocate RGBA16 textures
  13840. gpu.unsubscribe(this._allocateTex16, this);
  13841. this._deallocateTex16(gpu);
  13842. // we need to restore the texture parameter because textures come from a pool!
  13843. this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
  13844. // release
  13845. super.release(gpu);
  13846. }
  13847. /**
  13848. * Set a parameter of the special texture
  13849. * @param {GLenum} pname
  13850. * @param {GLint} param new value
  13851. * @returns {GLint} old value of param
  13852. */
  13853. _setupSpecialTexture(pname, param)
  13854. {
  13855. if(NUMBER_OF_INTERNAL_TEXTURES == 0)
13856. return 0; // no special texture: keep the documented GLint return type
  13857. // legacy code
  13858. const texture = this._tex[this._tex.length - 1];
  13859. const gl = texture.gl;
  13860. gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
  13861. const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
  13862. gl.texParameteri(gl.TEXTURE_2D, pname, param);
  13863. gl.bindTexture(gl.TEXTURE_2D, null);
  13864. return oldval;
  13865. }
  13866. /**
  13867. * We can encode up to this many keypoints. If you find a
  13868. * tight bound for this, download times will be faster.
  13869. * @returns {number}
  13870. */
  13871. get capacity()
  13872. {
  13873. return this._capacity;
  13874. }
  13875. /**
  13876. * We can encode up to this many keypoints. If you find a
  13877. * tight bound for this, download times will be faster.
  13878. * @param {number} capacity
  13879. */
  13880. set capacity(capacity)
  13881. {
  13882. this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
  13883. }
  13884. /**
  13885. * Create a tiny texture with encoded keypoints out of
  13886. * an encoded corners texture
  13887. * @param {SpeedyGPU} gpu
  13888. * @param {SpeedyTexture} corners input
  13889. * @param {SpeedyDrawableTexture} encodedKeypoints output
  13890. * @param {number} [descriptorSize] in bytes
  13891. * @param {number} [extraSize] in bytes
  13892. * @returns {SpeedyDrawableTexture} encodedKeypoints
  13893. */
  13894. _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  13895. {
  13896. const encoderCapacity = this._capacity;
  13897. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
  13898. const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
  13899. const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
  13900. //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
  13901. const maxSize = Math.max(width, height);
  13902. const keypoints = gpu.programs.keypoints;
  13903. // prepare programs
  13904. keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
  13905. keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
  13906. keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  13907. // compute lookup table
  13908. let lookupTable = keypoints.initLookupTable(corners);
  13909. for(let b = 1; b < maxSize; b *= 2)
  13910. lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
  13911. /*
  13912. // debug: view texture
  13913. const lookupView = (keypoints.viewLookupTable.outputs(
  13914. width, height, this._tex[0]
  13915. ))(lookupTable);
  13916. const canvas = gpu.renderToCanvas(lookupView);
  13917. if(!this._ww) document.body.appendChild(canvas);
  13918. this._ww = 1;
  13919. */
  13920. // encode keypoints
  13921. return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
  13922. }
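/*
// [Illustrative sketch: not part of the library]
// The lookup table used above is laid out in a texture whose width is a power
// of two close to sqrt(w*h), with height = ceil(w*h / width), so that all
// w*h candidate pixels fit. The sizing rule in isolation (hypothetical helper):

function lookupTableSize(cornersWidth, cornersHeight)
{
    const pixels = cornersWidth * cornersHeight;
    const width = 1 << (Math.ceil(Math.log2(pixels)) >>> 1); // power of two
    const height = Math.ceil(pixels / width);
    return { width, height };
}

// lookupTableSize(640, 480) -> { width: 512, height: 600 }; 512 * 600 >= 307200
*/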
/**
 * Create a tiny texture with encoded keypoints out of an encoded corners
 * texture - legacy implementation, superseded by _encodeKeypoints() above
 * @param {SpeedyGPU} gpu
 * @param {SpeedyTexture} corners input
 * @param {SpeedyDrawableTexture} encodedKeypoints output
 * @param {number} [descriptorSize] in bytes
 * @param {number} [extraSize] in bytes
 * @returns {SpeedyDrawableTexture} encodedKeypoints
 */
13923. _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  13924. {
  13925. const capacity = this._capacity;
  13926. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  13927. const width = corners.width, height = corners.height;
  13928. const imageSize = [ width, height ];
  13929. const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
  13930. const keypoints = gpu.programs.keypoints;
  13931. const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
  13932. // prepare programs
  13933. keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
  13934. keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
  13935. keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
  13936. keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
  13937. // copy the input corners to a special texture
  13938. // that is needed by encodeKeypointSkipOffsets()
  13939. corners = (gpu.programs.utils.copy
  13940. .outputs(width, height, specialTexture)
  13941. )(corners);
  13942. // encode skip offsets
  13943. let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
  13944. for(let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) { // to boost performance
  13945. // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
  13946. // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
  13947. offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
  13948. }
  13949. /*
  13950. // debug: view corners
  13951. let cornerview = offsets;
  13952. const canvas = gpu.renderToCanvas(cornerview);
  13953. if(!window._ww) document.body.appendChild(canvas);
  13954. window._ww = 1;
  13955. */
  13956. // encode keypoint positions
  13957. let encodedKps = tex[3].clear();
  13958. for(let j = 0; j < ENCODER_PASSES; j++)
  13959. encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
  13960. // encode keypoint properties
  13961. return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
  13962. }
  13963. /**
  13964. * Create a tiny texture with zero encoded keypoints
  13965. * @param {SpeedyGPU} gpu
  13966. * @param {SpeedyDrawableTexture} encodedKeypoints output texture
  13967. * @param {number} [descriptorSize] in bytes
  13968. * @param {number} [extraSize] in bytes
  13969. * @returns {SpeedyDrawableTexture} encodedKeypoints
  13970. */
  13971. _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0)
  13972. {
  13973. const capacity = 0;
  13974. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  13975. const keypoints = gpu.programs.keypoints;
  13976. keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  13977. return keypoints.encodeNullKeypoints();
  13978. }
  13979. /**
  13980. * Allocate RGBA16 textures
  13981. * @param {SpeedyGPU} gpu
  13982. */
  13983. _allocateTex16(gpu)
  13984. {
  13985. const gl = gpu.gl;
  13986. // RGBA16UI is color renderable according to the OpenGL ES 3 spec
  13987. for(let i = 0; i < this._tex16.length; i++)
  13988. this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
  13989. }
  13990. /**
  13991. * Deallocate RGBA16 textures
  13992. * @param {SpeedyGPU} gpu
  13993. */
  13994. _deallocateTex16(gpu)
  13995. {
  13996. for(let i = 0; i < this._tex16.length; i++)
  13997. this._tex16[i] = this._tex16[i].release();
  13998. }
  13999. /**
  14000. * Compute the length of the keypoint encoder, given its capacity
  14001. * @param {number} encoderCapacity how many keypoints can we fit?
  14002. * @param {number} descriptorSize in bytes
  14003. * @param {number} extraSize in bytes
  14004. */
  14005. static encoderLength(encoderCapacity, descriptorSize, extraSize)
  14006. {
  14007. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  14008. const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
  14009. return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
  14010. }
  14011. /**
  14012. * The maximum number of keypoints we can store using
  14013. * a particular configuration of a keypoint encoder
  14014. * @param {number} descriptorSize in bytes
  14015. * @param {number} extraSize in bytes
  14016. * @param {number} encoderLength
  14017. */
  14018. static encoderCapacity(descriptorSize, extraSize, encoderLength)
  14019. {
  14020. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  14021. const numberOfPixels = encoderLength * encoderLength;
  14022. return Math.floor(numberOfPixels / pixelsPerKeypoint);
  14023. }
  14024. }
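/*
// [Illustrative sketch: not part of the library]
// encoderLength() above sizes a square texture so that `capacity` keypoints
// fit, each keypoint occupying ceil((header + descriptor + extra) / 4) pixels
// (4 bytes per RGBA8 pixel); encoderCapacity() is its inverse. Assuming, just
// for the arithmetic, an 8-byte header (a stand-in for MIN_KEYPOINT_SIZE) and
// ignoring the minimum encoder length:

const EXAMPLE_HEADER_BYTES = 8;
function exampleEncoderLength(capacity, descriptorSize, extraSize)
{
    const pixelsPerKeypoint = Math.ceil((EXAMPLE_HEADER_BYTES + descriptorSize + extraSize) / 4);
    return Math.ceil(Math.sqrt(capacity * pixelsPerKeypoint));
}

// 2048 keypoints with 32-byte descriptors and no extra bytes:
// pixelsPerKeypoint = ceil(40 / 4) = 10, so the encoder becomes a
// ceil(sqrt(20480)) = 144 x 144 texture (144 * 144 = 20736 pixels >= 20480)
*/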
  14025. /**
  14026. * Abstract scale-space keypoint detector
  14027. * @abstract
  14028. */
  14029. class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector
  14030. {
  14031. /**
  14032. * Constructor
  14033. * @param {string} [name] name of the node
  14034. * @param {number} [texCount] number of work textures
  14035. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14036. */
  14037. constructor(name = undefined, texCount = undefined, portBuilders = undefined)
  14038. {
  14039. super(name, texCount, portBuilders);
  14040. /** @type {number} number of pyramid levels */
  14041. this._levels = 1;
  14042. /** @type {number} scale factor between two pyramid levels */
  14043. this._scaleFactor = DEFAULT_SCALE_FACTOR;
  14044. }
  14045. /**
  14046. * Number of pyramid levels
  14047. * @returns {number}
  14048. */
  14049. get levels()
  14050. {
  14051. return this._levels;
  14052. }
  14053. /**
  14054. * Number of pyramid levels
  14055. * @param {number} levels
  14056. */
  14057. set levels(levels)
  14058. {
  14059. this._levels = Math.max(1, levels | 0);
  14060. }
  14061. /**
  14062. * Scale factor between two pyramid levels
  14063. * @returns {number}
  14064. */
  14065. get scaleFactor()
  14066. {
  14067. return this._scaleFactor;
  14068. }
  14069. /**
  14070. * Scale factor between two pyramid levels
  14071. * @param {number} scaleFactor should be greater than 1
  14072. */
  14073. set scaleFactor(scaleFactor)
  14074. {
  14075. this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
  14076. }
  14077. }
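/*
// [Illustrative sketch: not part of the library]
// A multiscale detector operates on an image pyramid: with `levels` levels
// and a scale factor s clamped to [1, 2] by the setter above, consecutive
// levels differ in scale by a factor of s. Purely as arithmetic, the scales
// spanned by such a pyramid are:

function pyramidScales(levels, scaleFactor)
{
    return Array.from({ length: levels }, (_, i) => Math.pow(scaleFactor, -i));
}

// pyramidScales(4, Math.SQRT2) -> [1, 0.7071..., 0.5, 0.3535...]
*/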
  14078. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
  14079. /*
  14080. * speedy-vision.js
  14081. * GPU-accelerated Computer Vision for JavaScript
  14082. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14083. *
  14084. * Licensed under the Apache License, Version 2.0 (the "License");
  14085. * you may not use this file except in compliance with the License.
  14086. * You may obtain a copy of the License at
  14087. *
  14088. * http://www.apache.org/licenses/LICENSE-2.0
  14089. *
  14090. * Unless required by applicable law or agreed to in writing, software
  14091. * distributed under the License is distributed on an "AS IS" BASIS,
  14092. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14093. * See the License for the specific language governing permissions and
  14094. * limitations under the License.
  14095. *
  14096. * source.js
  14097. * Gets keypoints into the pipeline
  14098. */
  14099. // Constants
  14100. const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
  14101. const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
  14102. const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
  14103. /**
  14104. * Gets keypoints into the pipeline
  14105. */
  14106. class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode
  14107. {
  14108. /**
  14109. * Constructor
  14110. * @param {string} [name] name of the node
  14111. */
  14112. constructor(name = undefined)
  14113. {
  14114. super(name, 2, [
  14115. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14116. ]);
  14117. /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
  14118. this._keypoints = [];
  14119. /** @type {Float32Array} upload buffer (UBO) */
  14120. this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
  14121. /** @type {number} maximum number of keypoints */
  14122. this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
  14123. }
  14124. /**
  14125. * Keypoints to be uploaded
  14126. * @returns {SpeedyKeypoint[]}
  14127. */
  14128. get keypoints()
  14129. {
  14130. return this._keypoints;
  14131. }
  14132. /**
  14133. * Keypoints to be uploaded
  14134. * @param {SpeedyKeypoint[]} keypoints
  14135. */
  14136. set keypoints(keypoints)
  14137. {
  14138. if(!Array.isArray(keypoints))
  14139. throw new utils_errors/* IllegalArgumentError */.mG(`Not an array of keypoints`);
  14140. this._keypoints = keypoints;
  14141. }
  14142. /**
  14143. * The maximum number of keypoints we'll accept.
  14144. * This should be a tight bound for better performance.
  14145. * @returns {number}
  14146. */
  14147. get capacity()
  14148. {
  14149. return this._capacity;
  14150. }
  14151. /**
  14152. * The maximum number of keypoints we'll accept.
  14153. * This should be a tight bound for better performance.
  14154. * @param {number} capacity
  14155. */
  14156. set capacity(capacity)
  14157. {
  14158. this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
  14159. }
  14160. /**
  14161. * Run the specific task of this node
  14162. * @param {SpeedyGPU} gpu
  14163. * @returns {void|SpeedyPromise<void>}
  14164. */
  14165. _run(gpu)
  14166. {
  14167. // Orientation, descriptors and extra bytes will be lost
  14168. const descriptorSize = 0, extraSize = 0;
  14169. const keypoints = this._keypoints;
  14170. const maxKeypoints = this._capacity;
  14171. const numKeypoints = Math.min(keypoints.length, maxKeypoints);
  14172. const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
  14173. const buffer = this._buffer;
  14174. const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
  14175. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
  14176. uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
  14177. let startIndex = 0, encodedKeypoints = uploadKeypoints.clear();
  14178. for(let i = 0; i < numPasses; i++) {
  14179. const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
  14180. const endIndex = startIndex + n;
  14181. uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
  14182. encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
  14183. startIndex = endIndex;
  14184. }
  14185. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14186. }
  14187. /**
  14188. * Create an upload buffer
  14189. * @param {number} bufferSize number of keypoints
  14190. * @returns {Float32Array}
  14191. */
  14192. static _createUploadBuffer(bufferSize)
  14193. {
  14194. const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
  14195. utils/* Utils.assert */.c.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
  14196. return new Float32Array(internalBuffer);
  14197. }
  14198. /**
  14199. * Fill upload buffer with keypoint data
  14200. * @param {Float32Array} buffer
  14201. * @param {SpeedyKeypoint[]} keypoints
  14202. * @param {number} start index, inclusive
  14203. * @param {number} end index, exclusive
  14204. * @returns {Float32Array} buffer
  14205. */
  14206. static _fillUploadBuffer(buffer, keypoints, start, end)
  14207. {
  14208. const n = end - start;
  14209. for(let i = 0; i < n; i++) {
  14210. const keypoint = keypoints[start + i];
  14211. const hasPos = keypoint.position !== undefined;
  14212. const j = i * 4;
  14213. // Format data as follows:
  14214. // vec4(xpos, ypos, lod, score)
  14215. buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
  14216. buffer[j+1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
  14217. buffer[j+2] = +(keypoint.lod) || 0;
  14218. buffer[j+3] = +(keypoint.score) || 0;
  14219. }
  14220. // done!
  14221. return buffer;
  14222. }
  14223. }
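/*
// [Illustrative sketch: not part of the library]
// The uploader above packs each keypoint as vec4(x, y, lod, score) into a
// Float32Array-backed UBO of BUFFER_SIZE entries (1024 * 16 bytes = 16 KB,
// the guaranteed minimum uniform block size) and uploads N keypoints in
// ceil(N / BUFFER_SIZE) passes. The pass arithmetic in isolation:

function uploadPasses(numKeypoints, bufferSize = 1024)
{
    const passes = [];
    for(let start = 0; start < numKeypoints || passes.length == 0; start += bufferSize)
        passes.push([start, Math.min(start + bufferSize, numKeypoints)]); // [start, end)
    return passes;
}

// uploadPasses(2500) -> [ [0, 1024], [1024, 2048], [2048, 2500] ]
// uploadPasses(0)    -> [ [0, 0] ] (at least one pass, as in Math.max(1, ...))
*/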
  14224. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
  14225. /*
  14226. * speedy-vision.js
  14227. * GPU-accelerated Computer Vision for JavaScript
  14228. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14229. *
  14230. * Licensed under the Apache License, Version 2.0 (the "License");
  14231. * you may not use this file except in compliance with the License.
  14232. * You may obtain a copy of the License at
  14233. *
  14234. * http://www.apache.org/licenses/LICENSE-2.0
  14235. *
  14236. * Unless required by applicable law or agreed to in writing, software
  14237. * distributed under the License is distributed on an "AS IS" BASIS,
  14238. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14239. * See the License for the specific language governing permissions and
  14240. * limitations under the License.
  14241. *
  14242. * speedy-keypoint-descriptor.js
  14243. * Keypoint descriptor
  14244. */
  14245. /**
  14246. * Represents a keypoint descriptor
  14247. */
  14248. class SpeedyKeypointDescriptor
  14249. {
  14250. /**
  14251. * Constructor
  14252. * @param {Uint8Array} data descriptor bytes
  14253. */
  14254. constructor(data)
  14255. {
  14256. this._data = data;
  14257. return Object.freeze(this);
  14258. }
  14259. /**
  14260. * Descriptor data
  14261. * @returns {Uint8Array}
  14262. */
  14263. get data()
  14264. {
  14265. return this._data;
  14266. }
  14267. /**
  14268. * The size of the descriptor, in bytes
  14269. * @returns {number}
  14270. */
  14271. get size()
  14272. {
  14273. return this._data.byteLength;
  14274. }
  14275. /**
  14276. * A string representation of the keypoint descriptor
  14277. * @returns {string}
  14278. */
  14279. toString()
  14280. {
  14281. return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
  14282. }
  14283. }
  14284. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
  14285. /*
  14286. * speedy-vision.js
  14287. * GPU-accelerated Computer Vision for JavaScript
  14288. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14289. *
  14290. * Licensed under the Apache License, Version 2.0 (the "License");
  14291. * you may not use this file except in compliance with the License.
  14292. * You may obtain a copy of the License at
  14293. *
  14294. * http://www.apache.org/licenses/LICENSE-2.0
  14295. *
  14296. * Unless required by applicable law or agreed to in writing, software
  14297. * distributed under the License is distributed on an "AS IS" BASIS,
  14298. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14299. * See the License for the specific language governing permissions and
  14300. * limitations under the License.
  14301. *
  14302. * sink.js
  14303. * Gets keypoints out of the pipeline
  14304. */
  14305. /** next power of 2 */
  14306. const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  14307. /** empty array of bytes */
  14308. const ZERO_BYTES = new Uint8Array([]);
  14309. /**
  14310. * Gets keypoints out of the pipeline
  14311. * @template {SpeedyKeypoint} T
  14312. * @abstract
  14313. */
  14314. class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode
  14315. {
  14316. /**
  14317. * Constructor
  14318. * @param {string} [name] name of the node
  14319. * @param {number} [texCount]
  14320. * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
  14321. */
  14322. constructor(name = 'keypoints', texCount = 0, portBuilders = [])
  14323. {
  14324. super(name, texCount + 2, portBuilders);
  14325. /** @type {Array<T|null>} keypoints (output) */
  14326. this._keypoints = [];
  14327. /** @type {SpeedyTextureReader} texture reader */
  14328. this._textureReader = new SpeedyTextureReader();
  14329. /** @type {number} page flipping index */
  14330. this._page = 0;
  14331. /** @type {boolean} accelerate GPU-CPU transfers */
  14332. this._turbo = false;
  14333. /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
  14334. this._includeDiscarded = false;
  14335. }
  14336. /**
  14337. * Accelerate GPU-CPU transfers
  14338. * @returns {boolean}
  14339. */
  14340. get turbo()
  14341. {
  14342. return this._turbo;
  14343. }
  14344. /**
  14345. * Accelerate GPU-CPU transfers
  14346. * @param {boolean} value
  14347. */
  14348. set turbo(value)
  14349. {
  14350. this._turbo = Boolean(value);
  14351. }
  14352. /**
  14353. * Should discarded keypoints be exported as null or dropped altogether?
  14354. * @returns {boolean}
  14355. */
  14356. get includeDiscarded()
  14357. {
  14358. return this._includeDiscarded;
  14359. }
  14360. /**
  14361. * Should discarded keypoints be exported as null or dropped altogether?
  14362. * @param {boolean} value
  14363. */
  14364. set includeDiscarded(value)
  14365. {
  14366. this._includeDiscarded = Boolean(value);
  14367. }
  14368. /**
  14369. * Initializes this node
  14370. * @param {SpeedyGPU} gpu
  14371. */
  14372. init(gpu)
  14373. {
  14374. super.init(gpu);
  14375. this._textureReader.init(gpu);
  14376. }
  14377. /**
  14378. * Releases this node
  14379. * @param {SpeedyGPU} gpu
  14380. */
  14381. release(gpu)
  14382. {
  14383. this._textureReader.release(gpu);
  14384. super.release(gpu);
  14385. }
  14386. /**
  14387. * Export data from this node to the user
  14388. * @returns {SpeedyPromise<Array<T|null>>}
  14389. */
  14390. export()
  14391. {
  14392. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(this._keypoints);
  14393. }
  14394. /**
  14395. * Run the specific task of this node
  14396. * @param {SpeedyGPU} gpu
  14397. * @returns {void|SpeedyPromise<void>}
  14398. */
  14399. _run(gpu)
  14400. {
  14401. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14402. return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14403. }
  14404. /**
  14405. * Download and decode keypoints from the GPU
  14406. * @param {SpeedyGPU} gpu
  14407. * @param {SpeedyDrawableTexture} encodedKeypoints
  14408. * @param {number} descriptorSize
  14409. * @param {number} extraSize
  14410. * @param {number} encoderLength
  14411. * @returns {SpeedyPromise<void>}
  14412. */
  14413. _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength)
  14414. {
  14415. const useBufferedDownloads = this._turbo;
  14416. /*
  14417. I have found experimentally that, in Firefox, readPixelsAsync()
  14418. performs MUCH better if the width of the target texture is a power
  14419. of two. I have no idea why this is the case, nor if it's related to
  14420. some interaction with the GL drivers, somehow. This seems to make no
  14421. difference on Chrome, however. In any case, let's convert the input
  14422. texture to POT.
  14423. */
  14424. const encoderWidth = sink_nextPot(encoderLength);
  14425. //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  14426. const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
  14427. //const encoderWidth=encoderLength,encoderHeight=encoderLength;
  14428. // copy the set of keypoints to an internal texture
  14429. const copiedTexture = this._tex[(this._tex.length - 1) - this._page];
  14430. (gpu.programs.utils.copyKeypoints
  14431. .outputs(encoderWidth, encoderHeight, copiedTexture)
  14432. )(encodedKeypoints);
  14433. // flip page
  14434. this._page = 1 - this._page;
  14435. // download the internal texture
  14436. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  14437. // decode the keypoints and store them in this._keypoints
  14438. this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
  14439. });
  14440. }
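/*
// [Illustrative sketch: not part of the library]
// The copy above reshapes the encoderLength x encoderLength texture into one
// whose width is the next power of two, keeping at least as many pixels,
// because power-of-two widths were observed to download faster on Firefox.
// The reshaping rule in isolation (hypothetical helper):

function downloadShape(encoderLength)
{
    const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
    const width = nextPot(encoderLength);
    const height = Math.ceil(encoderLength * encoderLength / width);
    return { width, height };
}

// downloadShape(100) -> { width: 128, height: 79 }; 128 * 79 = 10112 >= 10000
*/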
  14441. /**
  14442. * Decode a sequence of keypoints, given a flattened image of encoded pixels
  14443. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  14444. * @param {number} descriptorSize in bytes
  14445. * @param {number} extraSize in bytes
  14446. * @param {number} encoderWidth
  14447. * @param {number} encoderHeight
  14448. * @returns {Array<T|null>} keypoints
  14449. */
  14450. _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight)
  14451. {
  14452. const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
  14453. const m = globals.LOG2_PYRAMID_MAX_SCALE, h = globals.PYRAMID_MAX_LEVELS;
  14454. const piOver255 = Math.PI / 255.0;
  14455. const keypoints = /** @type {Array<T|null>} */ ( [] );
  14456. const includeDiscarded = this._includeDiscarded;
  14457. let descriptorBytes = ZERO_BYTES, extraBytes = ZERO_BYTES;
  14458. let x, y, z, w, lod, rotation, score;
  14459. let keypoint;
  14460. // validate
  14461. if(descriptorSize % 4 != 0 || extraSize % 4 != 0)
  14462. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
  14463. // how many bytes should we read?
  14464. const e2 = encoderWidth * encoderHeight * 4;
  14465. const size = pixels.byteLength;
  14466. if(size != e2)
  14467. utils/* Utils.warning */.c.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
  14468. // copy the data (we use shared buffers when receiving pixels[])
  14469. if(descriptorSize + extraSize > 0)
  14470. pixels = new Uint8Array(pixels);
  14471. // for each encoded keypoint
  14472. for(let i = 0; i < size; i += bytesPerKeypoint) {
  14473. // extract encoded header
  14474. x = (pixels[i+1] << 8) | pixels[i];
  14475. y = (pixels[i+3] << 8) | pixels[i+2];
  14476. z = (pixels[i+5] << 8) | pixels[i+4];
  14477. w = (pixels[i+7] << 8) | pixels[i+6];
  14478. // the keypoint is "null": we have reached the end of the list
  14479. if(x == 0xFFFF && y == 0xFFFF)
  14480. break;
  14481. // the header is zero: discard the keypoint
  14482. if(x + y + z + w == 0) {
  14483. if(includeDiscarded)
  14484. keypoints.push(null);
  14485. continue;
  14486. }
  14487. // extract extra & descriptor bytes
  14488. if(extraSize > 0) {
  14489. extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
  14490. if(extraBytes.byteLength < extraSize) {
  14491. utils/* Utils.warning */.c.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i/bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
  14492. continue; // something is off here; discard
  14493. }
  14494. }
  14495. if(descriptorSize > 0) {
  14496. descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
  14497. if(descriptorBytes.byteLength < descriptorSize) {
  14498. utils/* Utils.warning */.c.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i/bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
  14499. continue; // something is off here; discard
  14500. }
  14501. }
  14502. // decode position: convert from fixed-point
  14503. x /= globals.FIX_RESOLUTION;
  14504. y /= globals.FIX_RESOLUTION;
  14505. // decode level-of-detail
  14506. lod = (pixels[i+4] < 255) ? -m + ((m + h) * pixels[i+4]) / 255.0 : 0.0;
  14507. // decode orientation
  14508. rotation = (2 * pixels[i+5] - 255) * piOver255;
  14509. // decode score
  14510. score = utils/* Utils.decodeFloat16 */.c.decodeFloat16(w);
  14511. // create keypoint
  14512. keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
  14513. // register keypoint
  14514. keypoints.push(keypoint);
  14515. }
  14516. // done!
  14517. return keypoints;
  14518. }
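/*
// [Illustrative sketch: not part of the library]
// Each encoded keypoint begins with an 8-byte header, read above as four
// little-endian 16-bit words: fixed-point x and y, one byte each for
// level-of-detail and rotation, and a half-float score. A standalone decoder
// of just that header; FIX_RESOLUTION, m (LOG2_PYRAMID_MAX_SCALE),
// h (PYRAMID_MAX_LEVELS) and decodeFloat16 stand for the corresponding
// globals and utility:

function decodeKeypointHeader(bytes, i, FIX_RESOLUTION, m, h, decodeFloat16)
{
    const xfix = (bytes[i+1] << 8) | bytes[i];
    const yfix = (bytes[i+3] << 8) | bytes[i+2];

    if(xfix == 0xFFFF && yfix == 0xFFFF)
        return null; // end-of-list marker

    return {
        x: xfix / FIX_RESOLUTION,
        y: yfix / FIX_RESOLUTION,
        lod: bytes[i+4] < 255 ? -m + (m + h) * bytes[i+4] / 255.0 : 0.0,
        rotation: (2 * bytes[i+5] - 255) * (Math.PI / 255.0),
        score: decodeFloat16((bytes[i+7] << 8) | bytes[i+6])
    };
}
*/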
  14519. /**
  14520. * Instantiate a new keypoint
  14521. * @param {number} x
  14522. * @param {number} y
  14523. * @param {number} lod
  14524. * @param {number} rotation
  14525. * @param {number} score
  14526. * @param {Uint8Array} descriptorBytes
  14527. * @param {Uint8Array} extraBytes
  14528. * @returns {T}
  14529. */
  14530. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14531. {
  14532. throw new utils_errors/* AbstractMethodError */.Mi();
  14533. }
  14534. /**
14535. * Allocate extra space
  14536. * @param {SpeedyGPU} gpu
  14537. * @param {SpeedyDrawableTexture} output output texture
  14538. * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
  14539. * @param {number} inputDescriptorSize in bytes, must be positive
  14540. * @param {number} inputExtraSize must be 0
  14541. * @param {number} outputDescriptorSize must be inputDescriptorSize
  14542. * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
  14543. * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
  14544. */
  14545. _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize)
  14546. {
  14547. utils/* Utils.assert */.c.assert(inputExtraSize === 0);
  14548. utils/* Utils.assert */.c.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
  14549. const inputEncoderLength = inputEncodedKeypoints.width;
  14550. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  14551. const outputEncoderCapacity = inputEncoderCapacity;
  14552. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  14553. return (gpu.programs.keypoints.allocateExtra
  14554. .outputs(outputEncoderLength, outputEncoderLength, output)
  14555. )(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  14556. }
  14557. }
  14558. /**
  14559. * Gets standard keypoints out of the pipeline
  14560. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
  14561. */
  14562. class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  14563. {
  14564. /**
  14565. * Constructor
  14566. * @param {string} [name] name of the node
  14567. */
  14568. constructor(name = 'keypoints')
  14569. {
  14570. super(name, 0, [
  14571. InputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14572. ]);
  14573. }
  14574. /**
  14575. * Instantiate a new keypoint
  14576. * @param {number} x
  14577. * @param {number} y
  14578. * @param {number} lod
  14579. * @param {number} rotation
  14580. * @param {number} score
  14581. * @param {Uint8Array} descriptorBytes
  14582. * @param {Uint8Array} extraBytes
  14583. * @returns {SpeedyKeypoint}
  14584. */
  14585. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14586. {
  14587. const descriptorSize = descriptorBytes.byteLength;
  14588. // read descriptor, if any
  14589. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  14590. // create keypoint
  14591. return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
  14592. }
  14593. }
  14594. /**
  14595. * Gets tracked keypoints out of the pipeline
  14596. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
  14597. */
  14598. class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  14599. {
  14600. /**
  14601. * Constructor
  14602. * @param {string} [name] name of the node
  14603. */
  14604. constructor(name = 'keypoints')
  14605. {
  14606. super(name, 2, [
  14607. InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  14608. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  14609. msg.extraSize == 0
  14610. ),
  14611. InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)
  14612. ]);
  14613. }
  14614. /**
  14615. * Run the specific task of this node
  14616. * @param {SpeedyGPU} gpu
  14617. * @returns {void|SpeedyPromise<void>}
  14618. */
  14619. _run(gpu)
  14620. {
  14621. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14622. const { vectors } = /** @type {SpeedyPipelineMessageWith2DVectors} */ ( this.input('flow').read() );
  14623. // allocate extra space
  14624. const newDescriptorSize = descriptorSize;
  14625. const newExtraSize = 4; // 1 pixel per flow vector per keypoint
  14626. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  14627. // attach flow vectors
  14628. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  14629. const newEncodedKeypoints = (gpu.programs.keypoints.transferToExtra
  14630. .outputs(newEncoderLength, newEncoderLength, this._tex[1])
  14631. )(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  14632. // done!
  14633. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  14634. }
  14635. /**
  14636. * Instantiate a new keypoint
  14637. * @param {number} x
  14638. * @param {number} y
  14639. * @param {number} lod
  14640. * @param {number} rotation
  14641. * @param {number} score
  14642. * @param {Uint8Array} descriptorBytes
  14643. * @param {Uint8Array} extraBytes
  14644. * @returns {SpeedyTrackedKeypoint}
  14645. */
  14646. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14647. {
  14648. const descriptorSize = descriptorBytes.byteLength;
  14649. const extraSize = extraBytes.byteLength;
  14650. // read descriptor, if any
  14651. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  14652. // read flow vector
  14653. const fx = utils/* Utils.decodeFloat16 */.c.decodeFloat16((extraBytes[1] << 8) | extraBytes[0]);
  14654. const fy = utils/* Utils.decodeFloat16 */.c.decodeFloat16((extraBytes[3] << 8) | extraBytes[2]);
  14655. const flow = new SpeedyVector2(fx, fy);
  14656. // create keypoint
  14657. return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
  14658. }
  14659. }
  14660. /**
  14661. * Gets matched keypoints out of the pipeline
  14662. * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
  14663. */
  14664. class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink
  14665. {
  14666. /**
  14667. * Constructor
  14668. * @param {string} [name] name of the node
  14669. */
  14670. constructor(name = 'keypoints')
  14671. {
  14672. super(name, 2, [
  14673. InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  14674. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  14675. msg.extraSize == 0
  14676. ),
  14677. InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)
  14678. ]);
  14679. }
  14680. /**
  14681. * Run the specific task of this node
  14682. * @param {SpeedyGPU} gpu
  14683. * @returns {void|SpeedyPromise<void>}
  14684. */
  14685. _run(gpu)
  14686. {
  14687. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14688. const { encodedMatches, matchesPerKeypoint } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */ ( this.input('matches').read() );
  14689. // allocate space for the matches
  14690. const newDescriptorSize = descriptorSize;
14691. const newExtraSize = matchesPerKeypoint * 4; // 1 pixel (4 bytes) per match
  14692. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  14693. // transfer matches to a new texture
  14694. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  14695. const newEncodedKeypoints = (gpu.programs.keypoints.transferToExtra
  14696. .outputs(newEncoderLength, newEncoderLength, this._tex[1])
  14697. )(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  14698. // done!
  14699. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  14700. }
  14701. /**
  14702. * Instantiate a new keypoint
  14703. * @param {number} x
  14704. * @param {number} y
  14705. * @param {number} lod
  14706. * @param {number} rotation
  14707. * @param {number} score
  14708. * @param {Uint8Array} descriptorBytes
  14709. * @param {Uint8Array} extraBytes
  14710. * @returns {SpeedyMatchedKeypoint}
  14711. */
  14712. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes)
  14713. {
  14714. const descriptorSize = descriptorBytes.byteLength;
  14715. const extraSize = extraBytes.byteLength;
  14716. // read descriptor, if any
  14717. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  14718. // decode matches
  14719. const matchesPerKeypoint = extraSize / 4;
  14720. const matches = /** @type {SpeedyKeypointMatch[]} */ ( new Array(matchesPerKeypoint) );
  14721. for(let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
  14722. const base = matchIndex * 4;
  14723. const u32 = extraBytes[base] | (extraBytes[base+1] << 8) | (extraBytes[base+2] << 16) | (extraBytes[base+3] << 24);
  14724. const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
  14725. matches[matchIndex] = match;
  14726. }
  14727. // done!
  14728. return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
  14729. }
  14730. }
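/*
 * A hedged sketch of how one match is decoded from 4 little-endian bytes, mirroring the
 * loop above: the low MATCH_INDEX_BITS bits of the 32-bit word hold the keypoint index
 * and the remaining high bits hold the match distance. The concrete bit widths live in
 * globals; the default used below is only an illustrative assumption.
 */
function decodeMatchSketch(extraBytes, base, indexBits = 16 /* hypothetical width */)
{
    const u32 = (extraBytes[base] | (extraBytes[base+1] << 8) | (extraBytes[base+2] << 16) | (extraBytes[base+3] << 24)) >>> 0;
    const indexMask = (1 << indexBits) - 1;
    return { index: u32 & indexMask, distance: u32 >>> indexBits };
}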
  14731. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
  14732. /*
  14733. * speedy-vision.js
  14734. * GPU-accelerated Computer Vision for JavaScript
  14735. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14736. *
  14737. * Licensed under the Apache License, Version 2.0 (the "License");
  14738. * you may not use this file except in compliance with the License.
  14739. * You may obtain a copy of the License at
  14740. *
  14741. * http://www.apache.org/licenses/LICENSE-2.0
  14742. *
  14743. * Unless required by applicable law or agreed to in writing, software
  14744. * distributed under the License is distributed on an "AS IS" BASIS,
  14745. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14746. * See the License for the specific language governing permissions and
  14747. * limitations under the License.
  14748. *
  14749. * clipper.js
  14750. * Keypoint clipper
  14751. */
  14752. // Constants
  14753. const LOG2_STRIDE = 5;
  14754. const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
  14755. /**
  14756. * Keypoint clipper: filters the best keypoints from a stream
  14757. */
  14758. class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode
  14759. {
  14760. /**
  14761. * Constructor
  14762. * @param {string} [name] name of the node
  14763. */
  14764. constructor(name = undefined)
  14765. {
  14766. super(name, 4, [
  14767. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  14768. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14769. ]);
  14770. /** @type {number} the maximum number of keypoints in the output */
  14771. this._size = MAX_SIZE;
  14772. }
  14773. /**
  14774. * The maximum number of keypoints in the output
  14775. * @returns {number}
  14776. */
  14777. get size()
  14778. {
  14779. return this._size;
  14780. }
  14781. /**
  14782. * The maximum number of keypoints in the output
  14783. * @param {number} size
  14784. */
  14785. set size(size)
  14786. {
  14787. this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
  14788. }
  14789. /**
  14790. * Run the specific task of this node
  14791. * @param {SpeedyGPU} gpu
  14792. * @returns {void|SpeedyPromise<void>}
  14793. */
  14794. _run(gpu)
  14795. {
  14796. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14797. const keypoints = gpu.programs.keypoints;
  14798. const clipValue = this._size;
  14799. const tex = this._tex;
  14800. const outputTexture = this._tex[3];
  14801. // find the minimum power of 2 pot such that pot >= capacity
  14802. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14803. //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
  14804. // find the dimensions of the sorting shaders
  14805. const stride = 1 << LOG2_STRIDE; // must be a power of 2
  14806. //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
  14807. const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
  14808. const numberOfPixels = stride * height;
  14809. // find the dimensions of the output texture
  14810. const newCapacity = Math.min(capacity, clipValue);
  14811. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
  14812. // generate permutation of keypoints
  14813. keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
  14814. let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14815. // sort permutation
  14816. const numPasses = Math.ceil(Math.log2(numberOfPixels));
  14817. keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
  14818. for(let i = 1; i <= numPasses; i++) {
  14819. const blockSize = 1 << i; // 2, 4, 8...
  14820. const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
  14821. permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
  14822. }
  14823. // apply permutation
  14824. keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
  14825. keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
  14826. /*
  14827. // debug (read the contents of the permutation)
  14828. const pixels = permutation.inspect(gpu), debug = [];
  14829. for(let i = 0; i < pixels.length; i += 4) {
  14830. let id = pixels[i] | (pixels[i+1] << 8);
  14831. let score = pixels[i+2] / 255.0;
  14832. let valid = pixels[i+3] / 255.0;
  14833. debug.push([ id, valid, score, ].join(', '));
  14834. }
  14835. console.log(debug);
  14836. */
  14837. // done!
  14838. this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
  14839. }
  14840. }
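/*
 * A small sketch of the pass schedule behind the clipper's GPU sort above. With
 * N = stride * height pixels, blocks of size 2, 4, 8, ... are merged until a single
 * sorted block covers all N entries, which takes ceil(log2(N)) passes. This mirrors the
 * loop over sortMergePermutation; it is not the shader itself.
 */
function mergePassScheduleSketch(numberOfPixels)
{
    const passes = [];
    const numPasses = Math.ceil(Math.log2(numberOfPixels));
    for(let i = 1; i <= numPasses; i++)
        passes.push({ blockSize: 1 << i, dblLog2BlockSize: i << 1 });
    return passes; // e.g. mergePassScheduleSketch(8) => blockSizes 2, 4, 8 (3 passes)
}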
  14841. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
  14842. /*
  14843. * speedy-vision.js
  14844. * GPU-accelerated Computer Vision for JavaScript
  14845. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14846. *
  14847. * Licensed under the Apache License, Version 2.0 (the "License");
  14848. * you may not use this file except in compliance with the License.
  14849. * You may obtain a copy of the License at
  14850. *
  14851. * http://www.apache.org/licenses/LICENSE-2.0
  14852. *
  14853. * Unless required by applicable law or agreed to in writing, software
  14854. * distributed under the License is distributed on an "AS IS" BASIS,
  14855. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14856. * See the License for the specific language governing permissions and
  14857. * limitations under the License.
  14858. *
  14859. * border-clipper.js
  14860. * Keypoint Border Clipper
  14861. */
  14862. /**
14863. * The Border Clipper removes all keypoints that lie within a given border along the edges of an image
  14864. */
  14865. class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode
  14866. {
  14867. /**
  14868. * Constructor
  14869. * @param {string} [name] name of the node
  14870. */
  14871. constructor(name = undefined)
  14872. {
  14873. super(name, 5, [
  14874. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  14875. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  14876. ]);
  14877. /** @type {SpeedySize} image size, in pixels */
  14878. this._imageSize = new SpeedySize(0,0);
  14879. /** @type {SpeedyVector2} border size, in pixels */
  14880. this._borderSize = new SpeedyVector2(0,0);
  14881. }
  14882. /**
  14883. * Image size, in pixels
  14884. * @returns {SpeedySize}
  14885. */
  14886. get imageSize()
  14887. {
  14888. return this._imageSize;
  14889. }
  14890. /**
  14891. * Image size, in pixels
  14892. * @param {SpeedySize} imageSize
  14893. */
  14894. set imageSize(imageSize)
  14895. {
  14896. this._imageSize = imageSize;
  14897. }
  14898. /**
  14899. * Border size, in pixels
  14900. * @returns {SpeedyVector2}
  14901. */
  14902. get borderSize()
  14903. {
  14904. return this._borderSize;
  14905. }
  14906. /**
  14907. * Border size, in pixels
  14908. * @param {SpeedyVector2} borderSize
  14909. */
  14910. set borderSize(borderSize)
  14911. {
  14912. this._borderSize = borderSize;
  14913. }
  14914. /**
  14915. * Run the specific task of this node
  14916. * @param {SpeedyGPU} gpu
  14917. * @returns {void|SpeedyPromise<void>}
  14918. */
  14919. _run(gpu)
  14920. {
  14921. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  14922. const keypoints = gpu.programs.keypoints;
  14923. const imageSize = this._imageSize;
  14924. const borderSize = this._borderSize;
  14925. const imageWidth = imageSize.width, imageHeight = imageSize.height;
  14926. const borderLeft = borderSize.x, borderRight = borderSize.x;
  14927. const borderTop = borderSize.y, borderBottom = borderSize.y;
  14928. const tex = this._tex;
  14929. // validate
  14930. if(imageWidth == 0 || imageHeight == 0)
  14931. throw new utils_errors/* IllegalOperationError */.js(`BorderClipper: did you forget to set the image size?`);
  14932. // find the capacity of the keypoint stream
  14933. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14934. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  14935. // prepare programs
  14936. keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
  14937. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14938. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  14939. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  14940. // clip keypoints
  14941. let clippedKeypoints = keypoints.clipBorder(
  14942. imageWidth, imageHeight,
  14943. borderTop, borderRight, borderBottom, borderLeft,
  14944. encodedKeypoints, descriptorSize, extraSize, encoderLength
  14945. );
  14946. // sort keypoints
  14947. let sortedKeypoints = keypoints.mixKeypointsInit(
  14948. clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  14949. );
  14950. for(let b = 1; b < capacity; b *= 2)
  14951. sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  14952. clippedKeypoints = keypoints.mixKeypointsApply(
  14953. sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength
  14954. );
  14955. /*
  14956. // debug: view keypoints
  14957. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14958. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  14959. */
  14960. // done!
  14961. this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
  14962. }
  14963. }
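/*
 * A CPU-side sketch of the test performed by clipBorder above: a keypoint survives only
 * if it lies at least borderSize.x / borderSize.y pixels away from the image edges.
 * Whether the boundary itself is inclusive is an assumption of this sketch.
 */
function keepKeypointSketch(keypoint, imageSize, borderSize)
{
    return keypoint.x >= borderSize.x && keypoint.x <= imageSize.width - 1 - borderSize.x &&
           keypoint.y >= borderSize.y && keypoint.y <= imageSize.height - 1 - borderSize.y;
}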
  14964. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
  14965. /*
  14966. * speedy-vision.js
  14967. * GPU-accelerated Computer Vision for JavaScript
  14968. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  14969. *
  14970. * Licensed under the Apache License, Version 2.0 (the "License");
  14971. * you may not use this file except in compliance with the License.
  14972. * You may obtain a copy of the License at
  14973. *
  14974. * http://www.apache.org/licenses/LICENSE-2.0
  14975. *
  14976. * Unless required by applicable law or agreed to in writing, software
  14977. * distributed under the License is distributed on an "AS IS" BASIS,
  14978. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14979. * See the License for the specific language governing permissions and
  14980. * limitations under the License.
  14981. *
  14982. * buffer.js
  14983. * Keypoint Buffer
  14984. */
  14985. /**
  14986. * Keypoint Buffer: a node with memory.
  14987. * At time t, it outputs the keypoints received at time t-1
  14988. */
  14989. class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode
  14990. {
  14991. /**
  14992. * Constructor
  14993. * @param {string} [name] name of the node
  14994. */
  14995. constructor(name = undefined)
  14996. {
  14997. super(name, 2, [
  14998. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  14999. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15000. ]);
  15001. /** @type {number} current page: 0 or 1 */
  15002. this._pageIndex = 0;
  15003. /** @type {boolean} first run? */
  15004. this._initialized = false;
  15005. /** @type {number} previous descriptor size, in bytes */
  15006. this._previousDescriptorSize = 0;
  15007. /** @type {number} previous extra size, in bytes */
  15008. this._previousExtraSize = 0;
  15009. /** @type {number} previous encoder length */
  15010. this._previousEncoderLength = 0;
  15011. /** @type {boolean} frozen buffer? */
  15012. this._frozen = false;
  15013. }
  15014. /**
  15015. * A frozen buffer discards the input, effectively increasing the buffering time
  15016. * @returns {boolean}
  15017. */
  15018. get frozen()
  15019. {
  15020. return this._frozen;
  15021. }
  15022. /**
  15023. * A frozen buffer discards the input, effectively increasing the buffering time
  15024. * @param {boolean} value
  15025. */
  15026. set frozen(value)
  15027. {
  15028. this._frozen = Boolean(value);
  15029. }
  15030. /**
  15031. * Releases this node
  15032. * @param {SpeedyGPU} gpu
  15033. */
  15034. release(gpu)
  15035. {
  15036. this._initialized = false;
  15037. super.release(gpu);
  15038. }
  15039. /**
  15040. * Run the specific task of this node
  15041. * @param {SpeedyGPU} gpu
  15042. * @returns {void|SpeedyPromise<void>}
  15043. */
  15044. _run(gpu)
  15045. {
  15046. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15047. const previousDescriptorSize = this._previousDescriptorSize;
  15048. const previousExtraSize = this._previousExtraSize;
  15049. const previousEncoderLength = this._previousEncoderLength;
  15050. const page = this._tex;
  15051. const previousInputTexture = page[1 - this._pageIndex];
  15052. const outputTexture = page[this._pageIndex];
  15053. // bufferize
  15054. if(!this._frozen || !this._initialized) {
  15055. // store input
  15056. this._previousDescriptorSize = descriptorSize;
  15057. this._previousExtraSize = extraSize;
  15058. this._previousEncoderLength = encoderLength;
  15059. previousInputTexture.resize(encoderLength, encoderLength);
  15060. encodedKeypoints.copyTo(previousInputTexture);
  15061. // page flipping
  15062. this._pageIndex = 1 - this._pageIndex;
  15063. }
  15064. // first run?
  15065. if(!this._initialized) {
  15066. this._initialized = true;
  15067. this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
  15068. return;
  15069. }
  15070. // done!
  15071. this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
  15072. }
  15073. }
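/*
 * A plain-value sketch of the page-flipping delay implemented above: at time t the
 * buffer outputs what it received at time t-1, and a frozen buffer simply skips the
 * store-and-flip step. Textures and GPU copies are replaced by ordinary values here.
 */
class OneFrameDelaySketch
{
    constructor() { this._pages = [ null, null ]; this._pageIndex = 0; this._initialized = false; }
    run(input, frozen = false)
    {
        const previousInput = this._pages[this._pageIndex]; // stored on the previous run
        if(!frozen || !this._initialized) {
            this._pages[1 - this._pageIndex] = input; // store the new input in the other page
            this._pageIndex = 1 - this._pageIndex;    // flip pages
        }
        if(!this._initialized) { this._initialized = true; return input; } // first run: nothing buffered yet
        return previousInput;
    }
}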
  15074. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
  15075. /*
  15076. * speedy-vision.js
  15077. * GPU-accelerated Computer Vision for JavaScript
  15078. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15079. *
  15080. * Licensed under the Apache License, Version 2.0 (the "License");
  15081. * you may not use this file except in compliance with the License.
  15082. * You may obtain a copy of the License at
  15083. *
  15084. * http://www.apache.org/licenses/LICENSE-2.0
  15085. *
  15086. * Unless required by applicable law or agreed to in writing, software
  15087. * distributed under the License is distributed on an "AS IS" BASIS,
  15088. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15089. * See the License for the specific language governing permissions and
  15090. * limitations under the License.
  15091. *
  15092. * mixer.js
  15093. * Keypoint Mixer
  15094. */
  15095. /**
  15096. * Keypoint Mixer: merges two sets of keypoints
  15097. */
  15098. class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode
  15099. {
  15100. /**
  15101. * Constructor
  15102. * @param {string} [name] name of the node
  15103. */
  15104. constructor(name = undefined)
  15105. {
  15106. super(name, 5, [
  15107. InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints),
  15108. InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints),
  15109. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15110. ]);
  15111. }
  15112. /**
  15113. * Run the specific task of this node
  15114. * @param {SpeedyGPU} gpu
  15115. * @returns {void|SpeedyPromise<void>}
  15116. */
  15117. _run(gpu)
  15118. {
  15119. const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in0').read() );
  15120. const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in1').read() );
  15121. const descriptorSize = kps0.descriptorSize;
  15122. const extraSize = kps0.extraSize;
  15123. const keypoints = gpu.programs.keypoints;
  15124. const tex = this._tex;
  15125. // ensure that the format of kps0 equals the format of kps1
15126. if(!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize))
  15127. throw new utils_errors/* IllegalOperationError */.js(`Can't merge two sets of keypoints that have different formats`);
  15128. // find the capacity of kps0 + kps1
  15129. const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
  15130. const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
  15131. const capacity = cap0 + cap1;
  15132. // find the dimensions of the output texture
  15133. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  15134. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15135. // prepare programs
  15136. keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
  15137. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15138. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  15139. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  15140. // mix keypoints
  15141. let mixedKeypoints = keypoints.mixKeypointsPreInit(
  15142. kps0.encodedKeypoints, kps1.encodedKeypoints,
  15143. kps0.encoderLength, kps1.encoderLength,
  15144. cap0, cap1,
  15145. descriptorSize,
  15146. extraSize,
  15147. encoderLength
  15148. );
  15149. let sortedKeypoints = keypoints.mixKeypointsInit(
  15150. mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  15151. );
  15152. for(let b = 1; b < capacity; b *= 2)
  15153. sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  15154. mixedKeypoints = keypoints.mixKeypointsApply(
  15155. sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength
  15156. );
  15157. /*
  15158. // debug: view keypoints
  15159. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  15160. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  15161. */
  15162. this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
  15163. }
  15164. }
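/*
 * A CPU-side analogue of the mixer above, shown only to make the format check and the
 * resulting capacity explicit: two keypoint lists can be merged only if they share the
 * same descriptor/extra format, and the merged capacity is the sum of the capacities.
 */
function mixKeypointListsSketch(kps0, kps1)
{
    if(kps0.descriptorSize !== kps1.descriptorSize || kps0.extraSize !== kps1.extraSize)
        throw new Error(`Can't merge two sets of keypoints that have different formats`);
    return kps0.keypoints.concat(kps1.keypoints); // capacity of the result: cap0 + cap1
}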
  15165. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
  15166. /*
  15167. * speedy-vision.js
  15168. * GPU-accelerated Computer Vision for JavaScript
  15169. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15170. *
  15171. * Licensed under the Apache License, Version 2.0 (the "License");
  15172. * you may not use this file except in compliance with the License.
  15173. * You may obtain a copy of the License at
  15174. *
  15175. * http://www.apache.org/licenses/LICENSE-2.0
  15176. *
  15177. * Unless required by applicable law or agreed to in writing, software
  15178. * distributed under the License is distributed on an "AS IS" BASIS,
  15179. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15180. * See the License for the specific language governing permissions and
  15181. * limitations under the License.
  15182. *
  15183. * shuffler.js
  15184. * Keypoint Shuffler
  15185. */
  15186. /**
  15187. * The Keypoint Shuffler shuffles a list of keypoints
  15188. */
  15189. class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode
  15190. {
  15191. /**
  15192. * Constructor
  15193. * @param {string} [name] name of the node
  15194. */
  15195. constructor(name = undefined)
  15196. {
  15197. super(name, 6, [
  15198. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15199. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15200. ]);
  15201. /** @type {number} maximum number of keypoints */
  15202. this._maxKeypoints = Number.NaN;
  15203. }
  15204. /**
  15205. * Maximum number of keypoints (optional)
  15206. * @returns {number}
  15207. */
  15208. get maxKeypoints()
  15209. {
  15210. return this._maxKeypoints;
  15211. }
  15212. /**
  15213. * Maximum number of keypoints (optional)
  15214. * @param {number} value
  15215. */
  15216. set maxKeypoints(value)
  15217. {
  15218. if(!Number.isNaN(value))
  15219. this._maxKeypoints = Math.max(0, value | 0);
  15220. else
  15221. this._maxKeypoints = Number.NaN;
  15222. }
  15223. /**
  15224. * Run the specific task of this node
  15225. * @param {SpeedyGPU} gpu
  15226. * @returns {void|SpeedyPromise<void>}
  15227. */
  15228. _run(gpu)
  15229. {
  15230. let { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15231. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15232. const maxKeypoints = this._maxKeypoints;
  15233. // shuffle the keypoints (including nulls)
  15234. const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
  15235. const permutationLength = Math.min(permutationMaxLength, capacity);
  15236. const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
  15237. encodedKeypoints = (gpu.programs.keypoints.shuffle
  15238. .setUBO('Permutation', permutation)
  15239. .outputs(encoderLength, encoderLength, this._tex[0])
  15240. )(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15241. // sort the keypoints
  15242. gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
  15243. gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
  15244. gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
  15245. let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(
  15246. encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity
  15247. );
  15248. for(let b = 1; b < capacity; b *= 2)
  15249. sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
  15250. encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(
  15251. sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength
  15252. );
  15253. // clip the output?
  15254. if(!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
  15255. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
  15256. encodedKeypoints = (gpu.programs.keypoints.clip
  15257. .outputs(newEncoderLength, newEncoderLength, this._tex[5])
  15258. )(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
  15259. encoderLength = newEncoderLength;
  15260. }
  15261. // done!
  15262. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15263. }
  15264. /**
  15265. * Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
  15266. * @param {number} n positive integer
  15267. * @param {number} [bufsize] size of the output array
  15268. * @returns {Int32Array} permutation
  15269. */
  15270. _generatePermutation(n, bufsize = n)
  15271. {
  15272. const array = new Int32Array(bufsize);
  15273. const p = array.subarray(0, n).fill(-1);
  15274. const q = utils/* Utils.shuffle */.c.shuffle(utils/* Utils.range */.c.range(n));
  15275. for(let i = 0, j = 0; i < n; i++) {
  15276. if(p[i] < 0) {
  15277. do { p[i] = q[j++]; } while(p[i] < i);
  15278. p[p[i]] = i;
  15279. }
  15280. }
  15281. return array; // padded with zeros
  15282. }
  15283. }
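/*
 * A tiny sketch checking the property stated above: the first n entries of the generated
 * permutation form an involution, i.e., p[p[x]] == x for all x, which is what lets the
 * shuffle shader swap keypoints in symmetric pairs.
 */
function isInvolutionSketch(p, n = p.length)
{
    for(let x = 0; x < n; x++) {
        if(p[p[x]] !== x)
            return false;
    }
    return true;
}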
  15284. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
  15285. /*
  15286. * speedy-vision.js
  15287. * GPU-accelerated Computer Vision for JavaScript
  15288. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15289. *
  15290. * Licensed under the Apache License, Version 2.0 (the "License");
  15291. * you may not use this file except in compliance with the License.
  15292. * You may obtain a copy of the License at
  15293. *
  15294. * http://www.apache.org/licenses/LICENSE-2.0
  15295. *
  15296. * Unless required by applicable law or agreed to in writing, software
  15297. * distributed under the License is distributed on an "AS IS" BASIS,
  15298. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15299. * See the License for the specific language governing permissions and
  15300. * limitations under the License.
  15301. *
  15302. * multiplexer.js
  15303. * Keypoint multiplexer
  15304. */
  15305. /** @type {string[]} the names of the input ports indexed by their number */
  15306. const multiplexer_INPUT_PORT = [ 'in0', 'in1' ];
  15307. /**
  15308. * Keypoint multiplexer
  15309. */
  15310. class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode
  15311. {
  15312. /**
  15313. * Constructor
  15314. * @param {string} [name] name of the node
  15315. */
  15316. constructor(name = undefined)
  15317. {
  15318. super(name, 0, [
  15319. ...(multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints))),
  15320. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15321. ]);
  15322. /** @type {number} which port should be linked to the output? */
  15323. this._port = 0;
  15324. }
  15325. /**
  15326. * The number of the port that should be linked to the output
  15327. * @returns {number}
  15328. */
  15329. get port()
  15330. {
  15331. return this._port;
  15332. }
  15333. /**
  15334. * The number of the port that should be linked to the output
  15335. * @param {number} port
  15336. */
  15337. set port(port)
  15338. {
  15339. if(port < 0 || port >= multiplexer_INPUT_PORT.length)
  15340. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid port: ${port}`);
  15341. this._port = port | 0;
  15342. }
  15343. /**
  15344. * Run the specific task of this node
  15345. * @param {SpeedyGPU} gpu
  15346. * @returns {void|SpeedyPromise<void>}
  15347. */
  15348. _run(gpu)
  15349. {
  15350. const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
  15351. this.output().write(message);
  15352. }
  15353. }
  15354. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
  15355. /*
  15356. * speedy-vision.js
  15357. * GPU-accelerated Computer Vision for JavaScript
  15358. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15359. *
  15360. * Licensed under the Apache License, Version 2.0 (the "License");
  15361. * you may not use this file except in compliance with the License.
  15362. * You may obtain a copy of the License at
  15363. *
  15364. * http://www.apache.org/licenses/LICENSE-2.0
  15365. *
  15366. * Unless required by applicable law or agreed to in writing, software
  15367. * distributed under the License is distributed on an "AS IS" BASIS,
  15368. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15369. * See the License for the specific language governing permissions and
  15370. * limitations under the License.
  15371. *
  15372. * transformer.js
  15373. * Apply a transformation matrix to a set of keypoints
  15374. */
  15375. /**
  15376. * Apply a transformation matrix to a set of keypoints
  15377. */
  15378. class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode
  15379. {
  15380. /**
  15381. * Constructor
  15382. * @param {string} [name] name of the node
  15383. */
  15384. constructor(name = undefined)
  15385. {
  15386. super(name, 1, [
  15387. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15388. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  15389. ]);
  15390. /** @type {SpeedyMatrix} transformation matrix */
  15391. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  15392. }
  15393. /**
  15394. * Transformation matrix
  15395. * @returns {SpeedyMatrix}
  15396. */
  15397. get transform()
  15398. {
  15399. return this._transform;
  15400. }
  15401. /**
  15402. * Transformation matrix. Must be 3x3
  15403. * @param {SpeedyMatrix} transform
  15404. */
  15405. set transform(transform)
  15406. {
  15407. if(!(transform.rows == 3 && transform.columns == 3))
  15408. throw new utils_errors/* IllegalArgumentError */.mG(`Not a 3x3 transformation matrix: ${transform}`);
  15409. this._transform = transform;
  15410. }
  15411. /**
  15412. * Run the specific task of this node
  15413. * @param {SpeedyGPU} gpu
  15414. * @returns {void|SpeedyPromise<void>}
  15415. */
  15416. _run(gpu)
  15417. {
  15418. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  15419. const outputTexture = this._tex[0];
  15420. const homography = this._transform.read();
  15421. // apply homography
  15422. (gpu.programs.keypoints.applyHomography
  15423. .outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)
  15424. )(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15425. // done!
  15426. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15427. }
  15428. }
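/*
 * A per-keypoint sketch of what applyHomography computes: multiply the 3x3 matrix by
 * (x, y, 1) and divide by the homogeneous coordinate. Column-major storage of the nine
 * matrix entries is an assumption of this sketch; the shader reads the entries from
 * this._transform.read().
 */
function applyHomographySketch(h, x, y) // h: 9 entries, assumed column-major
{
    const hx = h[0] * x + h[3] * y + h[6];
    const hy = h[1] * x + h[4] * y + h[7];
    const hw = h[2] * x + h[5] * y + h[8];
    return { x: hx / hw, y: hy / hw };
}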
  15429. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
  15430. /*
  15431. * speedy-vision.js
  15432. * GPU-accelerated Computer Vision for JavaScript
  15433. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15434. *
  15435. * Licensed under the Apache License, Version 2.0 (the "License");
  15436. * you may not use this file except in compliance with the License.
  15437. * You may obtain a copy of the License at
  15438. *
  15439. * http://www.apache.org/licenses/LICENSE-2.0
  15440. *
  15441. * Unless required by applicable law or agreed to in writing, software
  15442. * distributed under the License is distributed on an "AS IS" BASIS,
  15443. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15444. * See the License for the specific language governing permissions and
  15445. * limitations under the License.
  15446. *
  15447. * subpixel.js
  15448. * Subpixel refinement of keypoint location
  15449. */
  15450. /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
  15451. /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
  15452. const METHOD2PROGRAM = Object.freeze({
  15453. 'quadratic1d': 'subpixelQuadratic1d',
  15454. 'taylor2d': 'subpixelTaylor2d',
  15455. 'bicubic-upsample': 'subpixelBicubic',
  15456. 'bilinear-upsample': 'subpixelBilinear',
  15457. });
  15458. /**
  15459. * Subpixel refinement of keypoint location
  15460. */
  15461. class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode
  15462. {
  15463. /**
  15464. * Constructor
  15465. * @param {string} [name] name of the node
  15466. */
  15467. constructor(name = undefined)
  15468. {
  15469. super(name, 2, [
  15470. InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(
  15471. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15472. msg.format === types/* ImageFormat.GREY */.D3.GREY
  15473. ),
  15474. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints),
  15475. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15476. OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2),
  15477. ]);
  15478. /** @type {SubpixelRefinementMethod} subpixel refinement method */
  15479. this._method = 'quadratic1d';
  15480. /** @type {number} max iterations for the upsampling methods */
  15481. this._maxIterations = 6;
  15482. /** @type {number} convergence threshold for the upsampling methods */
  15483. this._epsilon = 0.1;
  15484. }
  15485. /**
  15486. * Subpixel refinement method
  15487. * @returns {SubpixelRefinementMethod}
  15488. */
  15489. get method()
  15490. {
  15491. return this._method;
  15492. }
  15493. /**
  15494. * Subpixel refinement method
  15495. * @param {SubpixelRefinementMethod} name
  15496. */
  15497. set method(name)
  15498. {
  15499. if(!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name))
  15500. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid method: "${name}"`);
  15501. this._method = name;
  15502. }
  15503. /**
  15504. * Max. iterations for the upsampling methods
  15505. * @returns {number}
  15506. */
  15507. get maxIterations()
  15508. {
  15509. return this._maxIterations;
  15510. }
  15511. /**
  15512. * Max. iterations for the upsampling methods
  15513. * @param {number} value
  15514. */
  15515. set maxIterations(value)
  15516. {
  15517. this._maxIterations = Math.max(0, +value);
  15518. }
  15519. /**
  15520. * Convergence threshold for the upsampling methods
  15521. * @returns {number}
  15522. */
  15523. get epsilon()
  15524. {
  15525. return this._epsilon;
  15526. }
  15527. /**
  15528. * Convergence threshold for the upsampling methods
  15529. * @param {number} value
  15530. */
  15531. set epsilon(value)
  15532. {
  15533. this._epsilon = Math.max(0, +value);
  15534. }
  15535. /**
  15536. * Run the specific task of this node
  15537. * @param {SpeedyGPU} gpu
  15538. * @returns {void|SpeedyPromise<void>}
  15539. */
  15540. _run(gpu)
  15541. {
  15542. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  15543. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('image').read() );
  15544. const tex = this._tex;
  15545. const program = METHOD2PROGRAM[this._method];
  15546. const maxIterations = this._maxIterations;
  15547. const epsilon = this._epsilon;
  15548. // note: if you detected the keypoints using a pyramid,
  15549. // you need to pass that pyramid as input!
  15550. // we'll compute the offsets for each keypoint
  15551. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15552. const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
  15553. const offsets = (gpu.programs.keypoints[program]
  15554. .outputs(offsetEncoderLength, offsetEncoderLength, tex[0])
  15555. )(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
  15556. // apply the offsets to the keypoints
  15557. const refinedKeypoints = (gpu.programs.keypoints.transferFlow
  15558. .outputs(encoderLength, encoderLength, tex[1])
  15559. )(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15560. // done!
  15561. this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
  15562. this.output('displacements').swrite(offsets);
  15563. }
  15564. }
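/*
 * A textbook sketch of 1D quadratic (parabolic) peak refinement, the idea behind the
 * 'quadratic1d' method above: fit a parabola to the responses at x = -1, 0, +1 and take
 * the offset of its extremum. The actual shader may differ in details and clamping.
 */
function quadraticPeakOffsetSketch(fm1, f0, fp1)
{
    const denominator = fm1 - 2 * f0 + fp1;
    return denominator !== 0 ? 0.5 * (fm1 - fp1) / denominator : 0; // ideally in (-1, 1)
}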
  15565. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
  15566. /*
  15567. * speedy-vision.js
  15568. * GPU-accelerated Computer Vision for JavaScript
  15569. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15570. *
  15571. * Licensed under the Apache License, Version 2.0 (the "License");
  15572. * you may not use this file except in compliance with the License.
  15573. * You may obtain a copy of the License at
  15574. *
  15575. * http://www.apache.org/licenses/LICENSE-2.0
  15576. *
  15577. * Unless required by applicable law or agreed to in writing, software
  15578. * distributed under the License is distributed on an "AS IS" BASIS,
  15579. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15580. * See the License for the specific language governing permissions and
  15581. * limitations under the License.
  15582. *
  15583. * fast.js
  15584. * FAST corner detector
  15585. */
  15586. // Constants
  15587. const DEFAULT_THRESHOLD = 20;
  15588. /**
  15589. * FAST corner detector
  15590. */
  15591. class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector
  15592. {
  15593. /**
  15594. * Constructor
  15595. * @param {string} [name] name of the node
  15596. */
  15597. constructor(name = undefined)
  15598. {
  15599. super(name, 5, [
  15600. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  15601. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15602. msg.format === types/* ImageFormat.GREY */.D3.GREY
  15603. ),
  15604. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15605. ]);
  15606. /** @type {number} FAST threshold in [0,255] */
  15607. this._threshold = DEFAULT_THRESHOLD;
  15608. }
  15609. /**
  15610. * FAST threshold in [0,255]
  15611. * @returns {number}
  15612. */
  15613. get threshold()
  15614. {
  15615. return this._threshold;
  15616. }
  15617. /**
  15618. * FAST threshold in [0,255]
  15619. * @param {number} threshold
  15620. */
  15621. set threshold(threshold)
  15622. {
  15623. this._threshold = Math.max(0, Math.min(threshold | 0, 255));
  15624. }
  15625. /**
  15626. * Run the specific task of this node
  15627. * @param {SpeedyGPU} gpu
  15628. * @returns {void|SpeedyPromise<void>}
  15629. */
  15630. _run(gpu)
  15631. {
  15632. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  15633. const width = image.width, height = image.height;
  15634. const tex = this._tex;
  15635. const capacity = this._capacity;
  15636. const threshold = this._threshold;
  15637. const lodStep = Math.log2(this.scaleFactor);
  15638. const levels = this.levels;
  15639. // validate pyramid
  15640. if(!(levels == 1 || image.hasMipmaps()))
  15641. throw new utils_errors/* IllegalOperationError */.js(`Expected a pyramid in ${this.fullName}`);
  15642. // skip if the capacity is zero
  15643. if(capacity == 0) {
  15644. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
  15645. const encoderLength = encodedKeypoints.width;
  15646. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  15647. return;
  15648. }
  15649. // FAST
  15650. gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
  15651. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
  15652. let corners = tex[1].clear();
  15653. let numPasses = Math.max(1, Math.min(levels, (globals.PYRAMID_MAX_LEVELS / lodStep) | 0));
  15654. for(let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  15655. corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
  15656. //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  15657. }
  15658. // Same-scale non-maximum suppression
  15659. // *nicer results inside the loop; faster outside
  15660. // Hard to notice a difference when using FAST
  15661. corners = gpu.programs.keypoints.nonmaxSpace(corners);
  15662. // Multi-scale non-maximum suppression
  15663. // (doesn't seem to remove many keypoints)
  15664. if(levels > 1) {
  15665. corners = (gpu.programs.keypoints.nonmaxScaleSimple
  15666. .outputs(width, height, tex[1])
  15667. )(corners, image, lodStep);
  15668. }
  15669. // encode keypoints
  15670. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
  15671. const encoderLength = encodedKeypoints.width;
  15672. // scale refinement
  15673. if(levels > 1) {
  15674. encodedKeypoints = (gpu.programs.keypoints.refineScaleFAST916
  15675. .outputs(encoderLength, encoderLength, tex[4])
  15676. )(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
  15677. }
  15678. // done!
  15679. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  15680. }
  15681. }
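/*
 * A sketch of the level-of-detail schedule driven by the loop above: with a scale factor
 * s, lodStep = log2(s), and the detector runs once per pyramid level from the coarsest
 * level down to lod = 0. PYRAMID_MAX_LEVELS is passed in explicitly here.
 */
function lodScheduleSketch(levels, scaleFactor, pyramidMaxLevels)
{
    const lodStep = Math.log2(scaleFactor);
    const numPasses = Math.max(1, Math.min(levels, (pyramidMaxLevels / lodStep) | 0));
    const lods = [];
    for(let lod = lodStep * (numPasses - 1); lods.length < numPasses; lod -= lodStep)
        lods.push(lod);
    return lods; // e.g. lodScheduleSketch(3, 2, 8) => [ 2, 1, 0 ]
}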
  15682. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
  15683. /*
  15684. * speedy-vision.js
  15685. * GPU-accelerated Computer Vision for JavaScript
  15686. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15687. *
  15688. * Licensed under the Apache License, Version 2.0 (the "License");
  15689. * you may not use this file except in compliance with the License.
  15690. * You may obtain a copy of the License at
  15691. *
  15692. * http://www.apache.org/licenses/LICENSE-2.0
  15693. *
  15694. * Unless required by applicable law or agreed to in writing, software
  15695. * distributed under the License is distributed on an "AS IS" BASIS,
  15696. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15697. * See the License for the specific language governing permissions and
  15698. * limitations under the License.
  15699. *
  15700. * harris.js
  15701. * Harris corner detector
  15702. */
15703. /** Maps the window size to the name of the corresponding Harris program */
  15704. const HARRIS = Object.freeze({
  15705. 1: 'harris1',
  15706. 3: 'harris3',
  15707. 5: 'harris5',
  15708. 7: 'harris7',
  15709. });
  15710. /**
  15711. * Harris corner detector
  15712. */
  15713. class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector
  15714. {
  15715. /**
  15716. * Constructor
  15717. * @param {string} [name] name of the node
  15718. */
  15719. constructor(name = undefined)
  15720. {
  15721. super(name, 6, [
  15722. InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(
  15723. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15724. msg.format === types/* ImageFormat.GREY */.D3.GREY
  15725. ),
  15726. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15727. ]);
  15728. /** @type {SpeedySize} neighborhood size */
  15729. this._windowSize = new SpeedySize(3, 3);
  15730. /** @type {number} min corner quality in [0,1] */
  15731. this._quality = 0.1;
  15732. }
  15733. /**
  15734. * Minimum corner quality in [0,1] - this is a fraction of
  15735. * the largest min. eigenvalue of the autocorrelation matrix
  15736. * over the entire image
  15737. * @returns {number}
  15738. */
  15739. get quality()
  15740. {
  15741. return this._quality;
  15742. }
  15743. /**
  15744. * Minimum corner quality in [0,1]
  15745. * @param {number} quality
  15746. */
  15747. set quality(quality)
  15748. {
  15749. this._quality = Math.max(0.0, Math.min(+quality, 1.0));
  15750. }
  15751. /**
  15752. * Neighborhood size
  15753. * @returns {SpeedySize}
  15754. */
  15755. get windowSize()
  15756. {
  15757. return this._windowSize;
  15758. }
  15759. /**
  15760. * Neighborhood size
  15761. * @param {SpeedySize} windowSize
  15762. */
  15763. set windowSize(windowSize)
  15764. {
  15765. const d = windowSize.width;
  15766. if(!((d == windowSize.height) && (d == 1 || d == 3 || d == 5 || d == 7)))
  15767. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
  15768. this._windowSize = windowSize;
  15769. }
  15770. /**
  15771. * Run the specific task of this node
  15772. * @param {SpeedyGPU} gpu
  15773. * @returns {void|SpeedyPromise<void>}
  15774. */
  15775. _run(gpu)
  15776. {
  15777. const { image, format } = /** @type {SpeedyPipelineMessageWithImage} */ ( this.input().read() );
  15778. const width = image.width, height = image.height;
  15779. const capacity = this._capacity;
  15780. const quality = this._quality;
  15781. const windowSize = this._windowSize.width;
  15782. const levels = this.levels;
  15783. const lodStep = Math.log2(this.scaleFactor);
  15784. const intFactor = levels > 1 ? this.scaleFactor : 1;
  15785. const harris = gpu.programs.keypoints[HARRIS[windowSize]];
  15786. const tex = this._tex;
  15787. // validate pyramid
  15788. if(!(levels == 1 || image.hasMipmaps()))
  15789. throw new utils_errors/* IllegalOperationError */.js(`Expected a pyramid in ${this.fullName}`);
  15790. // skip if the capacity is zero
  15791. if(capacity == 0) {
  15792. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
  15793. const encoderLength = encodedKeypoints.width;
  15794. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  15795. return;
  15796. }
  15797. // compute corner response map
  15798. harris.outputs(width, height, tex[0], tex[1]);
  15799. gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
  15800. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
  15801. let corners = tex[1].clear();
  15802. let numPasses = Math.max(1, Math.min(levels, (globals.PYRAMID_MAX_LEVELS / lodStep) | 0));
  15803. for(let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  15804. const gaussian = utils/* Utils.gaussianKernel */.c.gaussianKernel(intFactor * (1 + lod), windowSize);
  15805. const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
  15806. corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
  15807. corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  15808. }
  15809. // Same-scale non-maximum suppression
  15810. // *performs better inside the loop
  15811. //corners = gpu.programs.keypoints.nonmaxSpace(corners);
  15812. // Multi-scale non-maximum suppression
  15813. // (doesn't seem to remove many keypoints)
  15814. if(levels > 1) {
  15815. const laplacian = (gpu.programs.keypoints.laplacian
  15816. .outputs(width, height, tex[0])
  15817. )(corners, image, lodStep, 0);
  15818. corners = (gpu.programs.keypoints.nonmaxScale
  15819. .outputs(width, height, tex[2])
  15820. )(corners, image, laplacian, lodStep);
  15821. }
  15822. // find the maximum corner response over the entire image
  15823. gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
  15824. numPasses = Math.ceil(Math.log2(Math.max(width, height)));
  15825. let maxScore = corners;
  15826. for(let j = 0; j < numPasses; j++)
  15827. maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
  15828. // discard corners below a quality level
  15829. corners = (gpu.programs.keypoints.harrisScoreCutoff
  15830. .outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])
  15831. )(corners, maxScore, quality);
  15832. // encode keypoints
  15833. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
  15834. const encoderLength = encodedKeypoints.width;
  15835. // scale refinement
  15836. if(levels > 1) {
  15837. encodedKeypoints = (gpu.programs.keypoints.refineScaleLoG
  15838. .outputs(encoderLength, encoderLength, tex[5])
  15839. )(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
  15840. }
  15841. // done!
  15842. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  15843. }
  15844. }
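/*
 * A CPU-side analogue of harrisScoreFindMax + harrisScoreCutoff above: find the maximum
 * corner response over the whole image and keep only corners whose response is at least
 * `quality` times that maximum.
 */
function cutoffByQualitySketch(corners, quality) // corners: [{ x, y, score }]
{
    const maxScore = corners.reduce((max, corner) => Math.max(max, corner.score), 0);
    return corners.filter(corner => corner.score >= quality * maxScore);
}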
  15845. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
  15846. /*
  15847. * speedy-vision.js
  15848. * GPU-accelerated Computer Vision for JavaScript
  15849. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15850. *
  15851. * Licensed under the Apache License, Version 2.0 (the "License");
  15852. * you may not use this file except in compliance with the License.
  15853. * You may obtain a copy of the License at
  15854. *
  15855. * http://www.apache.org/licenses/LICENSE-2.0
  15856. *
  15857. * Unless required by applicable law or agreed to in writing, software
  15858. * distributed under the License is distributed on an "AS IS" BASIS,
  15859. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15860. * See the License for the specific language governing permissions and
  15861. * limitations under the License.
  15862. *
  15863. * descriptor.js
  15864. * Abstract keypoint descriptor
  15865. */
  15866. /**
  15867. * Abstract keypoint descriptor
  15868. * @abstract
  15869. */
  15870. class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode
  15871. {
  15872. /**
  15873. * Constructor
  15874. * @param {string} [name] name of the node
  15875. * @param {number} [texCount] number of work textures
  15876. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  15877. */
  15878. constructor(name = undefined, texCount = 0, portBuilders = undefined)
  15879. {
  15880. super(name, texCount + 1, portBuilders);
  15881. }
  15882. /**
  15883. *
  15884. * Allocate space for keypoint descriptors
  15885. * @param {SpeedyGPU} gpu
  15886. * @param {number} inputDescriptorSize should be 0
  15887. * @param {number} inputExtraSize must be non-negative
  15888. * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
  15889. * @param {number} outputExtraSize must be inputExtraSize
  15890. * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
  15891. * @returns {SpeedyDrawableTexture} encodedKeypoints
  15892. */
  15893. _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints)
  15894. {
  15895. utils/* Utils.assert */.c.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
  15896. utils/* Utils.assert */.c.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
  15897. const inputEncoderLength = inputEncodedKeypoints.width;
  15898. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  15899. const outputEncoderCapacity = inputEncoderCapacity;
  15900. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  15901. const tex = this._tex[this._tex.length - 1];
  15902. return (gpu.programs.keypoints.allocateDescriptors
  15903. .outputs(outputEncoderLength, outputEncoderLength, tex)
  15904. )(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  15905. }
  15906. }
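/*
 * A hedged sketch of the relationship between capacity and encoder length that the
 * allocation above relies on: each encoded keypoint occupies (HEADER + descriptorSize +
 * extraSize) bytes at 4 bytes per pixel, packed into a square texture. HEADER = 8 is a
 * hypothetical placeholder here, not necessarily the library's actual header size.
 */
function encoderLengthSketch(capacity, descriptorSize, extraSize, HEADER = 8)
{
    const pixelsPerKeypoint = Math.ceil((HEADER + descriptorSize + extraSize) / 4);
    return Math.max(1, Math.ceil(Math.sqrt(capacity * pixelsPerKeypoint)));
}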
  15907. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
  15908. /*
  15909. * speedy-vision.js
  15910. * GPU-accelerated Computer Vision for JavaScript
  15911. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15912. *
  15913. * Licensed under the Apache License, Version 2.0 (the "License");
  15914. * you may not use this file except in compliance with the License.
  15915. * You may obtain a copy of the License at
  15916. *
  15917. * http://www.apache.org/licenses/LICENSE-2.0
  15918. *
  15919. * Unless required by applicable law or agreed to in writing, software
  15920. * distributed under the License is distributed on an "AS IS" BASIS,
  15921. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15922. * See the License for the specific language governing permissions and
  15923. * limitations under the License.
  15924. *
  15925. * orb.js
  15926. * ORB descriptors
  15927. */
  15928. // Constants
  15929. const DESCRIPTOR_SIZE = 32; // 256 bits
  15930. /**
  15931. * ORB descriptors
  15932. */
  15933. class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor
  15934. {
  15935. /**
  15936. * Constructor
  15937. * @param {string} [name] name of the node
  15938. */
  15939. constructor(name = undefined)
  15940. {
  15941. super(name, 3, [
  15942. InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(
  15943. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  15944. msg.format === types/* ImageFormat.GREY */.D3.GREY
  15945. ),
  15946. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints),
  15947. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  15948. ]);
  15949. }
  15950. /**
  15951. * Run the specific task of this node
  15952. * @param {SpeedyGPU} gpu
  15953. * @returns {void|SpeedyPromise<void>}
  15954. */
  15955. _run(gpu)
  15956. {
  15957. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  15958. const image = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('image').read() ) ).image;
  15959. const tex = this._tex;
  15960. const outputTexture = this._tex[2];
  15961. // compute orientation
  15962. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15963. const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
  15964. const encodedOrientations = (gpu.programs.keypoints.orbOrientation
  15965. .outputs(orientationEncoderLength, orientationEncoderLength, tex[0])
  15966. )(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15967. const orientedKeypoints = (gpu.programs.keypoints.transferOrientation
  15968. .outputs(encoderLength, encoderLength, tex[1])
  15969. )(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15970. // allocate space
  15971. const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
  15972. const newEncoderLength = encodedKps.width;
  15973. // compute descriptors (it's a good idea to blur the image)
  15974. const describedKeypoints = (gpu.programs.keypoints.orbDescriptor
  15975. .outputs(newEncoderLength, newEncoderLength, outputTexture)
  15976. )(image, encodedKps, extraSize, newEncoderLength);
  15977. // done!
  15978. this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
  15979. }
  15980. }
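/*
 * ORB descriptors produced above are 32 bytes (256 bits) long and are typically compared
 * with the Hamming distance. This bit-counting sketch is generic and not tied to any
 * particular matcher in this library.
 */
function hammingDistanceSketch(descriptorA, descriptorB) // two Uint8Array(32)
{
    let distance = 0;
    for(let i = 0; i < descriptorA.length; i++) {
        let xor = descriptorA[i] ^ descriptorB[i];
        while(xor) { distance += xor & 1; xor >>>= 1; }
    }
    return distance;
}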
  15981. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
  15982. /*
  15983. * speedy-vision.js
  15984. * GPU-accelerated Computer Vision for JavaScript
  15985. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  15986. *
  15987. * Licensed under the Apache License, Version 2.0 (the "License");
  15988. * you may not use this file except in compliance with the License.
  15989. * You may obtain a copy of the License at
  15990. *
  15991. * http://www.apache.org/licenses/LICENSE-2.0
  15992. *
  15993. * Unless required by applicable law or agreed to in writing, software
  15994. * distributed under the License is distributed on an "AS IS" BASIS,
  15995. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15996. * See the License for the specific language governing permissions and
  15997. * limitations under the License.
  15998. *
  15999. * lk.js
  16000. * LK optical-flow
  16001. */
  16002. // Constants
  16003. const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
  16004. const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
  16005. const DEFAULT_NUMBER_OF_ITERATIONS = 30;
  16006. const DEFAULT_DISCARD_THRESHOLD = 0.0001;
  16007. const DEFAULT_EPSILON = 0.01;
  16008. const LK_PROGRAM = {
  16009. 3: 'lk3',
  16010. 5: 'lk5',
  16011. 7: 'lk7',
  16012. 9: 'lk9',
  16013. 11: 'lk11',
  16014. 13: 'lk13',
  16015. 15: 'lk15',
  16016. 17: 'lk17',
  16017. 19: 'lk19',
  16018. 21: 'lk21',
  16019. };
  16020. /**
  16021. * LK optical-flow
  16022. */
  16023. class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode
  16024. {
  16025. /**
  16026. * Constructor
  16027. * @param {string} [name] name of the node
  16028. */
  16029. constructor(name = undefined)
  16030. {
  16031. super(name, 3, [
  16032. InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(
  16033. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16034. msg.format === types/* ImageFormat.GREY */.D3.GREY
  16035. ),
  16036. InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(
  16037. ( /** @type {SpeedyPipelineMessageWithImage} */ msg ) =>
  16038. msg.format === types/* ImageFormat.GREY */.D3.GREY
  16039. ),
  16040. InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints),
  16041. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16042. OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2),
  16043. ]);
  16044. /** @type {SpeedySize} window size */
  16045. this._windowSize = DEFAULT_WINDOW_SIZE;
  16046. /** @type {number} number of pyramid levels to use */
  16047. this._levels = DEFAULT_DEPTH;
  16048. /** @type {number} minimum acceptable corner response */
  16049. this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
  16050. /** @type {number} number of iterations per pyramid level (termination criteria) */
  16051. this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
  16052. /** @type {number} minimum increment per iteration (termination criteria) */
  16053. this._epsilon = DEFAULT_EPSILON;
  16054. }
  16055. /**
  16056. * Window size (use odd numbers)
  16057. * @returns {SpeedySize}
  16058. */
  16059. get windowSize()
  16060. {
  16061. return this._windowSize;
  16062. }
  16063. /**
  16064. * Window size (use odd numbers)
  16065. * @param {SpeedySize} windowSize must be a square window
  16066. */
  16067. set windowSize(windowSize)
  16068. {
  16069. if(windowSize.width != windowSize.height) {
16070. throw new utils_errors/* NotSupportedError */.B8(`LK: window ${windowSize.toString()} is not square!`);
  16071. }
  16072. else if(!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
  16073. const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a,b) => a-b).map(k => k+'x'+k).join(', ');
16074. throw new utils_errors/* NotSupportedError */.B8(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
  16075. }
  16076. this._windowSize = windowSize;
  16077. }
  16078. /**
  16079. * Number of pyramid levels to use
  16080. * @returns {number}
  16081. */
  16082. get levels()
  16083. {
  16084. return this._levels;
  16085. }
  16086. /**
  16087. * Number of pyramid levels to use
  16088. * @param {number} levels
  16089. */
  16090. set levels(levels)
  16091. {
  16092. utils/* Utils.assert */.c.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
  16093. this._levels = levels | 0;
  16094. }
  16095. /**
  16096. * Get the discard threshold, used to discard "bad" keypoints
  16097. * @returns {number}
  16098. */
  16099. get discardThreshold()
  16100. {
  16101. return this._discardThreshold;
  16102. }
  16103. /**
  16104. * Set the discard threshold, used to discard "bad" keypoints
  16105. * @param {number} value typically 10^(-4) - increase to discard more
  16106. */
  16107. set discardThreshold(value)
  16108. {
  16109. utils/* Utils.assert */.c.assert(value >= 0);
  16110. this._discardThreshold = +value;
  16111. }
  16112. /**
  16113. * Get the maximum number of iterations of the pyramidal LK algorithm
  16114. * @returns {number}
  16115. */
  16116. get numberOfIterations()
  16117. {
  16118. return this._numberOfIterations;
  16119. }
  16120. /**
  16121. * Set the maximum number of iterations of the pyramidal LK algorithm
  16122. * @param {number} value
  16123. */
  16124. set numberOfIterations(value)
  16125. {
  16126. utils/* Utils.assert */.c.assert(value >= 1);
  16127. this._numberOfIterations = value | 0;
  16128. }
  16129. /**
  16130. * Get the accuracy threshold, used to stop LK iterations
  16131. * @returns {number}
  16132. */
  16133. get epsilon()
  16134. {
  16135. return this._epsilon;
  16136. }
  16137. /**
16138. * Set the accuracy threshold, used to stop LK iterations
  16139. * @param {number} value typically 0.01
  16140. */
  16141. set epsilon(value)
  16142. {
  16143. utils/* Utils.assert */.c.assert(value >= 0);
  16144. this._epsilon = +value;
  16145. }
  16146. /**
  16147. * Run the specific task of this node
  16148. * @param {SpeedyGPU} gpu
  16149. * @returns {void|SpeedyPromise<void>}
  16150. */
  16151. _run(gpu)
  16152. {
  16153. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('previousKeypoints').read() );
  16154. const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('previousImage').read() )).image;
  16155. const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */ ( this.input('nextImage').read() )).image;
  16156. const previousKeypoints = encodedKeypoints;
  16157. const levels = this._levels;
  16158. const windowSize = this._windowSize;
  16159. const wsize = windowSize.width; // square window
  16160. const numberOfIterations = this._numberOfIterations;
  16161. const discardThreshold = this._discardThreshold;
  16162. const epsilon = this._epsilon;
  16163. const keypoints = gpu.programs.keypoints;
  16164. const tex = this._tex;
  16165. // do we need a pyramid?
  16166. if(!(levels == 1 || (previousImage.hasMipmaps() && nextImage.hasMipmaps())))
  16167. throw new utils_errors/* IllegalOperationError */.js(`LK: a pyramid is required if levels > 1`);
  16168. else if(previousImage.width !== nextImage.width || previousImage.height !== nextImage.height)
  16169. throw new utils_errors/* IllegalOperationError */.js(`LK: can't use input images of different size`);
  16170. // select the appropriate program
  16171. const lk = keypoints[LK_PROGRAM[wsize]];
  16172. // find the dimensions of the flow texture (1 pixel per flow vector)
  16173. const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16174. const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
  16175. lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
  16176. // compute optical-flow
  16177. let flow = lk.clear();
  16178. for(let lod = levels - 1; lod >= 0; lod--)
  16179. flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
  16180. // transfer optical-flow to nextKeypoints
  16181. keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
  16182. const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
  16183. // done!
  16184. this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
  16185. this.output('flow').swrite(flow);
  16186. }
  16187. }
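/*
 * Usage sketch (not part of the bundle): tracking keypoints from the previous frame with
 * the LK node above. The port names ('previousImage', 'nextImage', 'previousKeypoints',
 * 'flow') come from the node definition; the portal-based frame delay and the Speedy.*
 * factories (including Speedy.Size) are assumptions about the public API.
 *
 *   const lk = Speedy.Keypoint.Tracker.LK();
 *   lk.windowSize = Speedy.Size(15, 15);   // must be one of the supported square windows (3x3 ... 21x21)
 *   lk.levels = 3;                         // levels > 1 requires pyramids on both input images
 *
 *   // current (pyramid) image and the image kept from the previous run
 *   pyramid.output().connectTo(lk.input('nextImage'));
 *   previousImagePortal.output().connectTo(lk.input('previousImage'));
 *   previousKeypointsPortal.output().connectTo(lk.input('previousKeypoints'));
 *
 *   // tracked keypoints and per-keypoint flow vectors
 *   lk.output().connectTo(keypointSink.input());
 *   lk.output('flow').connectTo(flowSink.input());   // flowSink: Speedy.Vector2.Sink()
 */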
  16188. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
  16189. /*
  16190. * speedy-vision.js
  16191. * GPU-accelerated Computer Vision for JavaScript
  16192. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16193. *
  16194. * Licensed under the Apache License, Version 2.0 (the "License");
  16195. * you may not use this file except in compliance with the License.
  16196. * You may obtain a copy of the License at
  16197. *
  16198. * http://www.apache.org/licenses/LICENSE-2.0
  16199. *
  16200. * Unless required by applicable law or agreed to in writing, software
  16201. * distributed under the License is distributed on an "AS IS" BASIS,
  16202. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16203. * See the License for the specific language governing permissions and
  16204. * limitations under the License.
  16205. *
  16206. * lsh-static-tables.js
  16207. * Static LSH tables
  16208. */
  16209. /**
  16210. * Static LSH tables
  16211. */
  16212. class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode
  16213. {
  16214. /**
  16215. * Constructor
  16216. * @param {string} [name] name of the node
  16217. */
  16218. constructor(name = undefined)
  16219. {
  16220. super(name, 2, [
  16221. OutputPort().expects(SpeedyPipelineMessageType.LSHTables)
  16222. ]);
  16223. /** @type {SpeedyKeypoint[]} "training" keypoints */
  16224. this._keypoints = [];
  16225. /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
  16226. this._keypointsCopy = [];
  16227. /** @type {number} number of tables in the LSH data structure */
  16228. this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
  16229. /** @type {number} number of bits of a hash */
  16230. this._hashSize = LSH_DEFAULT_HASH_SIZE;
  16231. /** @type {SpeedyLSH|null} LSH data structure */
  16232. this._lsh = null;
  16233. }
  16234. /**
  16235. * "Training" keypoints
  16236. * @returns {SpeedyKeypoint[]}
  16237. */
  16238. get keypoints()
  16239. {
  16240. return this._keypoints;
  16241. }
  16242. /**
  16243. * "Training" keypoints
  16244. * @param {SpeedyKeypoint[]} keypoints
  16245. */
  16246. set keypoints(keypoints)
  16247. {
  16248. if(!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint)))
  16249. throw new utils_errors/* IllegalArgumentError */.mG(`Static LSH tables: an invalid set of keypoints has been provided`);
  16250. if(this._keypoints !== keypoints) {
  16251. this._keypoints = keypoints; // update internal pointer
  16252. this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
  16253. this._lsh = null; // (re)train the model
  16254. }
  16255. }
  16256. /**
  16257. * Number of tables in the LSH data structure
  16258. * @returns {number}
  16259. */
  16260. get numberOfTables()
  16261. {
  16262. return this._numberOfTables;
  16263. }
  16264. /**
  16265. * Number of tables in the LSH data structure
  16266. * @param {number} n
  16267. */
  16268. set numberOfTables(n)
  16269. {
  16270. if(!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n))
  16271. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
  16272. if(n !== this._numberOfTables) {
  16273. this._numberOfTables = n | 0;
  16274. this._lsh = null; // need to retrain the model
  16275. }
  16276. }
  16277. /**
  16278. * Number of bits of a hash
  16279. * @returns {number}
  16280. */
  16281. get hashSize()
  16282. {
  16283. return this._hashSize;
  16284. }
  16285. /**
  16286. * Number of bits of a hash
  16287. * @param {number} h
  16288. */
  16289. set hashSize(h)
  16290. {
  16291. if(!LSH_ACCEPTABLE_HASH_SIZES.includes(h))
  16292. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
  16293. if(h !== this._hashSize) {
  16294. this._hashSize = h | 0;
  16295. this._lsh = null; // need to retrain the model
  16296. }
  16297. }
  16298. /**
  16299. * Run the specific task of this node
  16300. * @param {SpeedyGPU} gpu
  16301. * @returns {void|SpeedyPromise<void>}
  16302. */
  16303. _run(gpu)
  16304. {
  16305. // Need to train the model?
  16306. if(this._lsh == null) {
  16307. // internal work textures are only available after initialization,
  16308. // i.e., after calling this._init()
  16309. this._lsh = this._train();
  16310. }
  16311. // Pass it forward
  16312. this.output().swrite(this._lsh);
  16313. }
  16314. /**
  16315. * Train the model
  16316. * @returns {SpeedyLSH}
  16317. */
  16318. _train()
  16319. {
  16320. const keypoints = this._keypointsCopy;
  16321. const numberOfTables = this._numberOfTables;
  16322. const hashSize = this._hashSize;
  16323. if(keypoints.find(keypoint => keypoint.descriptor == null))
  16324. throw new utils_errors/* IllegalOperationError */.js(`Static LSH tables: can't train the model with no keypoint descriptors!`);
  16325. const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
  16326. const lshTables = this._tex[0];
  16327. const descriptorDB = this._tex[1];
  16328. return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
  16329. }
  16330. }
  16331. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
  16332. /*
  16333. * speedy-vision.js
  16334. * GPU-accelerated Computer Vision for JavaScript
  16335. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16336. *
  16337. * Licensed under the Apache License, Version 2.0 (the "License");
  16338. * you may not use this file except in compliance with the License.
  16339. * You may obtain a copy of the License at
  16340. *
  16341. * http://www.apache.org/licenses/LICENSE-2.0
  16342. *
  16343. * Unless required by applicable law or agreed to in writing, software
  16344. * distributed under the License is distributed on an "AS IS" BASIS,
  16345. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16346. * See the License for the specific language governing permissions and
  16347. * limitations under the License.
  16348. *
  16349. * lsh-knn.js
  16350. * K approximate nearest neighbors matcher
  16351. */
  16352. /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
  16353. /** @type {number} how many neighbors to search for, by default */
  16354. const DEFAULT_K = 1;
  16355. /** @type {LSHKNNQualityLevel} default quality level */
  16356. const DEFAULT_QUALITY = 'default';
  16357. /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
  16358. const NUMBER_OF_BIT_SWAPS = {
  16359. 'fastest': 0,
  16360. 'default': 1,
  16361. 'demanding': 2,
  16362. };
  16363. /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
  16364. const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o,d) => ((o[d] = fd(d)), o), {}))(
  16365. d => ((fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o,h) => ((o[h] = fh(h)), o), {}))(
  16366. h => ((fl => [0,1,2].reduce((o,l) => ((o[l] = fl(l)), o), {}))(
  16367. l => `lshKnn${d}h${h}lv${l}`
  16368. ))
  16369. ))
  16370. );
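// reading aid: for an acceptable descriptor size d, hash size h and level l in {0,1,2},
// the nested reduce() above yields LSH_KNN[d][h][l] === `lshKnn${d}h${h}lv${l}`,
// i.e. the name of the GPU program to run for that configuration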
  16371. /**
  16372. * K approximate nearest neighbors matcher
  16373. */
  16374. class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode
  16375. {
  16376. /**
  16377. * Constructor
  16378. * @param {string} [name] name of the node
  16379. */
  16380. constructor(name = undefined)
  16381. {
  16382. super(name, 6, [
  16383. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16384. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16385. msg.descriptorSize > 0
  16386. ),
  16387. InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables),
  16388. OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches),
  16389. ]);
  16390. /** @type {number} how many neighbors do you want? */
  16391. this._k = DEFAULT_K;
  16392. /** @type {LSHKNNQualityLevel} quality of the matching */
  16393. this._quality = DEFAULT_QUALITY;
  16394. }
  16395. /**
  16396. * How many neighbors do you want?
  16397. * @returns {number}
  16398. */
  16399. get k()
  16400. {
  16401. return this._k;
  16402. }
  16403. /**
  16404. * How many neighbors do you want?
  16405. * @param {number} k number of neighbors
  16406. */
  16407. set k(k)
  16408. {
  16409. this._k = Math.max(1, k | 0);
  16410. }
  16411. /**
  16412. * Quality of the matching
  16413. * @returns {LSHKNNQualityLevel}
  16414. */
  16415. get quality()
  16416. {
  16417. return this._quality;
  16418. }
  16419. /**
  16420. * Quality of the matching
  16421. * @param {LSHKNNQualityLevel} quality
  16422. */
  16423. set quality(quality)
  16424. {
  16425. if(!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality))
  16426. throw new utils_errors/* IllegalArgumentError */.mG(`Invalid quality level: "${quality}"`);
  16427. this._quality = quality;
  16428. }
  16429. /**
  16430. * Run the specific task of this node
  16431. * @param {SpeedyGPU} gpu
  16432. * @returns {void|SpeedyPromise<void>}
  16433. */
  16434. _run(gpu)
  16435. {
  16436. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  16437. /** @type {SpeedyLSH} */ const lsh = this.input('lsh').read().lsh;
  16438. const keypoints = gpu.programs.keypoints;
  16439. const tables = lsh.tables;
  16440. const descriptorDB = lsh.descriptorDB;
  16441. const tablesStride = tables.width;
  16442. const descriptorDBStride = descriptorDB.width;
  16443. const tableCount = lsh.tableCount;
  16444. const hashSize = lsh.hashSize;
  16445. const bucketCapacity = lsh.bucketCapacity;
  16446. const bucketsPerTable = lsh.bucketsPerTable;
  16447. const sequences = lsh.sequences;
  16448. const candidatesA = this._tex[0];
  16449. const candidatesB = this._tex[1];
  16450. const candidatesC = this._tex[2];
  16451. const filters = this._tex[3];
  16452. const transferA = this._tex[4];
  16453. const transferB = this._tex[5];
  16454. const level = NUMBER_OF_BIT_SWAPS[this._quality];
  16455. const matchesPerKeypoint = this._k;
  16456. // validate parameters
  16457. if(descriptorSize !== lsh.descriptorSize)
  16458. throw new utils_errors/* IllegalArgumentError */.mG(`Can't match different types of descriptors in ${this.fullName}`);
  16459. utils/* Utils.assert */.c.assert(LSH_KNN[descriptorSize] != undefined);
  16460. utils/* Utils.assert */.c.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
  16461. utils/* Utils.assert */.c.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
  16462. // configure the output texture
  16463. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16464. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  16465. let encodedMatches = transferB;
  16466. keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
  16467. // prepare the LSH matching
  16468. const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  16469. keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
  16470. keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
  16471. const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
  16472. lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
  16473. lshKnn.setUBO('LSHSequences', sequences);
  16474. // match keypoints
  16475. encodedMatches.clear();
  16476. keypoints.lshKnnInitFilters();
  16477. for(let i = 0; i < matchesPerKeypoint; i++) {
  16478. // find the (i+1)-th best match
  16479. let candidates = keypoints.lshKnnInitCandidates();
  16480. for(let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
  16481. candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  16482. gpu.gl.flush();
  16483. }
  16484. candidates.copyTo(filters);
  16485. // transfer matches to an encoded matches texture
  16486. encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
  16487. }
  16488. // done
  16489. this.output().swrite(encodedMatches, matchesPerKeypoint);
  16490. /*
  16491. // debug
  16492. let data = filters.inspect32(gpu), debug = [];
  16493. for(let i = 0; i < data.length; i++) {
  16494. const bits = MATCH_INDEX_BITS;
  16495. const mask = (1 << bits) - 1;
  16496. const u32 = data[i];
  16497. const index = u32 & mask, distance = u32 >>> bits;
  16498. //debug.push('|'+[ u32 ].toString());
  16499. debug.push('|'+[ index, distance ].toString());
  16500. }
  16501. console.log(debug.join(','));
  16502. */
  16503. }
  16504. }
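/*
 * Usage sketch (not part of the bundle): approximate matching against a pre-trained
 * database. The port names 'keypoints' and 'lsh' come from the node above; the Speedy.*
 * factories and the matched-keypoint sink port ('matches') are assumptions about the
 * public API.
 *
 *   const lshTables = Speedy.Keypoint.Matcher.StaticLSHTables();
 *   lshTables.keypoints = databaseKeypoints;   // "training" keypoints, with descriptors
 *
 *   const knn = Speedy.Keypoint.Matcher.LSHKNN();
 *   knn.k = 2;                  // 2 nearest neighbors, e.g. for a ratio test
 *   knn.quality = 'default';    // 'fastest' | 'default' | 'demanding'
 *
 *   lshTables.output().connectTo(knn.input('lsh'));
 *   descriptor.output().connectTo(knn.input('keypoints'));   // descriptors of the current frame
 *
 *   const sink = Speedy.Keypoint.SinkOfMatchedKeypoints();
 *   descriptor.output().connectTo(sink.input());
 *   knn.output().connectTo(sink.input('matches'));
 */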
  16505. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
  16506. /*
  16507. * speedy-vision.js
  16508. * GPU-accelerated Computer Vision for JavaScript
  16509. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16510. *
  16511. * Licensed under the Apache License, Version 2.0 (the "License");
  16512. * you may not use this file except in compliance with the License.
  16513. * You may obtain a copy of the License at
  16514. *
  16515. * http://www.apache.org/licenses/LICENSE-2.0
  16516. *
  16517. * Unless required by applicable law or agreed to in writing, software
  16518. * distributed under the License is distributed on an "AS IS" BASIS,
  16519. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16520. * See the License for the specific language governing permissions and
  16521. * limitations under the License.
  16522. *
  16523. * bf-knn.js
  16524. * Brute Force KNN Keypoint Matcher
  16525. */
  16526. /** @type {Object<number,string>} program name indexed by descriptor size */
  16527. const PROGRAM_NAME = {
  16528. 32: 'bfMatcher32',
  16529. 64: 'bfMatcher64',
  16530. };
  16531. /**
  16532. * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
  16533. * invoking this (use a database of 50 keypoints or so - your mileage may vary)
  16534. */
  16535. class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode
  16536. {
  16537. /**
  16538. * Constructor
  16539. * @param {string} [name] name of the node
  16540. */
  16541. constructor(name = undefined)
  16542. {
  16543. super(name, 6, [
  16544. InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16545. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16546. msg.descriptorSize > 0
  16547. ),
  16548. InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16549. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16550. msg.descriptorSize > 0
  16551. ),
  16552. OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches),
  16553. ]);
  16554. /** @type {number} number of matches per keypoint (the "k" of knn) */
  16555. this._matchesPerKeypoint = 1;
  16556. }
  16557. /**
  16558. * Number of matches per keypoint
  16559. * @returns {number}
  16560. */
  16561. get k()
  16562. {
  16563. return this._matchesPerKeypoint;
  16564. }
  16565. /**
  16566. * Number of matches per keypoint
  16567. * @param {number} value
  16568. */
  16569. set k(value)
  16570. {
  16571. this._matchesPerKeypoint = Math.max(1, value | 0);
  16572. }
  16573. /**
  16574. * Run the specific task of this node
  16575. * @param {SpeedyGPU} gpu
  16576. * @returns {void|SpeedyPromise<void>}
  16577. */
  16578. _run(gpu)
  16579. {
  16580. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('keypoints').read() );
  16581. const database = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('database').read() );
  16582. const candidatesA = this._tex[0];
  16583. const candidatesB = this._tex[1];
  16584. const candidatesC = this._tex[2];
  16585. const encodedFiltersA = this._tex[3];
  16586. const encodedMatchesA = this._tex[4];
  16587. const encodedMatchesB = this._tex[5];
  16588. const matchesPerKeypoint = this._matchesPerKeypoint;
  16589. const keypoints = gpu.programs.keypoints;
  16590. // validate parameters
  16591. if(descriptorSize !== database.descriptorSize)
  16592. throw new utils_errors/* IllegalArgumentError */.mG(`Incompatible descriptors in ${this.fullName}`);
  16593. else if(!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize))
  16594. throw new utils_errors/* NotSupportedError */.B8(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
  16595. // prepare the brute force matching
  16596. const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
  16597. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  16598. const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
  16599. const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
  16600. const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
  16601. const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  16602. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  16603. keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
  16604. keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
  16605. keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
  16606. bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
  16607. // match keypoints
  16608. let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
  16609. let encodedFilters = keypoints.bfMatcherInitFilters();
  16610. for(let k = 0; k < matchesPerKeypoint; k++) {
  16611. let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
  16612. // find the (k+1)-th best match
  16613. for(let passId = 0; passId < numberOfPasses; passId++) {
  16614. encodedPartialMatches = bfMatcher(
  16615. encodedPartialMatches, encodedFilters, partialMatcherLength,
  16616. database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength,
  16617. encodedKeypoints, descriptorSize, extraSize, encoderLength,
  16618. passId
  16619. );
  16620. gpu.gl.flush();
  16621. }
  16622. //gpu.gl.flush();
  16623. // copy the (k+1)-th best match to the filter
  16624. if(matchesPerKeypoint > 1)
  16625. encodedPartialMatches.copyTo(encodedFilters);
  16626. // aggregate matches
  16627. encodedMatches = keypoints.bfMatcherTransfer(
  16628. encodedMatches, encodedPartialMatches, matchesPerKeypoint, k
  16629. );
  16630. }
  16631. // done!
  16632. this.output().swrite(encodedMatches, matchesPerKeypoint);
  16633. }
  16634. }
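/*
 * Usage sketch (not part of the bundle): brute-force matching against a small database,
 * following the advice in the class comment above (clip the database to ~50 keypoints).
 * The port names 'keypoints' and 'database' come from the node; the Speedy.* factories
 * and the clipper's size property are assumptions about the public API.
 *
 *   const clipper = Speedy.Keypoint.Clipper();
 *   clipper.size = 50;                       // keep the database small: the cost grows with its size
 *
 *   const bfknn = Speedy.Keypoint.Matcher.BFKNN();
 *   bfknn.k = 1;                             // best match only
 *
 *   databaseDescriptor.output().connectTo(clipper.input());
 *   clipper.output().connectTo(bfknn.input('database'));
 *   frameDescriptor.output().connectTo(bfknn.input('keypoints'));
 *   bfknn.output().connectTo(matchedSink.input('matches'));
 */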
  16635. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
  16636. /*
  16637. * speedy-vision.js
  16638. * GPU-accelerated Computer Vision for JavaScript
  16639. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16640. *
  16641. * Licensed under the Apache License, Version 2.0 (the "License");
  16642. * you may not use this file except in compliance with the License.
  16643. * You may obtain a copy of the License at
  16644. *
  16645. * http://www.apache.org/licenses/LICENSE-2.0
  16646. *
  16647. * Unless required by applicable law or agreed to in writing, software
  16648. * distributed under the License is distributed on an "AS IS" BASIS,
  16649. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16650. * See the License for the specific language governing permissions and
  16651. * limitations under the License.
  16652. *
  16653. * distance-filter.js
  16654. * Given a set of pairs of keypoints, discard all pairs whose distance is
  16655. * above a user-defined threshold. Useful for bidirectional optical-flow.
  16656. */
  16657. /**
  16658. * Given a set of pairs of keypoints, discard all pairs whose distance is
  16659. * above a user-defined threshold. Useful for bidirectional optical-flow.
  16660. *
  16661. * The pairs of keypoints are provided as two separate sets, "in" and
  16662. * "reference". Keypoints that are kept will have their data extracted
  16663. * from the "in" set.
  16664. */
  16665. class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode
  16666. {
  16667. /**
  16668. * Constructor
  16669. * @param {string} [name] name of the node
  16670. */
  16671. constructor(name = undefined)
  16672. {
  16673. super(name, 1, [
  16674. InputPort('in').expects(SpeedyPipelineMessageType.Keypoints),
  16675. InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints),
  16676. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  16677. ]);
  16678. /** @type {number} maximum accepted distance */
  16679. this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
  16680. }
  16681. /**
  16682. * Maximum accepted distance
  16683. * @returns {number}
  16684. */
  16685. get threshold()
  16686. {
  16687. return this._threshold;
  16688. }
  16689. /**
  16690. * Maximum accepted distance
  16691. * @param {number} value
  16692. */
  16693. set threshold(value)
  16694. {
  16695. this._threshold = Math.max(0, +value);
  16696. }
  16697. /**
  16698. * Run the specific task of this node
  16699. * @param {SpeedyGPU} gpu
  16700. * @returns {void|SpeedyPromise<void>}
  16701. */
  16702. _run(gpu)
  16703. {
  16704. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in').read() );
  16705. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('reference').read() );
  16706. const threshold = this._threshold;
  16707. // validate shapes
  16708. if(set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize)
  16709. throw new utils_errors/* IllegalOperationError */.js(`The distance filter requires two compatible shapes of keypoint streams`);
  16710. // calculate the shape of the output
  16711. const outputTexture = this._tex[0];
  16712. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  16713. const descriptorSize = set0.descriptorSize;
  16714. const extraSize = set0.extraSize;
  16715. // apply the distance filter
  16716. (gpu.programs.keypoints.distanceFilter
  16717. .outputs(encoderLength, encoderLength, outputTexture)
  16718. )(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  16719. // done!
  16720. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  16721. }
  16722. }
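/*
 * Usage sketch (not part of the bundle): one way to use the distance filter for the
 * bidirectional optical-flow check mentioned above. Keypoints are tracked forward and
 * then backward; a keypoint of the "in" stream is kept only if its counterpart in the
 * "reference" stream lies within threshold pixels of it. The port names come from the
 * node; the LK wiring and factory names are assumptions about the public API.
 *
 *   const filter = Speedy.Keypoint.DistanceFilter();
 *   filter.threshold = 2;   // max. accepted forward-backward error, in pixels
 *
 *   // originalKeypoints -> forwardLK (A to B) -> backwardLK (B to A) gives the round trip
 *   originalKeypoints.output().connectTo(filter.input('in'));        // data is taken from "in"
 *   backwardLK.output().connectTo(filter.input('reference'));        // compared against the round trip
 *   filter.output().connectTo(sink.input());                         // keypoints that track reliably
 */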
  16723. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
  16724. /*
  16725. * speedy-vision.js
  16726. * GPU-accelerated Computer Vision for JavaScript
  16727. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16728. *
  16729. * Licensed under the Apache License, Version 2.0 (the "License");
  16730. * you may not use this file except in compliance with the License.
  16731. * You may obtain a copy of the License at
  16732. *
  16733. * http://www.apache.org/licenses/LICENSE-2.0
  16734. *
  16735. * Unless required by applicable law or agreed to in writing, software
  16736. * distributed under the License is distributed on an "AS IS" BASIS,
  16737. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16738. * See the License for the specific language governing permissions and
  16739. * limitations under the License.
  16740. *
  16741. * hamming-distance-filter.js
  16742. * Given a set of pairs of keypoints, discard all pairs whose hamming
  16743. * distance (of descriptor) is above a user-defined threshold
  16744. */
  16745. /** @type {Object<number,string>} Program names */
  16746. const hamming_distance_filter_PROGRAM_NAME = {
  16747. 32: 'hammingDistanceFilter32',
  16748. 64: 'hammingDistanceFilter64',
  16749. };
  16750. /**
  16751. * Given a set of pairs of keypoints, discard all pairs whose hamming
  16752. * distance (of descriptor) is above a user-defined threshold
  16753. *
  16754. * The pairs of keypoints are provided as two separate sets, "in" and
  16755. * "reference". Keypoints that are kept will have their data extracted
  16756. * from the "in" set.
  16757. */
  16758. class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode
  16759. {
  16760. /**
  16761. * Constructor
  16762. * @param {string} [name] name of the node
  16763. */
  16764. constructor(name = undefined)
  16765. {
  16766. super(name, 1, [
  16767. InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16768. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16769. msg.descriptorSize > 0
  16770. ),
  16771. InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(
  16772. ( /** @type {SpeedyPipelineMessageWithKeypoints} */ msg ) =>
  16773. msg.descriptorSize > 0
  16774. ),
  16775. OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
  16776. ]);
  16777. /** @type {number} distance threshold, an integer */
  16778. this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
  16779. }
  16780. /**
  16781. * Distance threshold, an integer
  16782. * @returns {number}
  16783. */
  16784. get threshold()
  16785. {
  16786. return this._threshold;
  16787. }
  16788. /**
  16789. * Distance threshold, an integer
  16790. * @param {number} value
  16791. */
  16792. set threshold(value)
  16793. {
  16794. this._threshold = Math.max(0, value | 0);
  16795. }
  16796. /**
  16797. * Run the specific task of this node
  16798. * @param {SpeedyGPU} gpu
  16799. * @returns {void|SpeedyPromise<void>}
  16800. */
  16801. _run(gpu)
  16802. {
  16803. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('in').read() );
  16804. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input('reference').read() );
  16805. const threshold = this._threshold;
  16806. // validate shapes
  16807. if(set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize)
  16808. throw new utils_errors/* IllegalOperationError */.js(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
  16809. // validate descriptor size
  16810. if(!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize))
  16811. throw new utils_errors/* NotSupportedError */.B8(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
  16812. // calculate the shape of the output
  16813. const outputTexture = this._tex[0];
  16814. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  16815. const descriptorSize = set0.descriptorSize;
  16816. const extraSize = set0.extraSize;
  16817. // apply the distance filter
  16818. const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
  16819. (gpu.programs.keypoints[program]
  16820. .outputs(encoderLength, encoderLength, outputTexture)
  16821. )(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  16822. // done!
  16823. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  16824. }
  16825. }
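/*
 * Usage sketch (not part of the bundle): keep only the keypoints of "in" whose descriptor
 * is within a given Hamming distance of the corresponding keypoint of "reference". The
 * port names come from the node; the Speedy.* factory name is an assumption.
 *
 *   const filter = Speedy.Keypoint.HammingDistanceFilter();
 *   filter.threshold = 64;   // max. number of differing bits between the two 256-bit descriptors
 *
 *   trackedKeypoints.output().connectTo(filter.input('in'));
 *   referenceKeypoints.output().connectTo(filter.input('reference'));
 *   filter.output().connectTo(sink.input());
 */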
  16826. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
  16827. /*
  16828. * speedy-vision.js
  16829. * GPU-accelerated Computer Vision for JavaScript
  16830. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  16831. *
  16832. * Licensed under the Apache License, Version 2.0 (the "License");
  16833. * you may not use this file except in compliance with the License.
  16834. * You may obtain a copy of the License at
  16835. *
  16836. * http://www.apache.org/licenses/LICENSE-2.0
  16837. *
  16838. * Unless required by applicable law or agreed to in writing, software
  16839. * distributed under the License is distributed on an "AS IS" BASIS,
  16840. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16841. * See the License for the specific language governing permissions and
  16842. * limitations under the License.
  16843. *
  16844. * portal.js
  16845. * Keypoint Portals
  16846. */
  16847. /**
  16848. * A sink of a Keypoint Portal
  16849. * This is not a pipeline sink - it doesn't export any data!
  16850. */
  16851. class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode
  16852. {
  16853. /**
  16854. * Constructor
  16855. * @param {string} [name] name of the node
  16856. */
  16857. constructor(name = undefined)
  16858. {
  16859. super(name, 1, [
  16860. InputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16861. ]);
  16862. /** @type {number} descriptor size, in bytes */
  16863. this._descriptorSize = 0;
  16864. /** @type {number} extra size, in bytes */
  16865. this._extraSize = 0;
16866. /** @type {number} encoder length */
  16867. this._encoderLength = 0;
  16868. /** @type {boolean} is this node initialized? */
  16869. this._initialized = false;
  16870. }
  16871. /**
  16872. * Encoded keypoints
  16873. * @returns {SpeedyTexture}
  16874. */
  16875. get encodedKeypoints()
  16876. {
  16877. if(!this._initialized)
  16878. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  16879. return this._tex[0];
  16880. }
  16881. /**
  16882. * Descriptor size, in bytes
  16883. * @returns {number}
  16884. */
  16885. get descriptorSize()
  16886. {
  16887. if(!this._initialized)
  16888. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  16889. return this._descriptorSize;
  16890. }
  16891. /**
  16892. * Extra size, in bytes
  16893. * @returns {number}
  16894. */
  16895. get extraSize()
  16896. {
  16897. if(!this._initialized)
  16898. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  16899. return this._extraSize;
  16900. }
  16901. /**
  16902. * Encoder length
  16903. * @returns {number}
  16904. */
  16905. get encoderLength()
  16906. {
  16907. if(!this._initialized)
  16908. throw new utils_errors/* IllegalOperationError */.js(`Portal error: ${this.fullName} holds no data`);
  16909. return this._encoderLength;
  16910. }
  16911. /**
  16912. * Initializes this node
  16913. * @param {SpeedyGPU} gpu
  16914. */
  16915. init(gpu)
  16916. {
  16917. super.init(gpu);
  16918. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
  16919. this._tex[0].resize(encoderLength, encoderLength).clearToColor(1,1,1,1); // initial texture
  16920. this._descriptorSize = this._extraSize = 0;
  16921. this._encoderLength = encoderLength;
  16922. this._initialized = true;
  16923. }
  16924. /**
  16925. * Releases this node
  16926. * @param {SpeedyGPU} gpu
  16927. */
  16928. release(gpu)
  16929. {
  16930. this._initialized = false;
  16931. super.release(gpu);
  16932. }
  16933. /**
  16934. * Run the specific task of this node
  16935. * @param {SpeedyGPU} gpu
  16936. * @returns {void|SpeedyPromise<void>}
  16937. */
  16938. _run(gpu)
  16939. {
  16940. const { encodedKeypoints, descriptorSize, extraSize, encoderLength } = /** @type {SpeedyPipelineMessageWithKeypoints} */ ( this.input().read() );
  16941. const tex = this._tex[0];
  16942. // copy input
  16943. tex.resize(encodedKeypoints.width, encodedKeypoints.height);
  16944. encodedKeypoints.copyTo(tex);
  16945. this._descriptorSize = descriptorSize;
  16946. this._extraSize = extraSize;
  16947. this._encoderLength = encoderLength;
  16948. }
  16949. }
  16950. /**
  16951. * A source of a Keypoint Portal
  16952. */
  16953. class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode
  16954. {
  16955. /**
  16956. * Constructor
  16957. * @param {string} [name] name of the node
  16958. */
  16959. constructor(name = undefined)
  16960. {
  16961. super(name, 0, [
  16962. OutputPort().expects(SpeedyPipelineMessageType.Keypoints),
  16963. ]);
  16964. /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
  16965. this._source = null;
  16966. }
  16967. /**
  16968. * Data source
  16969. * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
  16970. */
  16971. get source()
  16972. {
  16973. return this._source;
  16974. }
  16975. /**
  16976. * Data source
  16977. * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
  16978. */
  16979. set source(node)
  16980. {
  16981. if(node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink))
  16982. throw new utils_errors/* IllegalArgumentError */.mG(`Incompatible source for ${this.fullName}`);
  16983. this._source = node;
  16984. }
  16985. /**
  16986. * Run the specific task of this node
  16987. * @param {SpeedyGPU} gpu
  16988. * @returns {void|SpeedyPromise<void>}
  16989. */
  16990. _run(gpu)
  16991. {
  16992. if(this._source == null)
  16993. throw new utils_errors/* IllegalOperationError */.js(`${this.fullName} has no source`);
  16994. this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
  16995. }
  16996. }
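/*
 * Usage sketch (not part of the bundle): a keypoint portal carries the keypoints of one
 * pipeline run into the next run, e.g. to feed 'previousKeypoints' of the LK tracker.
 * The source property is defined above; the Speedy.* factory names are assumptions.
 *
 *   const portalSink = Speedy.Keypoint.Portal.Sink();     // stores this run's keypoints
 *   const portalSource = Speedy.Keypoint.Portal.Source(); // replays them on the next run
 *   portalSource.source = portalSink;                     // link the source to its sink
 *
 *   detector.output().connectTo(portalSink.input());                // store current keypoints
 *   portalSource.output().connectTo(lk.input('previousKeypoints')); // read last run's keypoints
 */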
  16997. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
  16998. /*
  16999. * speedy-vision.js
  17000. * GPU-accelerated Computer Vision for JavaScript
  17001. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  17002. *
  17003. * Licensed under the Apache License, Version 2.0 (the "License");
  17004. * you may not use this file except in compliance with the License.
  17005. * You may obtain a copy of the License at
  17006. *
  17007. * http://www.apache.org/licenses/LICENSE-2.0
  17008. *
  17009. * Unless required by applicable law or agreed to in writing, software
  17010. * distributed under the License is distributed on an "AS IS" BASIS,
  17011. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17012. * See the License for the specific language governing permissions and
  17013. * limitations under the License.
  17014. *
  17015. * keypoint-factory.js
  17016. * Keypoint-related nodes
  17017. */
  17018. /**
  17019. * Keypoint detectors
  17020. */
  17021. class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.R
  17022. {
  17023. /**
  17024. * FAST corner detector
  17025. * @param {string} [name]
  17026. * @returns {SpeedyPipelineNodeFASTKeypointDetector}
  17027. */
  17028. static FAST(name = undefined)
  17029. {
  17030. return new SpeedyPipelineNodeFASTKeypointDetector(name);
  17031. }
  17032. /**
  17033. * Harris corner detector
  17034. * @param {string} [name]
  17035. * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
  17036. */
  17037. static Harris(name = undefined)
  17038. {
  17039. return new SpeedyPipelineNodeHarrisKeypointDetector(name);
  17040. }
  17041. }
  17042. /**
  17043. * Keypoint descriptors
  17044. */
  17045. class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.R
  17046. {
  17047. /**
  17048. * ORB descriptors
  17049. * @param {string} [name]
  17050. * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
  17051. */
  17052. static ORB(name = undefined)
  17053. {
  17054. return new SpeedyPipelineNodeORBKeypointDescriptor(name);
  17055. }
  17056. }
  17057. /**
  17058. * Keypoint trackers
  17059. */
  17060. class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.R
  17061. {
  17062. /**
  17063. * LK optical-flow
  17064. * @param {string} [name]
  17065. * @returns {SpeedyPipelineNodeLKKeypointTracker}
  17066. */
  17067. static LK(name = undefined)
  17068. {
  17069. return new SpeedyPipelineNodeLKKeypointTracker(name);
  17070. }
  17071. }
  17072. /**
  17073. * Keypoint matchers
  17074. */
  17075. class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.R
  17076. {
  17077. /**
  17078. * Static LSH tables
  17079. * @param {string} [name]
  17080. * @returns {SpeedyPipelineNodeStaticLSHTables}
  17081. */
  17082. static StaticLSHTables(name = undefined)
  17083. {
  17084. return new SpeedyPipelineNodeStaticLSHTables(name);
  17085. }
  17086. /**
  17087. * LSH-based K-approximate nearest neighbors
  17088. * @param {string} [name]
  17089. * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
  17090. */
  17091. static LSHKNN(name = undefined)
  17092. {
  17093. return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
  17094. }
  17095. /**
  17096. * Brute-force K-nearest neighbors keypoint matcher
  17097. * @param {string} [name]
  17098. * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
  17099. */
  17100. static BFKNN(name = undefined)
  17101. {
  17102. return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
  17103. }
  17104. }
  17105. /**
  17106. * Portal nodes
  17107. */
  17108. class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.R
  17109. {
  17110. /**
17111. * Create a keypoint portal source
  17112. * @param {string} [name] name of the node
  17113. * @returns {SpeedyPipelineNodeKeypointPortalSource}
  17114. */
  17115. static Source(name = undefined)
  17116. {
  17117. return new SpeedyPipelineNodeKeypointPortalSource(name);
  17118. }
  17119. /**
17120. * Create a keypoint portal sink
  17121. * @param {string} [name] name of the node
  17122. * @returns {SpeedyPipelineNodeKeypointPortalSink}
  17123. */
  17124. static Sink(name = undefined)
  17125. {
  17126. return new SpeedyPipelineNodeKeypointPortalSink(name);
  17127. }
  17128. }
  17129. /**
  17130. * Keypoint-related nodes
  17131. */
  17132. class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.R
  17133. {
  17134. /**
  17135. * Keypoint detectors
  17136. * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
  17137. */
  17138. static get Detector()
  17139. {
  17140. return SpeedyPipelineKeypointDetectorFactory;
  17141. }
  17142. /**
  17143. * Keypoint descriptors
  17144. * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
  17145. */
  17146. static get Descriptor()
  17147. {
  17148. return SpeedyPipelineKeypointDescriptorFactory;
  17149. }
  17150. /**
  17151. * Keypoint trackers
  17152. * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
  17153. */
  17154. static get Tracker()
  17155. {
  17156. return SpeedyPipelineKeypointTrackerFactory;
  17157. }
  17158. /**
  17159. * Keypoint matchers
  17160. * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
  17161. */
  17162. static get Matcher()
  17163. {
  17164. return SpeedyPipelineKeypointMatcherFactory;
  17165. }
  17166. /**
  17167. * Keypoint Portals
  17168. * @returns {typeof SpeedyPipelineKeypointPortalFactory}
  17169. */
  17170. static get Portal()
  17171. {
  17172. return SpeedyPipelineKeypointPortalFactory;
  17173. }
  17174. /**
  17175. * Create a keypoint source
  17176. * @param {string} [name]
  17177. * @returns {SpeedyPipelineNodeKeypointSource}
  17178. */
  17179. static Source(name = undefined)
  17180. {
  17181. return new SpeedyPipelineNodeKeypointSource(name);
  17182. }
  17183. /**
  17184. * Create a keypoint sink
  17185. * @param {string} [name]
  17186. * @returns {SpeedyPipelineNodeKeypointSink}
  17187. */
  17188. static Sink(name = undefined)
  17189. {
  17190. return new SpeedyPipelineNodeKeypointSink(name);
  17191. }
  17192. /**
  17193. * Create a sink of tracked keypoints
  17194. * @param {string} [name]
  17195. * @returns {SpeedyPipelineNodeTrackedKeypointSink}
  17196. */
  17197. static SinkOfTrackedKeypoints(name = undefined)
  17198. {
  17199. return new SpeedyPipelineNodeTrackedKeypointSink(name);
  17200. }
  17201. /**
  17202. * Create a sink of matched keypoints
  17203. * @param {string} [name]
  17204. * @returns {SpeedyPipelineNodeMatchedKeypointSink}
  17205. */
  17206. static SinkOfMatchedKeypoints(name = undefined)
  17207. {
  17208. return new SpeedyPipelineNodeMatchedKeypointSink(name);
  17209. }
  17210. /**
  17211. * Keypoint clipper
  17212. * @param {string} [name]
  17213. * @returns {SpeedyPipelineNodeKeypointClipper}
  17214. */
  17215. static Clipper(name = undefined)
  17216. {
  17217. return new SpeedyPipelineNodeKeypointClipper(name);
  17218. }
  17219. /**
  17220. * Border Clipper
  17221. * @param {string} [name]
  17222. * @returns {SpeedyPipelineNodeKeypointBorderClipper}
  17223. */
  17224. static BorderClipper(name = undefined)
  17225. {
  17226. return new SpeedyPipelineNodeKeypointBorderClipper(name);
  17227. }
  17228. /**
  17229. * Create a keypoint buffer
  17230. * @param {string} [name]
  17231. * @returns {SpeedyPipelineNodeKeypointBuffer}
  17232. */
  17233. static Buffer(name = undefined)
  17234. {
  17235. return new SpeedyPipelineNodeKeypointBuffer(name);
  17236. }
  17237. /**
  17238. * Create a keypoint mixer
  17239. * @param {string} [name]
  17240. * @returns {SpeedyPipelineNodeKeypointMixer}
  17241. */
  17242. static Mixer(name = undefined)
  17243. {
  17244. return new SpeedyPipelineNodeKeypointMixer(name);
  17245. }
  17246. /**
  17247. * Create a keypoint shuffler
  17248. * @param {string} [name]
  17249. * @returns {SpeedyPipelineNodeKeypointShuffler}
  17250. */
  17251. static Shuffler(name = undefined)
  17252. {
  17253. return new SpeedyPipelineNodeKeypointShuffler(name);
  17254. }
  17255. /**
  17256. * Create a keypoint multiplexer
  17257. * @param {string} [name]
  17258. * @returns {SpeedyPipelineNodeKeypointMultiplexer}
  17259. */
  17260. static Multiplexer(name = undefined)
  17261. {
  17262. return new SpeedyPipelineNodeKeypointMultiplexer(name);
  17263. }
  17264. /**
  17265. * Create a keypoint transformer
  17266. * @param {string} [name]
  17267. * @returns {SpeedyPipelineNodeKeypointTransformer}
  17268. */
  17269. static Transformer(name = undefined)
  17270. {
  17271. return new SpeedyPipelineNodeKeypointTransformer(name);
  17272. }
  17273. /**
  17274. * Create a subpixel refiner of keypoint locations
  17275. * @param {string} [name]
  17276. * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
  17277. */
  17278. static SubpixelRefiner(name = undefined)
  17279. {
  17280. return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
  17281. }
  17282. /**
  17283. * Distance filter
  17284. * @param {string} [name]
17285. * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
  17286. */
  17287. static DistanceFilter(name = undefined)
  17288. {
  17289. return new SpeedyPipelineNodeKeypointDistanceFilter(name);
  17290. }
  17291. /**
  17292. * Hamming distance filter
  17293. * @param {string} [name]
17294. * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
  17295. */
  17296. static HammingDistanceFilter(name = undefined)
  17297. {
  17298. return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
  17299. }
  17300. }
  17301. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
  17302. /*
  17303. * speedy-vision.js
  17304. * GPU-accelerated Computer Vision for JavaScript
  17305. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  17306. *
  17307. * Licensed under the Apache License, Version 2.0 (the "License");
  17308. * you may not use this file except in compliance with the License.
  17309. * You may obtain a copy of the License at
  17310. *
  17311. * http://www.apache.org/licenses/LICENSE-2.0
  17312. *
  17313. * Unless required by applicable law or agreed to in writing, software
  17314. * distributed under the License is distributed on an "AS IS" BASIS,
  17315. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17316. * See the License for the specific language governing permissions and
  17317. * limitations under the License.
  17318. *
  17319. * sink.js
17320. * Gets 2D vectors out of the pipeline
  17321. */
  17322. // next power of 2
  17323. const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  17324. /**
  17325. * Gets 2D vectors out of the pipeline
  17326. */
  17327. class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode
  17328. {
  17329. /**
  17330. * Constructor
  17331. * @param {string} [name] name of the node
  17332. */
  17333. constructor(name = 'vec2')
  17334. {
  17335. super(name, 2, [
  17336. InputPort().expects(SpeedyPipelineMessageType.Vector2)
  17337. ]);
  17338. /** @type {SpeedyVector2[]} 2D vectors (output) */
  17339. this._vectors = [];
  17340. /** @type {SpeedyTextureReader} texture reader */
  17341. this._textureReader = new SpeedyTextureReader();
  17342. /** @type {number} page flipping index */
  17343. this._page = 0;
  17344. /** @type {boolean} accelerate GPU-CPU transfers */
  17345. this._turbo = false;
  17346. }
  17347. /**
  17348. * Accelerate GPU-CPU transfers
  17349. * @returns {boolean}
  17350. */
  17351. get turbo()
  17352. {
  17353. return this._turbo;
  17354. }
  17355. /**
  17356. * Accelerate GPU-CPU transfers
  17357. * @param {boolean} value
  17358. */
  17359. set turbo(value)
  17360. {
  17361. this._turbo = Boolean(value);
  17362. }
  17363. /**
  17364. * Initializes this node
  17365. * @param {SpeedyGPU} gpu
  17366. */
  17367. init(gpu)
  17368. {
  17369. super.init(gpu);
  17370. this._textureReader.init(gpu);
  17371. }
  17372. /**
  17373. * Releases this node
  17374. * @param {SpeedyGPU} gpu
  17375. */
  17376. release(gpu)
  17377. {
  17378. this._textureReader.release(gpu);
  17379. super.release(gpu);
  17380. }
  17381. /**
  17382. * Export data from this node to the user
  17383. * @returns {SpeedyPromise<SpeedyVector2[]>}
  17384. */
  17385. export()
  17386. {
  17387. return speedy_promise/* SpeedyPromise.resolve */.s.resolve(this._vectors);
  17388. }
  17389. /**
  17390. * Run the specific task of this node
  17391. * @param {SpeedyGPU} gpu
  17392. * @returns {void|SpeedyPromise<void>}
  17393. */
  17394. _run(gpu)
  17395. {
  17396. const { vectors } = /** @type {SpeedyPipelineMessageWith2DVectors} */ ( this.input().read() );
  17397. const useBufferedDownloads = this._turbo;
  17398. const encoderLength = vectors.width;
  17399. /*
  17400. I have found experimentally that, in Firefox, readPixelsAsync()
  17401. performs MUCH better if the width of the target texture is a power
  17402. of two. I have no idea why this is the case, nor if it's related to
  17403. some interaction with the GL drivers, somehow. This seems to make no
  17404. difference on Chrome, however. In any case, let's convert the input
  17405. texture to POT.
  17406. */
  17407. const encoderWidth = vector2_sink_nextPot(encoderLength);
  17408. const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  17409. //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
  17410. // copy the set of vectors to an internal texture
  17411. const copiedTexture = this._tex[this._page];
  17412. (gpu.programs.utils.copy2DVectors
  17413. .outputs(encoderWidth, encoderHeight, copiedTexture)
  17414. )(vectors);
  17415. // flip page
  17416. this._page = 1 - this._page;
  17417. // download the internal texture
  17418. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  17419. this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
  17420. });
  17421. }
  17422. /**
  17423. * Decode a sequence of vectors, given a flattened image of encoded pixels
  17424. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  17425. * @param {number} encoderWidth
  17426. * @param {number} encoderHeight
  17427. * @returns {SpeedyVector2[]} vectors
  17428. */
  17429. static _decode(pixels, encoderWidth, encoderHeight)
  17430. {
  17431. const bytesPerVector = 4; // 1 pixel per vector
  17432. const vectors = [];
  17433. let hi = 0, lo = 0;
  17434. let x = 0, y = 0;
  17435. // how many bytes should we read?
  17436. const e2 = encoderWidth * encoderHeight * bytesPerVector;
  17437. const size = Math.min(pixels.length, e2);
  17438. // for each encoded vector
  17439. for(let i = 0; i < size; i += bytesPerVector) {
  17440. // extract 16-bit words
  17441. lo = (pixels[i+1] << 8) | pixels[i];
  17442. hi = (pixels[i+3] << 8) | pixels[i+2];
  17443. // the vector is "null": we have reached the end of the list
  17444. if(lo == 0xFFFF && hi == 0xFFFF)
  17445. break;
  17446. // the vector must be discarded
  17447. if(lo == 0xFF00 && hi == 0xFF00)
  17448. continue;
  17449. // decode floats
  17450. x = utils/* Utils.decodeFloat16 */.c.decodeFloat16(lo);
  17451. y = utils/* Utils.decodeFloat16 */.c.decodeFloat16(hi);
  17452. // register vector
  17453. vectors.push(new SpeedyVector2(x, y));
  17454. }
  17455. // done!
  17456. return vectors;
  17457. }
  17458. }
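/*
 * Note on the pixel format consumed by SpeedyPipelineNodeVector2Sink._decode()
 * above: each vector occupies a single RGBA pixel, i.e., two little-endian
 * 16-bit words holding half-precision floats. For example, the bytes
 * [0x00, 0x3C, 0x00, 0xC0] give lo = 0x3C00 (+1.0) and hi = 0xC000 (-2.0),
 * which decode to the vector (1, -2). The sentinel words 0xFFFF/0xFFFF mark
 * the end of the list and 0xFF00/0xFF00 mark a discarded entry.
 */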
  17459. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
  17460. /*
  17461. * speedy-vision.js
  17462. * GPU-accelerated Computer Vision for JavaScript
  17463. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  17464. *
  17465. * Licensed under the Apache License, Version 2.0 (the "License");
  17466. * you may not use this file except in compliance with the License.
  17467. * You may obtain a copy of the License at
  17468. *
  17469. * http://www.apache.org/licenses/LICENSE-2.0
  17470. *
  17471. * Unless required by applicable law or agreed to in writing, software
  17472. * distributed under the License is distributed on an "AS IS" BASIS,
  17473. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17474. * See the License for the specific language governing permissions and
  17475. * limitations under the License.
  17476. *
  17477. * vector2-factory.js
  17478. * 2D vectors
  17479. */
  17480. /**
  17481. * 2D vectors
  17482. */
  17483. class SpeedyPipelineVector2Factory extends Function
  17484. {
  17485. /**
  17486. * Constructor
  17487. */
  17488. constructor()
  17489. {
  17490. // This factory can be invoked as a function
  17491. super('...args', 'return this._create(...args)');
  17492. return this.bind(this);
  17493. }
  17494. /**
  17495. * @private
  17496. *
  17497. * Create a 2D vector
  17498. * @param {number} x x-coordinate
  17499. * @param {number} y y-coordinate
  17500. * @returns {SpeedyVector2}
  17501. */
  17502. _create(x, y)
  17503. {
  17504. return new SpeedyVector2(x, y);
  17505. }
  17506. /**
  17507. * Create a Vector2 sink
  17508. * @param {string} [name]
  17509. * @returns {SpeedyPipelineNodeVector2Sink}
  17510. */
  17511. Sink(name = undefined)
  17512. {
  17513. return new SpeedyPipelineNodeVector2Sink(name);
  17514. }
  17515. }
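/*
 * Usage sketch: since the factory extends Function and binds itself in its
 * constructor, the object exposed as Speedy.Vector2 (see main.js below) can
 * be both called and used as a namespace. For illustration:
 *
 *     const v = Speedy.Vector2(3, 4);     // a SpeedyVector2
 *     const sink = Speedy.Vector2.Sink(); // a Vector2 sink node, named 'vec2' by default
 */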
  17516. ;// CONCATENATED MODULE: ./src/main.js
  17517. /*
  17518. * speedy-vision.js
  17519. * GPU-accelerated Computer Vision for JavaScript
  17520. * Copyright 2020-2023 Alexandre Martins <alemartf(at)gmail.com>
  17521. *
  17522. * Licensed under the Apache License, Version 2.0 (the "License");
  17523. * you may not use this file except in compliance with the License.
  17524. * You may obtain a copy of the License at
  17525. *
  17526. * http://www.apache.org/licenses/LICENSE-2.0
  17527. *
  17528. * Unless required by applicable law or agreed to in writing, software
  17529. * distributed under the License is distributed on an "AS IS" BASIS,
  17530. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17531. * See the License for the specific language governing permissions and
  17532. * limitations under the License.
  17533. *
  17534. * main.js
  17535. * The entry point of the library
  17536. */
  17537. /* eslint-disable no-undef */
  17538. /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  17539. /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
  17540. /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
  17541. /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  17542. // Constants
  17543. /** @type {SpeedyMatrixFactory} */
  17544. const matrixFactory = new SpeedyMatrixFactory();
  17545. /** @type {SpeedyPipelineVector2Factory} */
  17546. const vector2Factory = new SpeedyPipelineVector2Factory();
  17547. /**
  17548. * GPU-accelerated Computer Vision for JavaScript
  17549. */
  17550. class Speedy
  17551. {
  17552. /**
  17553. * Loads a SpeedyMedia object based on the provided source element
  17554. * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
  17555. * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
  17556. * @returns {SpeedyPromise<SpeedyMedia>}
  17557. */
  17558. static load(sourceElement, options = {})
  17559. {
  17560. return SpeedyMedia.load(sourceElement, options);
  17561. }
  17562. /**
  17563. * Loads a camera stream
17564. * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
  17565. * @param {number} [height] height of the stream
  17566. * @returns {SpeedyPromise<SpeedyMedia>}
  17567. */
  17568. static camera(widthOrConstraints = 640, height = 360)
  17569. {
  17570. const constraints = (typeof(widthOrConstraints) === 'object') ? widthOrConstraints : ({
  17571. audio: false,
  17572. video: {
  17573. width: widthOrConstraints | 0,
  17574. height: height | 0,
  17575. },
  17576. });
  17577. return utils/* Utils.requestCameraStream */.c.requestCameraStream(constraints).then(
  17578. video => SpeedyMedia.load(video)
  17579. );
  17580. }
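/*
 * For illustration, both call forms below are accepted by camera(): the first
 * requests the default 640x360 stream, the second passes a
 * MediaStreamConstraints object directly (here asking for the rear camera).
 *
 *     Speedy.camera().then(media => { ... });
 *     Speedy.camera({ video: { facingMode: 'environment' } }).then(media => { ... });
 */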
  17581. /**
17582. * Checks if Speedy can be executed on this machine & browser
17583. * @returns {boolean} true if Speedy can be executed on this machine & browser
  17584. */
  17585. static isSupported()
  17586. {
  17587. return (
  17588. (typeof WebAssembly !== 'undefined') &&
  17589. (typeof WebGL2RenderingContext !== 'undefined') &&
  17590. (speedy_gl/* SpeedyGL.instance.gl */.$.instance.gl != null)
  17591. );
  17592. }
  17593. /**
  17594. * Create a 2D vector
  17595. * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
  17596. */
  17597. static get Vector2()
  17598. {
  17599. return vector2Factory;
  17600. }
  17601. /**
  17602. * Create a 2D point
  17603. * @param {number} x
  17604. * @param {number} y
  17605. * @returns {SpeedyPoint2}
  17606. */
  17607. static Point2(x, y)
  17608. {
  17609. return new SpeedyPoint2(x, y);
  17610. }
  17611. /**
  17612. * Create a new size object
  17613. * @param {number} width
  17614. * @param {number} height
  17615. * @returns {SpeedySize}
  17616. */
  17617. static Size(width, height)
  17618. {
  17619. return new SpeedySize(width, height);
  17620. }
  17621. /**
  17622. * Create a Matrix (entries are given in column-major format)
  17623. * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
  17624. */
  17625. static get Matrix()
  17626. {
  17627. return matrixFactory;
  17628. }
  17629. /**
  17630. * Speedy Promises
  17631. * @returns {typeof SpeedyPromise}
  17632. */
  17633. static get Promise()
  17634. {
  17635. return speedy_promise/* SpeedyPromise */.s;
  17636. }
  17637. /**
  17638. * Create a new Pipeline
  17639. * @returns {SpeedyPipeline}
  17640. */
  17641. static Pipeline()
  17642. {
  17643. return new SpeedyPipeline();
  17644. }
  17645. /**
  17646. * Image-related nodes
  17647. * @returns {typeof SpeedyPipelineImageFactory}
  17648. */
  17649. static get Image()
  17650. {
  17651. return SpeedyPipelineImageFactory;
  17652. }
  17653. /**
  17654. * Image filters
  17655. * @returns {typeof SpeedyPipelineFilterFactory}
  17656. */
  17657. static get Filter()
  17658. {
  17659. return SpeedyPipelineFilterFactory;
  17660. }
  17661. /**
  17662. * Image transforms
  17663. * @returns {typeof SpeedyPipelineTransformFactory}
  17664. */
  17665. static get Transform()
  17666. {
  17667. return SpeedyPipelineTransformFactory;
  17668. }
  17669. /**
  17670. * Keypoint-related nodes
  17671. * @returns {typeof SpeedyPipelineKeypointFactory}
  17672. */
  17673. static get Keypoint()
  17674. {
  17675. return SpeedyPipelineKeypointFactory;
  17676. }
  17677. /**
  17678. * The version of the library
  17679. * @returns {string} The version of the library
  17680. */
  17681. static get version()
  17682. {
17683. return "0.9.1-wip";
  17687. }
  17688. /**
  17689. * The FPS rate
  17690. * @returns {number} Frames per second (FPS)
  17691. */
  17692. static get fps()
  17693. {
  17694. return FPSCounter.instance.fps;
  17695. }
  17696. /**
  17697. * Global settings
  17698. * @returns {typeof Settings}
  17699. */
  17700. static get Settings()
  17701. {
  17702. return settings/* Settings */.Z;
  17703. }
  17704. }
  17705. // Freeze the namespace
  17706. Object.freeze(Speedy);
  17707. // Display a notice
  17708. utils/* Utils.log */.c.log(
  17709. `Speedy Vision version ${Speedy.version}. ` +
  17710. `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` +
  17711. "https://github.com/alemart/speedy-vision"
  17712. );
  17713. // Big-endian machine? Currently untested.
  17714. if(!globals.LITTLE_ENDIAN)
  17715. utils/* Utils.warning */.c.warning('Running on a big-endian machine');
  17716. })();
  17717. __webpack_exports__ = __webpack_exports__["default"];
  17718. /******/ return __webpack_exports__;
  17719. /******/ })()
  17720. ;
  17721. });
  17722. /***/ })
  17723. /******/ });
  17724. /************************************************************************/
  17725. /******/ // The module cache
  17726. /******/ var __webpack_module_cache__ = {};
  17727. /******/
  17728. /******/ // The require function
  17729. /******/ function __webpack_require__(moduleId) {
  17730. /******/ // Check if module is in cache
  17731. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  17732. /******/ if (cachedModule !== undefined) {
  17733. /******/ return cachedModule.exports;
  17734. /******/ }
  17735. /******/ // Create a new module (and put it into the cache)
  17736. /******/ var module = __webpack_module_cache__[moduleId] = {
  17737. /******/ // no module.id needed
  17738. /******/ // no module.loaded needed
  17739. /******/ exports: {}
  17740. /******/ };
  17741. /******/
  17742. /******/ // Execute the module function
  17743. /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  17744. /******/
  17745. /******/ // Return the exports of the module
  17746. /******/ return module.exports;
  17747. /******/ }
  17748. /******/
  17749. /************************************************************************/
  17750. /******/ /* webpack/runtime/compat get default export */
  17751. /******/ (() => {
  17752. /******/ // getDefaultExport function for compatibility with non-harmony modules
  17753. /******/ __webpack_require__.n = (module) => {
  17754. /******/ var getter = module && module.__esModule ?
  17755. /******/ () => (module['default']) :
  17756. /******/ () => (module);
  17757. /******/ __webpack_require__.d(getter, { a: getter });
  17758. /******/ return getter;
  17759. /******/ };
  17760. /******/ })();
  17761. /******/
  17762. /******/ /* webpack/runtime/define property getters */
  17763. /******/ (() => {
  17764. /******/ // define getter functions for harmony exports
  17765. /******/ __webpack_require__.d = (exports, definition) => {
  17766. /******/ for(var key in definition) {
  17767. /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
  17768. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  17769. /******/ }
  17770. /******/ }
  17771. /******/ };
  17772. /******/ })();
  17773. /******/
  17774. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  17775. /******/ (() => {
  17776. /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  17777. /******/ })();
  17778. /******/
  17779. /************************************************************************/
  17780. var __webpack_exports__ = {};
17781. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  17782. (() => {
  17783. "use strict";
  17784. // EXPORTS
  17785. __webpack_require__.d(__webpack_exports__, {
  17786. "default": () => (/* binding */ Martins)
  17787. });
  17788. // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
  17789. var speedy_vision = __webpack_require__(528);
  17790. var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
  17791. ;// CONCATENATED MODULE: ./src/utils/errors.ts
  17792. /*
  17793. * MARTINS.js Free Edition
  17794. * GPU-accelerated Augmented Reality for the web
  17795. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  17796. * https://github.com/alemart/martins-js
  17797. *
  17798. * This program is free software: you can redistribute it and/or modify
  17799. * it under the terms of the GNU Affero General Public License version 3
  17800. * as published by the Free Software Foundation.
  17801. *
  17802. * This program is distributed in the hope that it will be useful,
  17803. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17804. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17805. * GNU Affero General Public License for more details.
  17806. *
  17807. * You should have received a copy of the GNU Affero General Public License
  17808. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17809. *
  17810. * errors.ts
  17811. * Error classes
  17812. */
  17813. /**
  17814. * Generic error class
  17815. */
  17816. class MartinsError extends Error {
  17817. /**
  17818. * Constructor
  17819. * @param message error message
  17820. * @param cause optional error cause
  17821. */
  17822. constructor(message = '', cause = null) {
  17823. super(`${message}\n${cause ? cause.toString() : ''}`);
  17824. this.cause = cause;
  17825. }
  17826. /**
  17827. * Error name
  17828. */
  17829. get name() {
  17830. return this.constructor.name;
  17831. }
  17832. }
  17833. /**
  17834. * A method has received one or more illegal arguments
  17835. */
  17836. class IllegalArgumentError extends MartinsError {
  17837. }
  17838. /**
  17839. * The method arguments are valid, but the method can't be called due to the
  17840. * current state of the object
  17841. */
  17842. class IllegalOperationError extends MartinsError {
  17843. }
  17844. /**
  17845. * The requested operation is not supported
  17846. */
  17847. class NotSupportedError extends MartinsError {
  17848. }
  17849. /**
  17850. * Access denied
  17851. */
  17852. class AccessDeniedError extends MartinsError {
  17853. }
  17854. /**
  17855. * Assertion error
  17856. */
  17857. class AssertionError extends MartinsError {
  17858. }
  17859. /**
  17860. * Tracking error
  17861. */
  17862. class TrackingError extends MartinsError {
  17863. }
  17864. /**
  17865. * Detection error
  17866. */
  17867. class DetectionError extends MartinsError {
  17868. }
  17869. /**
  17870. * Training error
  17871. */
  17872. class TrainingError extends MartinsError {
  17873. }
  17874. ;// CONCATENATED MODULE: ./src/core/resolution.ts
  17875. /*
  17876. * MARTINS.js Free Edition
  17877. * GPU-accelerated Augmented Reality for the web
  17878. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  17879. * https://github.com/alemart/martins-js
  17880. *
  17881. * This program is free software: you can redistribute it and/or modify
  17882. * it under the terms of the GNU Affero General Public License version 3
  17883. * as published by the Free Software Foundation.
  17884. *
  17885. * This program is distributed in the hope that it will be useful,
  17886. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17887. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17888. * GNU Affero General Public License for more details.
  17889. *
  17890. * You should have received a copy of the GNU Affero General Public License
  17891. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17892. *
  17893. * resolution.ts
  17894. * Resolution utilities
  17895. */
  17896. /** Reference heights when in landscape mode, measured in pixels */
  17897. const REFERENCE_HEIGHT = {
  17898. 'xs': 120,
  17899. 'xs+': 160,
  17900. 'sm': 200,
  17901. 'sm+': 240,
  17902. 'md': 320,
  17903. 'md+': 360,
  17904. 'lg': 480,
  17905. 'lg+': 600,
  17906. };
  17907. /**
  17908. * Convert a resolution type to a (width, height) pair
  17909. * @param resolution resolution type
  17910. * @param aspectRatio desired width / height ratio
  17911. * @returns size in pixels
  17912. */
  17913. function computeResolution(resolution, aspectRatio) {
  17914. const referenceHeight = REFERENCE_HEIGHT[resolution];
  17915. let width = 0, height = 0;
  17916. if (aspectRatio >= 1) {
  17917. // landscape
  17918. height = referenceHeight;
  17919. width = Math.round(height * aspectRatio);
  17920. width -= width % 2;
  17921. }
  17922. else {
  17923. // portrait
  17924. width = referenceHeight;
  17925. height = Math.round(width / aspectRatio);
  17926. height -= height % 2;
  17927. }
  17928. return speedy_vision_default().Size(width, height);
  17929. }
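/*
 * Worked example: for resolution 'md' (reference height 320) and a 16:9 aspect
 * ratio, the landscape branch gives height = 320 and
 * width = round(320 * 16/9) = 569, rounded down to the even value 568, so the
 * function returns a 568x320 size; with a 9:16 (portrait) ratio it returns
 * 320x568 instead.
 */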
  17930. ;// CONCATENATED MODULE: ./src/utils/utils.ts
  17931. /*
  17932. * MARTINS.js Free Edition
  17933. * GPU-accelerated Augmented Reality for the web
  17934. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  17935. * https://github.com/alemart/martins-js
  17936. *
  17937. * This program is free software: you can redistribute it and/or modify
  17938. * it under the terms of the GNU Affero General Public License version 3
  17939. * as published by the Free Software Foundation.
  17940. *
  17941. * This program is distributed in the hope that it will be useful,
  17942. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17943. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17944. * GNU Affero General Public License for more details.
  17945. *
  17946. * You should have received a copy of the GNU Affero General Public License
  17947. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17948. *
  17949. * utils.ts
  17950. * Generic utilities
  17951. */
  17952. /**
  17953. * Generic utilities
  17954. */
  17955. class Utils {
  17956. /**
  17957. * Log a message
  17958. * @param message
  17959. * @param args optional additional messages
  17960. */
  17961. static log(message, ...args) {
  17962. console.log('[martins-js]', message, ...args);
  17963. }
  17964. /**
  17965. * Display a warning
  17966. * @param message
  17967. * @param args optional additional messages
  17968. */
  17969. static warning(message, ...args) {
  17970. console.warn('[martins-js]', message, ...args);
  17971. }
  17972. /**
  17973. * Display an error message
  17974. * @param message
  17975. * @param args optional additional messages
  17976. */
  17977. static error(message, ...args) {
  17978. console.error('[martins-js]', message, ...args);
  17979. }
  17980. /**
  17981. * Assertion
  17982. * @param expr expression
  17983. * @param errorMessage optional error message
  17984. * @throws {AssertionError}
  17985. */
  17986. static assert(expr, errorMessage = '') {
  17987. if (!expr)
  17988. throw new AssertionError(errorMessage);
  17989. }
  17990. /**
  17991. * Returns a range [0, 1, ..., n-1]
  17992. * @param n non-negative integer
  17993. * @returns range from 0 to n-1, inclusive
  17994. */
  17995. static range(n) {
  17996. if ((n |= 0) < 0)
  17997. throw new IllegalArgumentError();
  17998. return Array.from({ length: n }, (_, i) => i);
  17999. }
  18000. /**
  18001. * Convert a resolution type to a resolution measured in pixels
  18002. * @param resolution resolution type
  18003. * @param aspectRatio width / height ratio
  18004. * @returns resolution measured in pixels
  18005. */
  18006. static resolution(resolution, aspectRatio) {
  18007. return computeResolution(resolution, aspectRatio);
  18008. }
  18009. }
  18010. ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
  18011. /*
  18012. * MARTINS.js Free Edition
  18013. * GPU-accelerated Augmented Reality for the web
  18014. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18015. * https://github.com/alemart/martins-js
  18016. *
  18017. * This program is free software: you can redistribute it and/or modify
  18018. * it under the terms of the GNU Affero General Public License version 3
  18019. * as published by the Free Software Foundation.
  18020. *
  18021. * This program is distributed in the hope that it will be useful,
  18022. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18023. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18024. * GNU Affero General Public License for more details.
  18025. *
  18026. * You should have received a copy of the GNU Affero General Public License
  18027. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18028. *
  18029. * ar-events.ts
  18030. * AR-related Events
  18031. */
  18032. /**
  18033. * AR Event
  18034. */
  18035. class AREvent extends Event {
  18036. /**
  18037. * Constructor
  18038. * @param type event type
  18039. */
  18040. constructor(type) {
  18041. super(type);
  18042. }
  18043. /**
  18044. * Event type
  18045. */
  18046. get type() {
  18047. return super.type;
  18048. }
  18049. }
  18050. /**
  18051. * AR Event Target
  18052. */
  18053. class AREventTarget {
  18054. /**
  18055. * Constructor
  18056. */
  18057. constructor() {
  18058. this._delegate = new EventTarget();
  18059. }
  18060. /**
  18061. * Add event listener
  18062. * @param type event type
  18063. * @param callback
  18064. */
  18065. addEventListener(type, callback) {
  18066. this._delegate.addEventListener(type, callback);
  18067. }
  18068. /**
  18069. * Remove event listener
  18070. * @param type event type
  18071. * @param callback
  18072. */
  18073. removeEventListener(type, callback) {
  18074. this._delegate.removeEventListener(type, callback);
  18075. }
  18076. /**
  18077. * Synchronously trigger an event
  18078. * @param event
  18079. * @returns same value as a standard event target
  18080. * @internal
  18081. */
  18082. dispatchEvent(event) {
  18083. return this._delegate.dispatchEvent(event);
  18084. }
  18085. }
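/*
 * Illustrative usage, assuming `viewport` is one of the viewport objects
 * defined below (they are AREventTargets and dispatch a 'resize' event
 * whenever their size changes):
 *
 *     viewport.addEventListener('resize', () => {
 *         console.log('viewport resized to', viewport.virtualSize);
 *     });
 */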
  18086. ;// CONCATENATED MODULE: ./src/core/hud.ts
  18087. /*
  18088. * MARTINS.js Free Edition
  18089. * GPU-accelerated Augmented Reality for the web
  18090. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18091. * https://github.com/alemart/martins-js
  18092. *
  18093. * This program is free software: you can redistribute it and/or modify
  18094. * it under the terms of the GNU Affero General Public License version 3
  18095. * as published by the Free Software Foundation.
  18096. *
  18097. * This program is distributed in the hope that it will be useful,
  18098. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18099. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18100. * GNU Affero General Public License for more details.
  18101. *
  18102. * You should have received a copy of the GNU Affero General Public License
  18103. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18104. *
  18105. * hud.ts
  18106. * Heads Up Display
  18107. */
  18108. /**
  18109. * Heads Up Display: an overlay displayed in front of the augmented scene
  18110. */
  18111. class HUD {
  18112. /**
  18113. * Constructor
  18114. * @param parent parent of the hud container
  18115. * @param hudContainer an existing hud container (optional)
  18116. */
  18117. constructor(parent, hudContainer) {
  18118. this._container = hudContainer || this._createContainer(parent);
  18119. this._ownContainer = (hudContainer == null);
  18120. // validate
  18121. if (this._container.parentElement !== parent)
  18122. throw new IllegalArgumentError('The container of the HUD must be a direct child of the container of the viewport');
  18123. // the HUD should be hidden initially
  18124. if (!this._container.hidden)
  18125. Utils.warning(`The container of the HUD should have the hidden attribute`);
  18126. }
  18127. /**
  18128. * The container of the HUD
  18129. */
  18130. get container() {
  18131. return this._container;
  18132. }
  18133. /**
  18134. * Whether or not the HUD is visible
  18135. */
  18136. get visible() {
  18137. return !this._container.hidden;
  18138. }
  18139. /**
  18140. * Whether or not the HUD is visible
  18141. */
  18142. set visible(visible) {
  18143. this._container.hidden = !visible;
  18144. }
  18145. /**
  18146. * Initialize the HUD
  18147. * @param zIndex the z-index of the container
  18148. * @internal
  18149. */
  18150. _init(zIndex) {
  18151. const container = this._container;
  18152. container.style.position = 'absolute';
  18153. container.style.left = container.style.top = '0px';
  18154. container.style.right = container.style.bottom = '0px';
  18155. container.style.padding = container.style.margin = '0px';
  18156. container.style.zIndex = String(zIndex);
  18157. container.style.userSelect = 'none';
  18158. }
  18159. /**
  18160. * Release the HUD
  18161. * @internal
  18162. */
  18163. _release() {
  18164. if (this._ownContainer) {
  18165. this._ownContainer = false;
  18166. this._container.remove();
  18167. }
  18168. }
  18169. /**
  18170. * Create a HUD container as an immediate child of the input node
  18171. * @param parent parent container
  18172. * @returns HUD container
  18173. */
  18174. _createContainer(parent) {
  18175. const node = document.createElement('div');
  18176. node.hidden = true;
  18177. parent.appendChild(node);
  18178. return node;
  18179. }
  18180. }
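/*
 * Sketch of a user-supplied HUD container, assuming `viewportContainer` is the
 * element that will be given to the viewport: it must be a direct child of
 * that element and should start hidden, otherwise the constructor above throws
 * or warns.
 *
 *     const hudContainer = document.createElement('div');
 *     hudContainer.hidden = true;
 *     viewportContainer.appendChild(hudContainer);
 *     // ... later, pass hudContainer in the viewport settings
 */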
  18181. ;// CONCATENATED MODULE: ./src/core/viewport.ts
  18182. /*
  18183. * MARTINS.js Free Edition
  18184. * GPU-accelerated Augmented Reality for the web
  18185. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18186. * https://github.com/alemart/martins-js
  18187. *
  18188. * This program is free software: you can redistribute it and/or modify
  18189. * it under the terms of the GNU Affero General Public License version 3
  18190. * as published by the Free Software Foundation.
  18191. *
  18192. * This program is distributed in the hope that it will be useful,
  18193. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18194. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18195. * GNU Affero General Public License for more details.
  18196. *
  18197. * You should have received a copy of the GNU Affero General Public License
  18198. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18199. *
  18200. * viewport.ts
  18201. * Viewport
  18202. */
  18203. /** An event emitted by a Viewport */
  18204. class ViewportEvent extends AREvent {
  18205. }
  18206. /** Default viewport constructor settings */
  18207. const DEFAULT_VIEWPORT_SETTINGS = {
  18208. container: null,
  18209. hudContainer: null,
  18210. resolution: 'lg',
  18211. canvas: null,
  18212. };
  18213. /** Base z-index of the children of the viewport container */
  18214. const BASE_ZINDEX = 0;
  18215. /** Default viewport width, in pixels */
  18216. const DEFAULT_VIEWPORT_WIDTH = 300;
  18217. /** Default viewport height, in pixels */
  18218. const DEFAULT_VIEWPORT_HEIGHT = 150;
  18219. /**
  18220. * Viewport
  18221. */
  18222. class BaseViewport extends AREventTarget {
  18223. /**
  18224. * Constructor
  18225. * @param viewportSettings
  18226. */
  18227. constructor(viewportSettings) {
  18228. super();
  18229. // validate settings
  18230. const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
  18231. if (settings.container == null)
  18232. throw new IllegalArgumentError('Unspecified viewport container');
  18233. // initialize attributes
  18234. this._resolution = settings.resolution;
  18235. this._container = settings.container;
  18236. this._hud = new HUD(settings.container, settings.hudContainer);
  18237. this._parentOfImportedForegroundCanvas = settings.canvas ? settings.canvas.parentNode : null;
  18238. // create canvas elements
  18239. const size = speedy_vision_default().Size(DEFAULT_VIEWPORT_WIDTH, DEFAULT_VIEWPORT_HEIGHT);
  18240. this._backgroundCanvas = this._createBackgroundCanvas(this._container, size);
  18241. this._foregroundCanvas = settings.canvas == null ?
  18242. this._createForegroundCanvas(this._container, size) :
18243. this._importForegroundCanvas(settings.canvas, this._container, size);
  18244. }
  18245. /**
  18246. * Viewport container
  18247. */
  18248. get container() {
  18249. return this._container;
  18250. }
  18251. /**
  18252. * HUD
  18253. */
  18254. get hud() {
  18255. return this._hud;
  18256. }
  18257. /**
  18258. * Resolution of the virtual scene
  18259. */
  18260. get resolution() {
  18261. return this._resolution;
  18262. }
  18263. /**
  18264. * Size in pixels of the drawing buffer of the canvas
  18265. * on which the virtual scene will be drawn
  18266. */
  18267. get virtualSize() {
  18268. const aspectRatio = this._backgroundCanvas.width / this._backgroundCanvas.height;
  18269. return Utils.resolution(this._resolution, aspectRatio);
  18270. }
  18271. /**
  18272. * The canvas on which the virtual scene will be drawn
  18273. */
  18274. get canvas() {
  18275. return this._foregroundCanvas;
  18276. }
  18277. /**
  18278. * Background canvas
  18279. * @internal
  18280. */
  18281. get _background() {
  18282. return this._backgroundCanvas;
  18283. }
  18284. /**
  18285. * Size of the drawing buffer of the background canvas, in pixels
  18286. * @internal
  18287. */
  18288. get _size() {
  18289. throw new IllegalOperationError();
  18290. }
  18291. /**
  18292. * Initialize the viewport (when the session starts)
  18293. * @internal
  18294. */
  18295. _init() {
  18296. this._container.style.touchAction = 'none';
  18297. this._hud._init(BASE_ZINDEX + 2);
  18298. this._hud.visible = true;
  18299. }
  18300. /**
18301. * Release the viewport (when the session ends)
  18302. * @internal
  18303. */
  18304. _release() {
  18305. //this._hud.visible = false; // depends on the type of the viewport
  18306. this._hud._release();
  18307. this._restoreImportedForegroundCanvas();
  18308. this._container.style.touchAction = 'auto';
  18309. }
  18310. /**
  18311. * Function to be called when the viewport is resized
  18312. * @internal
  18313. */
  18314. _onResize() {
  18315. // Resize the drawing buffer of the foreground canvas, so that it
  18316. // matches the desired resolution and the aspect ratio of the
  18317. // background canvas
  18318. const virtualSize = this.virtualSize;
  18319. this._foregroundCanvas.width = virtualSize.width;
  18320. this._foregroundCanvas.height = virtualSize.height;
  18321. this._styleCanvas(this._foregroundCanvas, 'foreground');
  18322. // dispatch event
  18323. const event = new ViewportEvent('resize');
  18324. this.dispatchEvent(event);
  18325. }
  18326. /**
  18327. * Create the background canvas
  18328. * @param parent parent container
  18329. * @param size size of the drawing buffer
  18330. * @returns a new canvas as a child of parent
  18331. */
  18332. _createBackgroundCanvas(parent, size) {
  18333. const canvas = this._createCanvas(parent, size);
  18334. return this._styleCanvas(canvas, 'background');
  18335. }
  18336. /**
  18337. * Create the foreground canvas
  18338. * @param parent parent container
  18339. * @param size size of the drawing buffer
  18340. * @returns a new canvas as a child of parent
  18341. */
  18342. _createForegroundCanvas(parent, size) {
  18343. const canvas = this._createCanvas(parent, size);
  18344. return this._styleCanvas(canvas, 'foreground');
  18345. }
  18346. /**
  18347. * Create a canvas and attach it to another HTML element
  18348. * @param parent parent container
  18349. * @param size size of the drawing buffer
  18350. * @returns a new canvas as a child of parent
  18351. */
  18352. _createCanvas(parent, size) {
  18353. const canvas = document.createElement('canvas');
  18354. canvas.width = size.width;
  18355. canvas.height = size.height;
  18356. parent.appendChild(canvas);
  18357. return canvas;
  18358. }
  18359. /**
  18360. * Add suitable CSS rules to a canvas
  18361. * @param canvas
  18362. * @param canvasType
  18363. * @returns canvas
  18364. */
  18365. _styleCanvas(canvas, canvasType) {
  18366. const offset = (canvasType == 'foreground') ? 1 : 0;
  18367. const zIndex = BASE_ZINDEX + offset;
  18368. canvas.setAttribute('style', [
  18369. 'position: absolute',
  18370. 'left: 0px',
  18371. 'top: 0px',
  18372. 'z-index: ' + String(zIndex),
  18373. 'width: 100% !important',
  18374. 'height: 100% !important',
  18375. ].join('; '));
  18376. return canvas;
  18377. }
  18378. /**
  18379. * Import an existing foreground canvas to the viewport
  18380. * @param canvas existing canvas
  18381. * @param parent parent container
  18382. * @param size size of the drawing buffer
  18383. * @returns the input canvas
  18384. */
  18385. _importForegroundCanvas(canvas, parent, size) {
  18386. if (!(canvas instanceof HTMLCanvasElement))
  18387. throw new IllegalArgumentError(`Not a <canvas>: ${canvas}`);
  18388. // borrow the canvas; add it as a child of the viewport container
  18389. canvas.remove();
  18390. parent.appendChild(canvas);
  18391. canvas.width = size.width;
  18392. canvas.height = size.height;
  18393. canvas.dataset.cssText = canvas.style.cssText; // save CSS
  18394. canvas.style.cssText = ''; // clear CSS
  18395. this._styleCanvas(canvas, 'foreground');
  18396. return canvas;
  18397. }
  18398. /**
  18399. * Restore a previously imported foreground canvas to its original parent
  18400. */
  18401. _restoreImportedForegroundCanvas() {
  18402. // not an imported canvas; nothing to do
  18403. if (this._parentOfImportedForegroundCanvas == null)
  18404. return;
  18405. const canvas = this._foregroundCanvas;
  18406. canvas.style.cssText = canvas.dataset.cssText || ''; // restore CSS
  18407. canvas.remove();
  18408. this._parentOfImportedForegroundCanvas.appendChild(canvas);
  18409. }
  18410. }
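/*
 * The settings object consumed by the constructor above follows the shape of
 * DEFAULT_VIEWPORT_SETTINGS. A minimal sketch, assuming `myContainer` is an
 * element of the page:
 *
 *     const viewportSettings = {
 *         container: myContainer, // required
 *         hudContainer: null,     // optional; created automatically if omitted
 *         resolution: 'lg',       // resolution of the virtual scene
 *         canvas: null            // optional existing foreground canvas to borrow
 *     };
 */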
  18411. /**
  18412. * Viewport decorator
  18413. */
  18414. class ViewportDecorator extends AREventTarget {
  18415. /**
  18416. * Constructor
  18417. * @param base to be decorated
  18418. * @param getSize size getter
  18419. */
  18420. constructor(base, getSize) {
  18421. super();
  18422. this._base = base;
  18423. this._getSize = getSize;
  18424. }
  18425. /**
  18426. * Viewport container
  18427. */
  18428. get container() {
  18429. return this._base.container;
  18430. }
  18431. /**
  18432. * HUD
  18433. */
  18434. get hud() {
  18435. return this._base.hud;
  18436. }
  18437. /**
  18438. * Resolution of the virtual scene
  18439. */
  18440. get resolution() {
  18441. return this._base.resolution;
  18442. }
  18443. /**
  18444. * Size in pixels of the drawing buffer of the canvas
  18445. * on which the virtual scene will be drawn
  18446. */
  18447. get virtualSize() {
  18448. return this._base.virtualSize;
  18449. }
  18450. /**
  18451. * The canvas on which the virtual scene will be drawn
  18452. */
  18453. get canvas() {
  18454. return this._base.canvas;
  18455. }
  18456. /**
  18457. * Background canvas
  18458. * @internal
  18459. */
  18460. get _background() {
  18461. return this._base._background;
  18462. }
  18463. /**
  18464. * Size of the drawing buffer of the background canvas, in pixels
  18465. * @internal
  18466. */
  18467. get _size() {
  18468. return this._getSize();
  18469. }
  18470. /**
  18471. * Initialize the viewport
  18472. * @internal
  18473. */
  18474. _init() {
  18475. this._base._init();
  18476. }
  18477. /**
  18478. * Release the viewport
  18479. * @internal
  18480. */
  18481. _release() {
  18482. this._base._release();
  18483. }
  18484. /**
  18485. * Function to be called when the viewport is resized
  18486. * @internal
  18487. */
  18488. _onResize() {
  18489. this._base._onResize();
  18490. }
  18491. /**
  18492. * Add event listener
  18493. * @param type event type
  18494. * @param callback
  18495. */
  18496. addEventListener(type, callback) {
  18497. this._base.addEventListener(type, callback);
  18498. }
  18499. /**
  18500. * Remove event listener
  18501. * @param type event type
  18502. * @param callback
  18503. */
  18504. removeEventListener(type, callback) {
  18505. this._base.removeEventListener(type, callback);
  18506. }
  18507. /**
  18508. * Synchronously trigger an event
  18509. * @param event
  18510. * @returns same value as a standard event target
  18511. * @internal
  18512. */
  18513. dispatchEvent(event) {
  18514. return this._base.dispatchEvent(event);
  18515. }
  18516. }
  18517. /**
  18518. * A viewport that watches for page resizes
  18519. */
  18520. class ResizableViewport extends ViewportDecorator {
  18521. /**
  18522. * Constructor
  18523. * @param base to be decorated
  18524. * @param getSize size getter
  18525. */
  18526. constructor(base, getSize) {
  18527. super(base, getSize);
  18528. this._active = false;
  18529. }
  18530. /**
  18531. * Initialize the viewport
  18532. * @internal
  18533. */
  18534. _init() {
  18535. super._init();
  18536. this._active = true;
  18537. // Configure the resize listener. We want the viewport
  18538. // to adjust itself if the phone/screen is resized or
  18539. // changes orientation
  18540. let timeout = null;
  18541. const onresize = () => {
  18542. if (!this._active) {
  18543. window.removeEventListener('resize', onresize);
  18544. return;
  18545. }
  18546. if (timeout !== null)
  18547. clearTimeout(timeout);
  18548. timeout = setTimeout(() => {
  18549. timeout = null;
  18550. this._resize.call(this);
  18551. this._onResize.call(this);
  18552. }, 100);
  18553. };
  18554. window.addEventListener('resize', onresize);
  18555. this._resize();
  18556. this._onResize();
  18557. }
  18558. /**
  18559. * Release the viewport
  18560. * @internal
  18561. */
  18562. _release() {
  18563. this._active = false;
  18564. super._release();
  18565. }
  18566. }
  18567. /**
  18568. * Immersive viewport: it occupies the entire page
  18569. */
  18570. class ImmersiveViewport extends ResizableViewport {
  18571. /**
  18572. * Release the viewport
  18573. * @internal
  18574. */
  18575. _release() {
  18576. this.canvas.remove();
  18577. this._background.remove();
  18578. this.hud.visible = false;
  18579. this.container.style.cssText = ''; // reset CSS
  18580. super._release();
  18581. }
  18582. /**
  18583. * Resize the immersive viewport, so that it occupies the entire page.
  18584. * We respect the aspect ratio of the source media
  18585. */
  18586. _resize() {
  18587. const { width, height } = this._size;
  18588. const viewportSize = speedy_vision_default().Size(0, 0);
  18589. const viewportAspectRatio = width / height;
  18590. const windowSize = speedy_vision_default().Size(window.innerWidth, window.innerHeight);
  18591. const windowAspectRatio = windowSize.width / windowSize.height;
  18592. // figure out the viewport size
  18593. if (viewportAspectRatio <= windowAspectRatio) {
  18594. viewportSize.height = windowSize.height;
  18595. viewportSize.width = (viewportSize.height * viewportAspectRatio) | 0;
  18596. }
  18597. else {
  18598. viewportSize.width = windowSize.width;
  18599. viewportSize.height = (viewportSize.width / viewportAspectRatio) | 0;
  18600. }
  18601. // position the viewport and set its size
  18602. const container = this.container;
  18603. container.style.position = 'fixed';
  18604. container.style.left = `calc(50% - ${viewportSize.width >>> 1}px)`;
  18605. container.style.top = `calc(50% - ${viewportSize.height >>> 1}px)`;
  18606. container.style.zIndex = '1000000000'; // 1B //String(2147483647);
  18607. container.style.width = viewportSize.width + 'px';
  18608. container.style.height = viewportSize.height + 'px';
  18609. container.style.backgroundColor = '#000';
  18610. // set the size of the drawing buffer of the background canvas
  18611. const backgroundCanvas = this._background;
  18612. const backgroundCanvasAspectRatio = viewportAspectRatio;
  18613. const referenceHeight = height;
  18614. backgroundCanvas.height = referenceHeight;
  18615. backgroundCanvas.width = (backgroundCanvas.height * backgroundCanvasAspectRatio) | 0;
  18616. }
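/*
 * Worked example: a 640x480 source (aspect ratio 4:3) on a 1920x1080 window
 * (aspect ratio 16:9) takes the first branch, yielding a 1440x1080 viewport
 * centered via left: calc(50% - 720px) and top: calc(50% - 540px); the
 * background canvas keeps the 640x480 resolution of the source.
 */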
  18617. }
  18618. /**
  18619. * Inline viewport: it follows the typical flow of a web page
  18620. */
  18621. class InlineViewport extends ResizableViewport {
  18622. /**
  18623. * Resize the inline viewport
  18624. */
  18625. _resize() {
  18626. const { width, height } = this._size;
  18627. this.container.style.position = 'relative';
  18628. this.container.style.width = width + 'px';
  18629. this.container.style.height = height + 'px';
  18630. //this.container.style.display = 'inline-block';
  18631. this._background.width = width;
  18632. this._background.height = height;
  18633. }
  18634. }
  18635. ;// CONCATENATED MODULE: ./src/core/stats.ts
  18636. /*
  18637. * MARTINS.js Free Edition
  18638. * GPU-accelerated Augmented Reality for the web
  18639. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18640. * https://github.com/alemart/martins-js
  18641. *
  18642. * This program is free software: you can redistribute it and/or modify
  18643. * it under the terms of the GNU Affero General Public License version 3
  18644. * as published by the Free Software Foundation.
  18645. *
  18646. * This program is distributed in the hope that it will be useful,
  18647. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18648. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18649. * GNU Affero General Public License for more details.
  18650. *
  18651. * You should have received a copy of the GNU Affero General Public License
  18652. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18653. *
  18654. * stats.ts
  18655. * Stats for performance measurements
  18656. */
  18657. /** update interval, given in seconds */
  18658. const UPDATE_INTERVAL = 0.5;
  18659. /**
  18660. * Stats for performance measurements
  18661. */
  18662. class Stats {
  18663. /**
  18664. * Constructor
  18665. */
  18666. constructor() {
  18667. this._timeOfLastUpdate = this._now();
  18668. this._partialCycleCount = 0;
  18669. this._cyclesPerSecond = 0;
  18670. }
  18671. /**
  18672. * Update stats - call every frame
  18673. */
  18674. update() {
  18675. const now = this._now();
  18676. ++this._partialCycleCount;
  18677. if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
  18678. this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
  18679. this._partialCycleCount = 0;
  18680. this._timeOfLastUpdate = now;
  18681. }
  18682. }
  18683. /**
  18684. * Reset stats
  18685. */
  18686. reset() {
  18687. this._timeOfLastUpdate = this._now();
  18688. this._partialCycleCount = 0;
  18689. this._cyclesPerSecond = 0;
  18690. }
  18691. /**
  18692. * Number of cycles per second
  18693. */
  18694. get cyclesPerSecond() {
  18695. return this._cyclesPerSecond;
  18696. }
  18697. /**
  18698. * A measurement of time, in milliseconds
  18699. * @returns time in ms
  18700. */
  18701. _now() {
  18702. return performance.now();
  18703. }
  18704. }
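/*
 * For illustration: calling update() once per animation frame makes
 * cyclesPerSecond report the number of frames counted over the last
 * half-second window divided by UPDATE_INTERVAL, i.e., an FPS estimate
 * refreshed roughly twice per second.
 *
 *     const stats = new Stats();
 *     function loop() {
 *         stats.update();
 *         // ... render ...
 *         requestAnimationFrame(loop);
 *     }
 */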
  18705. ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
  18706. /*
  18707. * MARTINS.js Free Edition
  18708. * GPU-accelerated Augmented Reality for the web
  18709. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18710. * https://github.com/alemart/martins-js
  18711. *
  18712. * This program is free software: you can redistribute it and/or modify
  18713. * it under the terms of the GNU Affero General Public License version 3
  18714. * as published by the Free Software Foundation.
  18715. *
  18716. * This program is distributed in the hope that it will be useful,
  18717. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18718. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18719. * GNU Affero General Public License for more details.
  18720. *
  18721. * You should have received a copy of the GNU Affero General Public License
  18722. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18723. *
  18724. * stats-panel.ts
  18725. * Stats panel used for development purposes
  18726. */
  18727. /** Update interval, in ms */
  18728. const stats_panel_UPDATE_INTERVAL = 500;
  18729. /** Icons for different power profiles */
  18730. const POWER_ICON = Object.freeze({
  18731. 'default': '',
18732. 'low-power': '<span style="color:#0f0">&#x1F50B;</span>',
18733. 'high-performance': '<span style="color:#ff0">&#x26A1;</span>'
  18734. });
  18735. /**
  18736. * Stats panel used for development purposes
  18737. */
  18738. class StatsPanel {
  18739. /**
  18740. * Constructor
  18741. * @param parent parent element of the panel
  18742. */
  18743. constructor(parent) {
  18744. this._container = this._createContainer(parent);
  18745. this._lastUpdate = 0;
  18746. }
  18747. /**
  18748. * Release the panel
  18749. */
  18750. release() {
  18751. this._container.remove();
  18752. }
  18753. /**
  18754. * A method to be called in the update loop
  18755. * @param time current time in ms
  18756. * @param trackers the trackers attached to the session
  18757. * @param sources the sources of media linked to the session
  18758. * @param gpu GPU cycles per second
  18759. * @param fps frames per second
  18760. */
  18761. update(time, trackers, sources, gpu, fps) {
  18762. if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
  18763. this._lastUpdate = time;
  18764. this._update(trackers, sources, fps, gpu);
  18765. }
  18766. }
  18767. /**
  18768. * Visibility of the panel
  18769. */
  18770. get visible() {
  18771. return !this._container.hidden;
  18772. }
  18773. /**
  18774. * Visibility of the panel
  18775. */
  18776. set visible(visible) {
  18777. this._container.hidden = !visible;
  18778. }
  18779. /**
  18780. * Update the contents of the panel
  18781. * @param trackers the trackers attached to the session
  18782. * @param sources the sources of media linked to the session
  18783. * @param fps frames per second
  18784. * @param gpu GPU cycles per second
  18785. */
  18786. _update(trackers, sources, fps, gpu) {
  18787. const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
  18788. const sourceStats = sources.map(source => source._stats).join(', ');
  18789. const param = {
  18790. fps: this._colorize(fps),
  18791. gpu: this._colorize(gpu),
  18792. powerIcon: POWER_ICON[Settings.powerPreference]
  18793. };
  18794. this._container.textContent = (`MARTINS.js ${Martins.edition}
  18795. Version ${Martins.version}
  18796. FPS: [fps] | GPU: [gpu] [powerIcon]
  18797. IN : ${sourceStats}
  18798. OUT: ${trackerStats}`);
  18799. const fn = (_, x) => param[x];
  18800. this._container.innerHTML = this._container.innerHTML.replace(/\[(\w+)\]/g, fn);
  18801. }
  18802. /**
  18803. * Colorize a frequency number
  18804. * @param f frequency given in cycles per second
  18805. * @returns colorized number (HTML)
  18806. */
  18807. _colorize(f) {
  18808. const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
  18809. const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
  18810. const color2 = f >= 30 ? GREEN : RED;
  18811. const color = Settings.powerPreference != 'low-power' ? color3 : color2;
  18812. return `<span style="color:${color}">${Number(f)}</span>`;
  18813. }
  18814. /**
  18815. * Create the container for the panel
  18816. * @param parent parent element
  18817. * @returns a container
  18818. */
  18819. _createContainer(parent) {
  18820. const container = document.createElement('div');
  18821. container.style.position = 'absolute';
  18822. container.style.left = container.style.top = '0px';
  18823. container.style.zIndex = '1000000';
  18824. container.style.padding = '4px';
  18825. container.style.whiteSpace = 'pre-line';
  18826. container.style.backgroundColor = 'rgba(0,0,0,0.5)';
  18827. container.style.color = '#fff';
  18828. container.style.fontFamily = 'monospace';
  18829. container.style.fontSize = '14px';
  18830. parent.appendChild(container);
  18831. return container;
  18832. }
  18833. }
  18834. ;// CONCATENATED MODULE: ./src/core/frame.ts
  18835. /*
  18836. * MARTINS.js Free Edition
  18837. * GPU-accelerated Augmented Reality for the web
  18838. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18839. * https://github.com/alemart/martins-js
  18840. *
  18841. * This program is free software: you can redistribute it and/or modify
  18842. * it under the terms of the GNU Affero General Public License version 3
  18843. * as published by the Free Software Foundation.
  18844. *
  18845. * This program is distributed in the hope that it will be useful,
  18846. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18847. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18848. * GNU Affero General Public License for more details.
  18849. *
  18850. * You should have received a copy of the GNU Affero General Public License
  18851. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18852. *
  18853. * frame.ts
  18854. * A Frame holds information used to render a single animation frame of a Session
  18855. */
  18856. /**
  18857. * Iterable frame results (helper class)
  18858. */
  18859. class IterableTrackerResults {
  18860. constructor(_results) {
  18861. this._results = _results;
  18862. this._index = 0;
  18863. }
  18864. next() {
  18865. const i = this._index++;
  18866. return i < this._results.length ?
  18867. { done: false, value: this._results[i] } :
  18868. { done: true, value: undefined };
  18869. }
  18870. [Symbol.iterator]() {
  18871. return this;
  18872. }
  18873. }
  18874. /**
  18875. * A Frame holds information used to render a single animation frame of a Session
  18876. */
  18877. class Frame {
  18878. /**
  18879. * Constructor
  18880. * @param session
  18881. * @param results
  18882. */
  18883. constructor(session, results) {
  18884. this._session = session;
  18885. this._results = new IterableTrackerResults(results);
  18886. }
  18887. /**
  18888. * The session of which this frame holds data
  18889. */
  18890. get session() {
  18891. return this._session;
  18892. }
  18893. /**
  18894. * The results of all trackers in this frame
  18895. */
  18896. get results() {
  18897. return this._results;
  18898. }
  18899. }
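/*
 * Since IterableTrackerResults implements the iterator protocol, the results
 * of a frame can be consumed with a plain for...of loop. A sketch, assuming
 * `frame` is an instance of the Frame class above:
 *
 *     for (const result of frame.results)
 *         console.log(result);
 */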
  18900. ;// CONCATENATED MODULE: ./src/core/time.ts
  18901. /*
  18902. * MARTINS.js Free Edition
  18903. * GPU-accelerated Augmented Reality for the web
  18904. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18905. * https://github.com/alemart/martins-js
  18906. *
  18907. * This program is free software: you can redistribute it and/or modify
  18908. * it under the terms of the GNU Affero General Public License version 3
  18909. * as published by the Free Software Foundation.
  18910. *
  18911. * This program is distributed in the hope that it will be useful,
  18912. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18913. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18914. * GNU Affero General Public License for more details.
  18915. *
  18916. * You should have received a copy of the GNU Affero General Public License
  18917. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18918. *
  18919. * time.ts
  18920. * Time utilities
  18921. */
  18922. /**
  18923. * Time Manager
  18924. */
  18925. class Time {
  18926. constructor() {
  18927. /** time scale */
  18928. this._scale = 1;
  18929. /** time since the start of the session, in milliseconds */
  18930. this._time = 0;
  18931. /** unscaled time since the start of the session, in milliseconds */
  18932. this._unscaledTime = 0;
  18933. /** elapsed time between the current and the previous frame, in milliseconds */
  18934. this._delta = 0;
  18935. /** time of the first update call, in milliseconds */
  18936. this._firstUpdate = 0;
  18937. /** time of the last update call, in milliseconds */
  18938. this._lastUpdate = Number.POSITIVE_INFINITY;
  18939. }
  18940. /**
  18941. * Update the Time Manager
  18942. * @param timestamp in milliseconds
  18943. * @internal
  18944. */
  18945. _update(timestamp) {
  18946. if (timestamp < this._lastUpdate) {
  18947. this._firstUpdate = this._lastUpdate = timestamp;
  18948. return;
  18949. }
  18950. this._delta = (timestamp - this._lastUpdate) * this._scale;
  18951. this._time += this._delta;
  18952. this._unscaledTime = timestamp - this._firstUpdate;
  18953. this._lastUpdate = timestamp;
  18954. }
  18955. /**
  18956. * Elapsed time since the start of the session, measured at the
  18957. * beginning of the current animation frame and given in seconds
  18958. */
  18959. get elapsed() {
  18960. return this._time * 0.001;
  18961. }
  18962. /**
  18963. * Elapsed time between the current and the previous animation
  18964. * frame, given in seconds
  18965. */
  18966. get delta() {
  18967. return this._delta * 0.001;
  18968. }
  18969. /**
  18970. * Time scale (defaults to 1)
  18971. */
  18972. get scale() {
  18973. return this._scale;
  18974. }
  18975. /**
  18976. * Time scale (defaults to 1)
  18977. */
  18978. set scale(scale) {
  18979. this._scale = Math.max(0, +scale);
  18980. }
  18981. /**
  18982. * Time scale independent elapsed time since the start of the session,
  18983. * measured at the beginning of the current animation frame and given
  18984. * in seconds
  18985. */
  18986. get unscaled() {
  18987. return this._unscaledTime * 0.001;
  18988. }
  18989. }
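/*
 * Usage sketch, assuming `time` is an instance of this class exposed by a
 * session: delta is given in seconds, so frame-rate independent motion can be
 * written as position += speed * time.delta, and setting time.scale = 0.5
 * plays the scene in slow motion without affecting the unscaled clock.
 */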
  18990. ;// CONCATENATED MODULE: ./src/core/gizmos.ts
  18991. /*
  18992. * MARTINS.js Free Edition
  18993. * GPU-accelerated Augmented Reality for the web
  18994. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  18995. * https://github.com/alemart/martins-js
  18996. *
  18997. * This program is free software: you can redistribute it and/or modify
  18998. * it under the terms of the GNU Affero General Public License version 3
  18999. * as published by the Free Software Foundation.
  19000. *
  19001. * This program is distributed in the hope that it will be useful,
  19002. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19003. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19004. * GNU Affero General Public License for more details.
  19005. *
  19006. * You should have received a copy of the GNU Affero General Public License
  19007. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19008. *
  19009. * gizmos.ts
  19010. * Visual cues for testing & debugging
  19011. */
  19012. /** The maximum match distance ratio we'll consider to be "good" */
  19013. const GOOD_MATCH_THRESHOLD = 0.7;
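/*
Explanatory note: this threshold implements what is essentially a Lowe-style ratio
test in isGoodMatch() below. For example, a keypoint whose two nearest matches have
distances 30 and 50 passes the test (30 <= 0.7 * 50 = 35) and is drawn in green,
whereas distances 40 and 50 fail it (40 > 35) and the keypoint is drawn in red.
*/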
  19014. /**
  19015. * Visual cues for testing & debugging
  19016. */
  19017. class Gizmos {
  19018. /**
  19019. * Constructor
  19020. */
  19021. constructor() {
  19022. this._visible = false;
  19023. }
  19024. /**
  19025. * Whether or not the gizmos will be rendered
  19026. */
  19027. get visible() {
  19028. return this._visible;
  19029. }
  19030. /**
  19031. * Whether or not the gizmos will be rendered
  19032. */
  19033. set visible(visible) {
  19034. this._visible = visible;
  19035. }
  19036. /**
  19037. * Render gizmos
  19038. * @param viewport
  19039. * @param trackers
  19040. * @internal
  19041. */
  19042. _render(viewport, trackers) {
  19043. // no need to render?
  19044. if (!this._visible)
  19045. return;
  19046. // viewport
  19047. const viewportSize = viewport._size;
  19048. const canvas = viewport._background;
  19049. const ctx = canvas.getContext('2d', { alpha: false });
  19050. if (!ctx)
  19051. throw new IllegalOperationError();
  19052. // debug
  19053. //ctx.fillStyle = '#000';
  19054. //ctx.fillRect(0, 0, canvas.width, canvas.height);
  19055. //ctx.clearRect(0, 0, canvas.width, canvas.height);
  19056. // render keypoints
  19057. for (let i = 0; i < trackers.length; i++) {
  19058. if (trackers[i].type != 'image-tracker')
  19059. continue;
  19060. const output = trackers[i]._output;
  19061. const keypoints = output.keypoints;
  19062. const screenSize = output.screenSize;
  19063. if (keypoints !== undefined && screenSize !== undefined)
  19064. this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
  19065. }
  19066. // render polylines
  19067. for (let i = 0; i < trackers.length; i++) {
  19068. if (trackers[i].type != 'image-tracker')
  19069. continue;
  19070. const output = trackers[i]._output;
  19071. const polyline = output.polyline;
  19072. const screenSize = output.screenSize;
  19073. if (polyline !== undefined && screenSize !== undefined)
  19074. this._renderPolyline(ctx, polyline, screenSize, viewportSize);
  19075. }
  19076. // render the axes of the 3D coordinate system
  19077. for (let i = 0; i < trackers.length; i++) {
  19078. if (trackers[i].type != 'image-tracker')
  19079. continue;
  19080. const output = trackers[i]._output;
  19081. const cameraMatrix = output.cameraMatrix;
  19082. const screenSize = output.screenSize;
  19083. if (cameraMatrix !== undefined && screenSize !== undefined)
  19084. this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
  19085. }
  19086. }
  19087. /**
  19088. * Split keypoints in matched/unmatched categories and
  19089. * render them for testing & development purposes
  19090. * @param ctx canvas 2D context
  19091. * @param keypoints keypoints to render
  19092. * @param screenSize AR screen size
  19093. * @param viewportSize viewport size
  19094. * @param size base keypoint rendering size
  19095. */
  19096. _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
  19097. if (keypoints.length == 0)
  19098. return;
  19099. if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
  19100. this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
  19101. return;
  19102. }
  19103. const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
  19104. (keypoint.matches.length > 1 &&
  19105. keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
  19106. keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
  19107. const matchedKeypoints = keypoints;
  19108. const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
  19109. const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
  19110. this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
  19111. this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
  19112. }
  19113. /**
  19114. * Render keypoints for testing & development purposes
  19115. * @param ctx canvas 2D context
  19116. * @param keypoints keypoints to render
  19117. * @param screenSize AR screen size
  19118. * @param viewportSize viewport size
  19119. * @param color color of the rendered keypoints
  19120. * @param size base keypoint rendering size
  19121. */
  19122. _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
  19123. const sx = viewportSize.width / screenSize.width;
  19124. const sy = viewportSize.height / screenSize.height;
  19125. ctx.beginPath();
  19126. for (let i = keypoints.length - 1; i >= 0; i--) {
  19127. const keypoint = keypoints[i];
  19128. const x = (keypoint.x * sx + 0.5) | 0;
  19129. const y = (keypoint.y * sy + 0.5) | 0;
  19130. const r = (size * keypoint.scale + 0.5) | 0;
  19131. ctx.rect(x - r, y - r, 2 * r, 2 * r);
  19132. }
  19133. ctx.strokeStyle = color;
  19134. ctx.lineWidth = 1;
  19135. ctx.stroke();
  19136. }
  19137. /**
  19138. * Render polyline for testing & development purposes
  19139. * @param ctx canvas 2D context
  19140. * @param polyline vertices
  19141. * @param screenSize AR screen size
  19142. * @param viewportSize viewport size
  19143. * @param color color of the rendered polyline
  19144. * @param lineWidth
  19145. */
  19146. _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
  19147. if (polyline.length == 0)
  19148. return;
  19149. const n = polyline.length;
  19150. const sx = viewportSize.width / screenSize.width;
  19151. const sy = viewportSize.height / screenSize.height;
  19152. // render polyline
  19153. ctx.beginPath();
  19154. ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
  19155. for (let j = 0; j < n; j++)
  19156. ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
  19157. ctx.strokeStyle = color;
  19158. ctx.lineWidth = lineWidth;
  19159. ctx.stroke();
  19160. }
  19161. /**
  19162. * Render the axes of a 3D coordinate system
  19163. * @param ctx canvas 2D context
  19164. * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
  19165. * @param screenSize AR screen size
  19166. * @param viewportSize viewport size
  19167. * @param lineWidth
  19168. */
  19169. _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
  19170. const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
  19171. const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
  19172. const length = 1; // length of each axis-corresponding line, given in normalized space units
  19173. const sx = viewportSize.width / screenSize.width;
  19174. const sy = viewportSize.height / screenSize.height;
  19175. /*
  19176. Multiply the 3x4 camera matrix P by:
  19177. [ 0 L 0 0 ]
  19178. [ 0 0 L 0 ] , where L = length in normalized space of the lines
  19179. [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
  19180. [ 1 1 1 1 ]
  19181. Each column of the resulting matrix will give us the pixel coordinates
  19182. we're looking for.
  19183. Note: we're working with homogeneous coordinates
  19184. */
  19185. const p = cameraMatrix.read();
  19186. const l = length;
  19187. const o = [p[9], p[10], p[11]]; // origin of the coordinate system
  19188. const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
  19189. const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
  19190. const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
  19191. const axis = [x, y, z];
  19192. // draw each axis
  19193. const ox = o[0] / o[2], oy = o[1] / o[2];
  19194. for (let i = 0; i < 3; i++) {
  19195. const q = axis[i];
  19196. const x = q[0] / q[2], y = q[1] / q[2];
  19197. ctx.beginPath();
  19198. ctx.moveTo(ox * sx, oy * sy);
  19199. ctx.lineTo(x * sx, y * sy);
  19200. ctx.strokeStyle = color[i];
  19201. ctx.lineWidth = lineWidth;
  19202. ctx.stroke();
  19203. }
  19204. //console.log("Origin",ox,oy);
  19205. }
  19206. }
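/*
Usage sketch (illustrative only): gizmos are exposed through session.gizmos and are
hidden by default. Enabling them draws the detected keypoints, the polyline around
the tracked image and the axes of the 3D coordinate system on the background canvas.

    session.gizmos.visible = true;     // assuming `session` is a started Session
*/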
  19207. ;// CONCATENATED MODULE: ./src/utils/asap.ts
  19208. /*
  19209. * MARTINS.js Free Edition
  19210. * GPU-accelerated Augmented Reality for the web
  19211. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19212. * https://github.com/alemart/martins-js
  19213. *
  19214. * This program is free software: you can redistribute it and/or modify
  19215. * it under the terms of the GNU Affero General Public License version 3
  19216. * as published by the Free Software Foundation.
  19217. *
  19218. * This program is distributed in the hope that it will be useful,
  19219. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19220. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19221. * GNU Affero General Public License for more details.
  19222. *
  19223. * You should have received a copy of the GNU Affero General Public License
  19224. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19225. *
  19226. * asap.ts
  19227. * Schedule a function to run "as soon as possible"
  19228. */
  19229. /** callbacks */
  19230. const callbacks = [];
  19231. /** arguments to be passed to the callbacks */
  19232. const args = [];
  19233. /** asap key */
  19234. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  19235. // Register an event listener
  19236. window.addEventListener('message', event => {
  19237. if (event.source !== window || event.data !== ASAP_KEY)
  19238. return;
  19239. event.stopPropagation();
  19240. if (callbacks.length == 0)
  19241. return;
  19242. const fn = callbacks.pop();
  19243. const argArray = args.pop();
  19244. fn.apply(undefined, argArray);
  19245. }, true);
  19246. /**
  19247. * Schedule a function to run "as soon as possible"
  19248. * @param fn callback
  19249. * @param params optional parameters
  19250. */
  19251. function asap(fn, ...params) {
  19252. callbacks.unshift(fn);
  19253. args.unshift(params);
  19254. window.postMessage(ASAP_KEY, '*');
  19255. }
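/*
Usage sketch (illustrative only): asap() schedules a callback through a
window.postMessage() round-trip, which typically fires sooner than setTimeout(fn, 0)
because it is not subject to timer clamping.

    asap((a, b) => console.log(a + b), 2, 3);   // logs 5 "as soon as possible"
*/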
  19256. ;// CONCATENATED MODULE: ./src/core/session.ts
  19257. /*
  19258. * MARTINS.js Free Edition
  19259. * GPU-accelerated Augmented Reality for the web
  19260. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19261. * https://github.com/alemart/martins-js
  19262. *
  19263. * This program is free software: you can redistribute it and/or modify
  19264. * it under the terms of the GNU Affero General Public License version 3
  19265. * as published by the Free Software Foundation.
  19266. *
  19267. * This program is distributed in the hope that it will be useful,
  19268. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19269. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19270. * GNU Affero General Public License for more details.
  19271. *
  19272. * You should have received a copy of the GNU Affero General Public License
  19273. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19274. *
  19275. * session.ts
  19276. * WebAR Session
  19277. */
  19278. /** An event emitted by a Session */
  19279. class SessionEvent extends AREvent {
  19280. }
  19281. /** Default options when starting a session */
  19282. const DEFAULT_OPTIONS = {
  19283. mode: 'immersive',
  19284. trackers: [],
  19285. sources: [],
  19286. viewport: null,
  19287. stats: false,
  19288. gizmos: false,
  19289. };
  19290. /**
  19291. * A Session represents an intent to display AR content
  19292. * and encapsulates the main loop (update-render cycle)
  19293. */
  19294. class Session extends AREventTarget {
  19295. /**
  19296. * Constructor
  19297. * @param sources previously initialized sources of data
  19298. * @param mode session mode
  19299. * @param viewport viewport
  19300. * @param stats render stats panel?
  19301. * @param gizmos render gizmos?
  19302. */
  19303. constructor(sources, mode, viewport, stats, gizmos) {
  19304. super();
  19305. this._mode = mode;
  19306. this._trackers = [];
  19307. this._sources = sources;
  19308. this._updateStats = new Stats();
  19309. this._renderStats = new Stats();
  19310. this._active = true;
  19311. this._frameReady = true; // no trackers at the moment
  19312. this._rafQueue = [];
  19313. this._time = new Time();
  19314. this._gizmos = new Gizmos();
  19315. this._gizmos.visible = gizmos;
  19316. // get media
  19317. const media = this.media;
  19318. // setup the viewport
  19319. if (mode == 'immersive')
  19320. this._viewport = new ImmersiveViewport(viewport, () => media.size);
  19321. else if (mode == 'inline')
  19322. this._viewport = new InlineViewport(viewport, () => media.size);
  19323. else
  19324. throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
  19325. this._viewport._init();
  19326. // setup the main loop
  19327. this._setupUpdateLoop();
  19328. this._setupRenderLoop();
  19329. // setup the stats panel
  19330. this._statsPanel = new StatsPanel(this._viewport.hud.container);
  19331. this._statsPanel.visible = stats;
  19332. // done!
  19333. Session._count++;
  19334. Utils.log(`The ${mode} session is now active!`);
  19335. }
  19336. /**
  19337. * Checks if the engine can be run in the browser the client is using
  19338. * @returns true if the engine is compatible with the browser
  19339. */
  19340. static isSupported() {
  19341. return speedy_vision_default().isSupported();
  19342. }
  19343. /**
  19344. * Instantiate a session
  19345. * @param options options
  19346. * @returns a promise that resolves to a new session
  19347. */
  19348. static instantiate(options = DEFAULT_OPTIONS) {
  19349. const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
  19350. Utils.log(`Starting a new ${mode} session...`);
  19351. return speedy_vision_default().Promise.resolve().then(() => {
  19352. // is the engine supported?
  19353. if (!Session.isSupported())
  19354. throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with the MARTINS.js engine');
  19355. // block multiple immersive sessions
  19356. if (mode !== 'inline' && Session.count > 0)
  19357. throw new IllegalOperationError(`Can't start more than one immersive session`);
  19358. // initialize matrix routines
  19359. return speedy_vision_default().Matrix.ready();
  19360. }).then(() => {
  19361. // validate sources of data
  19362. const videoSources = sources.filter(source => source._type == 'video');
  19363. if (videoSources.length != 1)
19364. throw new IllegalArgumentError(`Exactly one video source of data must be provided`);
  19365. for (let i = sources.length - 1; i >= 0; i--) {
  19366. if (sources.indexOf(sources[i]) < i)
  19367. throw new IllegalArgumentError(`Found repeated sources of data`);
  19368. }
  19369. // initialize sources of data
  19370. return speedy_vision_default().Promise.all(sources.map(source => source._init()));
  19371. }).then(() => {
  19372. // get the viewport
  19373. if (!viewport)
  19374. throw new IllegalArgumentError(`Can't create a session without a viewport`);
  19375. // instantiate session
  19376. return new Session(sources, mode, viewport, stats, gizmos);
  19377. }).then(session => {
  19378. // validate trackers
  19379. if (trackers.length == 0)
  19380. Utils.warning(`No trackers have been attached to the session!`);
  19381. for (let i = trackers.length - 1; i >= 0; i--) {
  19382. if (trackers.indexOf(trackers[i]) < i)
  19383. throw new IllegalArgumentError(`Found repeated trackers`);
  19384. }
  19385. // attach trackers and return the session
  19386. return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
  19387. }).catch(err => {
  19388. // log errors, if any
  19389. Utils.error(`Can't start session: ${err.message}`);
  19390. throw err;
  19391. });
  19392. }
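/*
Usage sketch (illustrative only): instantiate() validates its options (exactly one
video source, a non-null viewport, no repeated sources or trackers) and resolves to
a running Session. The sources, trackers and viewport below are assumed to have been
created elsewhere with the library's own factories.

    Session.instantiate({
        mode: 'immersive',
        sources: mySources,       // must include exactly one video source
        trackers: myTrackers,
        viewport: myViewport,
        stats: false,
        gizmos: false,
    }).then(session => {
        // the session is now active
    }).catch(err => console.error(err.message));
*/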
  19393. /**
  19394. * Number of active sessions
  19395. */
  19396. static get count() {
  19397. return this._count;
  19398. }
  19399. /**
  19400. * End the session
  19401. * @returns promise that resolves after the session is shut down
  19402. */
  19403. end() {
  19404. // is the session inactive?
  19405. if (!this._active)
  19406. return speedy_vision_default().Promise.resolve();
  19407. // deactivate the session
  19408. Utils.log('Shutting down the session...');
  19409. this._active = false; // set before wait()
  19410. // wait a few ms, so that the GPU is no longer sending any data
  19411. const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
  19412. setTimeout(resolve, ms);
  19413. });
  19414. // release resources
  19415. return wait(100).then(() => speedy_vision_default().Promise.all(
  19416. // release trackers
  19417. this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
  19418. // release input sources
  19419. this._sources.map(source => source._release()))).then(() => {
  19420. this._sources.length = 0;
  19421. this._trackers.length = 0;
  19422. // release internal components
  19423. this._updateStats.reset();
  19424. this._renderStats.reset();
  19425. this._statsPanel.release();
  19426. this._viewport._release();
  19427. // end the session
  19428. Session._count--;
  19429. // dispatch event
  19430. const event = new SessionEvent('end');
  19431. this.dispatchEvent(event);
  19432. // done!
  19433. Utils.log('Session ended.');
  19434. });
  19435. }
  19436. /**
  19437. * Analogous to window.requestAnimationFrame()
  19438. * @param callback
  19439. * @returns a handle
  19440. */
  19441. requestAnimationFrame(callback) {
  19442. const handle = Symbol('raf-handle');
  19443. if (this._active)
  19444. this._rafQueue.push([handle, callback]);
  19445. else
  19446. throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
  19447. return handle;
  19448. }
  19449. /**
  19450. * Analogous to window.cancelAnimationFrame()
  19451. * @param handle a handle returned by this.requestAnimationFrame()
  19452. */
  19453. cancelAnimationFrame(handle) {
  19454. for (let i = this._rafQueue.length - 1; i >= 0; i--) {
  19455. if (this._rafQueue[i][0] === handle) {
  19456. this._rafQueue.splice(i, 1);
  19457. break;
  19458. }
  19459. }
  19460. }
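/*
Usage sketch (illustrative only): unlike window.requestAnimationFrame(), the callback
registered here receives the current time (in ms) and a Frame holding the per-tracker
results. The returned handle is a Symbol accepted by cancelAnimationFrame().

    const handle = session.requestAnimationFrame(function loop(time, frame) {
        // ... read the tracking results from `frame` ...
        session.requestAnimationFrame(loop);    // re-register for the next frame
    });
    // later: session.cancelAnimationFrame(handle);
*/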
  19461. /**
  19462. * The underlying media (generally a camera stream)
  19463. * @internal
  19464. */
  19465. get media() {
  19466. for (let i = this._sources.length - 1; i >= 0; i--) {
  19467. if (this._sources[i]._type == 'video')
  19468. return this._sources[i]._data;
  19469. }
  19470. // this shouldn't happen
  19471. throw new IllegalOperationError(`Invalid input source`);
  19472. }
  19473. /**
  19474. * Session mode
  19475. */
  19476. get mode() {
  19477. return this._mode;
  19478. }
  19479. /**
  19480. * Rendering viewport
  19481. */
  19482. get viewport() {
  19483. return this._viewport;
  19484. }
  19485. /**
  19486. * Time utilities
  19487. */
  19488. get time() {
  19489. return this._time;
  19490. }
  19491. /**
  19492. * Visual cues for testing & debugging
  19493. */
  19494. get gizmos() {
  19495. return this._gizmos;
  19496. }
  19497. /**
  19498. * Attach a tracker to the session
  19499. * @param tracker
  19500. */
  19501. _attachTracker(tracker) {
  19502. if (this._trackers.indexOf(tracker) >= 0)
  19503. throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
  19504. else if (!this._active)
  19505. throw new IllegalOperationError(`Inactive session`);
  19506. this._trackers.push(tracker);
  19507. return tracker._init(this);
  19508. }
  19509. /**
  19510. * Render the user media to the background canvas
  19511. */
  19512. _renderUserMedia() {
  19513. const canvas = this._viewport._background;
  19514. const ctx = canvas.getContext('2d', { alpha: false });
  19515. if (ctx) {
  19516. ctx.imageSmoothingEnabled = false;
  19517. // draw user media
  19518. const image = this.media.source;
  19519. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  19520. // render output image(s)
  19521. for (let i = 0; i < this._trackers.length; i++) {
  19522. const image = this._trackers[i]._output.image;
  19523. if (image !== undefined)
  19524. ctx.drawImage(image.source, 0, 0, canvas.width, canvas.height);
  19525. //ctx.drawImage(image.source, canvas.width - image.width, canvas.height - image.height, image.width, image.height);
  19526. }
  19527. // render gizmos
  19528. this._gizmos._render(this._viewport, this._trackers);
  19529. }
  19530. }
  19531. /**
  19532. * Setup the update loop
  19533. */
  19534. _setupUpdateLoop() {
  19535. const scheduleNextFrame = () => {
  19536. if (this._active) {
  19537. if (Settings.powerPreference == 'high-performance')
  19538. asap(repeat);
  19539. else
  19540. window.requestAnimationFrame(repeat);
  19541. }
  19542. };
  19543. const update = () => {
  19544. this._update().then(scheduleNextFrame).turbocharge();
  19545. };
  19546. function repeat() {
  19547. if (Settings.powerPreference == 'low-power') // 30 fps
  19548. window.requestAnimationFrame(update);
  19549. else
  19550. update();
  19551. }
  19552. window.requestAnimationFrame(update);
  19553. }
  19554. /**
  19555. * The core of the update loop
  19556. */
  19557. _update() {
  19558. // active session?
  19559. if (this._active) {
  19560. return speedy_vision_default().Promise.all(
  19561. // update trackers
  19562. this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
  19563. // update internals
  19564. this._updateStats.update();
  19565. this._frameReady = true;
  19566. }).catch(err => {
  19567. // handle error
  19568. Utils.warning('Tracking error: ' + err.toString());
  19569. });
  19570. }
  19571. else {
  19572. // inactive session
  19573. this._updateStats.reset();
  19574. return speedy_vision_default().Promise.resolve();
  19575. }
  19576. }
  19577. /**
  19578. * Setup the render loop
  19579. */
  19580. _setupRenderLoop() {
  19581. let skip = false, toggle = false;
  19582. const render = (timestamp) => {
  19583. const enableFrameSkipping = (Settings.powerPreference == 'low-power');
  19584. const highPerformance = (Settings.powerPreference == 'high-performance');
  19585. // advance time
  19586. this._time._update(timestamp);
  19587. // skip frames
  19588. if (!enableFrameSkipping || !(skip = !skip))
  19589. this._render(timestamp, false);
  19590. //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
  19591. // repeat
  19592. if (this._active)
  19593. window.requestAnimationFrame(render);
  19594. };
  19595. window.requestAnimationFrame(render);
  19596. }
  19597. /**
  19598. * Render a frame (RAF callback)
  19599. * @param time current time, in ms
19600. * @param skipUserMedia skip copying the pixels of the user media to the background canvas, in order to reduce the processing load (the video stream typically runs at about 30 fps)
  19601. */
  19602. _render(time, skipUserMedia) {
  19603. // is the session active?
  19604. if (this._active) {
  19605. // are we ready to render a frame?
  19606. if (this._frameReady) {
  19607. // create a frame
  19608. const results = this._trackers.map(tracker => tracker._output.exports || ({
  19609. tracker: tracker,
  19610. trackables: [],
  19611. }));
  19612. const frame = new Frame(this, results);
  19613. // clone & clear the RAF queue
  19614. const rafQueue = this._rafQueue.slice(0);
  19615. this._rafQueue.length = 0;
  19616. // render user media
  19617. if (!skipUserMedia)
  19618. this._renderUserMedia();
  19619. // render frame
  19620. for (let i = 0; i < rafQueue.length; i++)
  19621. rafQueue[i][1].call(undefined, time, frame);
  19622. // update internals
  19623. this._renderStats.update();
  19624. this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
  19625. this._frameReady = false;
  19626. }
  19627. else {
  19628. // skip frame
  19629. ;
19630. // we update the renderStats even when we skip the frame;
19631. // otherwise renderStats would (approximately) mirror updateStats.
  19632. // This is a window.requestAnimationFrame() call, so the
  19633. // browser is rendering content even if we're not.
  19634. this._renderStats.update();
  19635. }
  19636. }
  19637. else {
  19638. // inactive session
  19639. this._renderStats.reset();
  19640. }
  19641. }
  19642. }
  19643. /** Number of active sessions */
  19644. Session._count = 0;
  19645. ;// CONCATENATED MODULE: ./src/core/settings.ts
  19646. /*
  19647. * MARTINS.js Free Edition
  19648. * GPU-accelerated Augmented Reality for the web
  19649. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19650. * https://github.com/alemart/martins-js
  19651. *
  19652. * This program is free software: you can redistribute it and/or modify
  19653. * it under the terms of the GNU Affero General Public License version 3
  19654. * as published by the Free Software Foundation.
  19655. *
  19656. * This program is distributed in the hope that it will be useful,
  19657. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19658. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19659. * GNU Affero General Public License for more details.
  19660. *
  19661. * You should have received a copy of the GNU Affero General Public License
  19662. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19663. *
  19664. * settings.ts
  19665. * Global Settings
  19666. */
  19667. /**
  19668. * Global Settings
  19669. */
  19670. class Settings {
  19671. /**
  19672. * Power preference (may impact performance x battery life)
  19673. */
  19674. static get powerPreference() {
  19675. return this._powerPreference;
  19676. }
  19677. /**
  19678. * Power preference (may impact performance x battery life)
  19679. * Note: this setting should be the very first thing you set
  19680. * (before the WebGL context is created by Speedy)
  19681. */
  19682. static set powerPreference(value) {
  19683. // validate
  19684. if (Session.count > 0)
19685. throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions`);
  19686. else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
  19687. throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
  19688. /*
  19689. // we won't use 'high-performance' for Speedy's GPU computations
  19690. // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
  19691. // also, it seems like low-power mode may break WebGL2 in some drivers?!
  19692. if(value == 'high-performance')
  19693. Speedy.Settings.powerPreference = 'default';
  19694. else
  19695. Speedy.Settings.powerPreference = value;
  19696. */
  19697. // change the GPU polling mode
  19698. if (value == 'high-performance')
  19699. (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
  19700. else
  19701. (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
  19702. // update the power preference
  19703. this._powerPreference = value;
  19704. // log
  19705. Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
  19706. }
  19707. }
  19708. Settings._powerPreference = 'default';
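/*
Usage sketch (illustrative only): the power preference must be set before any session
is started, since it also selects Speedy's GPU polling mode. Valid values are
'default', 'low-power' and 'high-performance'.

    Settings.powerPreference = 'low-power';   // favor battery life; the loops above
                                              // then run at roughly half the display rate
*/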
  19709. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
  19710. /*
  19711. * MARTINS.js Free Edition
  19712. * GPU-accelerated Augmented Reality for the web
  19713. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19714. * https://github.com/alemart/martins-js
  19715. *
  19716. * This program is free software: you can redistribute it and/or modify
  19717. * it under the terms of the GNU Affero General Public License version 3
  19718. * as published by the Free Software Foundation.
  19719. *
  19720. * This program is distributed in the hope that it will be useful,
  19721. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19722. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19723. * GNU Affero General Public License for more details.
  19724. *
  19725. * You should have received a copy of the GNU Affero General Public License
  19726. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19727. *
  19728. * reference-image-database.ts
  19729. * A collection of Reference Images
  19730. */
  19731. /** Default capacity of a Reference Image Database */
  19732. const DEFAULT_CAPACITY = 100;
  19733. /** Generate a unique name for a reference image */
  19734. const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
  19735. /**
  19736. * A collection of Reference Images
  19737. */
  19738. class ReferenceImageDatabase {
  19739. /**
  19740. * Constructor
  19741. */
  19742. constructor() {
  19743. this._capacity = DEFAULT_CAPACITY;
  19744. this._database = [];
  19745. this._locked = false;
  19746. }
  19747. /**
  19748. * The number of reference images stored in this database
  19749. */
  19750. get count() {
  19751. return this._database.length;
  19752. }
  19753. /**
  19754. * Maximum number of elements
  19755. */
  19756. get capacity() {
  19757. return this._capacity;
  19758. }
  19759. /**
  19760. * Maximum number of elements
  19761. */
  19762. /*
  19763. set capacity(value: number)
  19764. {
  19765. const capacity = Math.max(0, value | 0);
  19766. if(this.count > capacity)
  19767. throw new IllegalArgumentError(`Can't set the capacity of the database to ${this._capacity}: it currently stores ${this.count} entries`);
  19768. this._capacity = capacity;
  19769. }
  19770. */
  19771. /**
  19772. * Iterates over the collection
  19773. */
  19774. *[Symbol.iterator]() {
  19775. const ref = this._database.map(entry => entry.referenceImage);
  19776. yield* ref;
  19777. }
  19778. /**
  19779. * Add reference images to this database
  19780. * Add only the images you actually need to track!
19781. * (each image takes up storage space)
  19782. * @param referenceImages one or more reference images with unique names (a unique name will
  19783. * be generated automatically if you don't specify one)
  19784. * @returns a promise that resolves as soon as the images are loaded and added to this database
  19785. */
  19786. add(referenceImages) {
  19787. // handle no input
  19788. if (referenceImages.length == 0)
  19789. return speedy_vision_default().Promise.resolve();
  19790. // handle multiple images as input
  19791. if (referenceImages.length > 1) {
  19792. const promises = referenceImages.map(image => this.add([image]));
  19793. return speedy_vision_default().Promise.all(promises).then(() => void (0));
  19794. }
  19795. // handle a single image as input
  19796. const referenceImage = referenceImages[0];
  19797. // locked database?
  19798. if (this._locked)
  19799. throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
  19800. // reached full capacity?
  19801. if (this.count >= this.capacity)
19802. throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images would be exceeded.`);
  19803. // check for duplicate names
  19804. if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
  19805. throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
  19806. // load the media and add the reference image to the database
  19807. return speedy_vision_default().load(referenceImage.image).then(media => {
  19808. this._database.push({
  19809. referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
  19810. media: media
  19811. });
  19812. });
  19813. }
  19814. /**
  19815. * Lock the database, so that new reference images can no longer be added to it
  19816. * @internal
  19817. */
  19818. _lock() {
  19819. this._locked = true;
  19820. }
  19821. /**
  19822. * Get the media object associated to a reference image
  19823. * @param name reference image name
  19824. * @returns media
  19825. * @internal
  19826. */
  19827. _findMedia(name) {
  19828. for (let i = 0; i < this._database.length; i++) {
  19829. if (this._database[i].referenceImage.name === name)
  19830. return this._database[i].media;
  19831. }
  19832. throw new IllegalArgumentError(`Can't find reference image "${name}"`);
  19833. }
  19834. }
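/*
Usage sketch (illustrative only): a ReferenceImageDatabase belongs to an Image Tracker
and is reached through its `database` property (as the training state below does via
this._imageTracker.database). Each entry needs an image and, optionally, a unique name.

    tracker.database.add([
        { name: 'my-poster', image: myPosterImg },   // myPosterImg: an <img>, hypothetical
        { image: anotherImg },                       // a unique name will be generated
    ]).then(() => console.log('reference images loaded'));
*/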
  19835. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
  19836. /*
  19837. * MARTINS.js Free Edition
  19838. * GPU-accelerated Augmented Reality for the web
  19839. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19840. * https://github.com/alemart/martins-js
  19841. *
  19842. * This program is free software: you can redistribute it and/or modify
  19843. * it under the terms of the GNU Affero General Public License version 3
  19844. * as published by the Free Software Foundation.
  19845. *
  19846. * This program is distributed in the hope that it will be useful,
  19847. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19848. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19849. * GNU Affero General Public License for more details.
  19850. *
  19851. * You should have received a copy of the GNU Affero General Public License
  19852. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19853. *
  19854. * settings.ts
  19855. * Settings of the Image Tracker
  19856. */
  19857. /** Default tracking resolution */
  19858. const DEFAULT_TRACKING_RESOLUTION = 'sm+';
  19859. /** Maximum number of keypoints to be stored for each reference image when in the training state */
  19860. const TRAIN_MAX_KEYPOINTS = 1024; //512;
  19861. /** Percentage relative to the screen size adjusted to the aspect ratio of the reference image */
  19862. const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
  19863. /** Normalized width & height of an image target, in pixels */
  19864. const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
19865. /** Used to identify the best matches */
  19866. const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
  19867. /** Maximum number of keypoints to be analyzed when in the scanning state */
  19868. const SCAN_MAX_KEYPOINTS = 512;
  19869. /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
  19870. const SCAN_PYRAMID_LEVELS = 4; //7;
  19871. /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
  19872. const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
  19873. /** Threshold of the FAST corner detector used in the scanning/training states */
  19874. const SCAN_FAST_THRESHOLD = 60;
  19875. /** Minimum number of accepted matches for us to move out from the scanning state */
  19876. const SCAN_MIN_MATCHES = 20; //30;
19877. /** When in the scanning state, we require the image to be matched for a few consecutive frames before accepting it */
  19878. const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
  19879. /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
  19880. const SCAN_RANSAC_REPROJECTIONERROR = 5;
  19881. /** Number of tables used in the LSH-based keypoint matching */
  19882. const SCAN_LSH_TABLES = 8; // up to 32
  19883. /** Hash size, in bits, used in the LSH-based keypoint matching */
  19884. const SCAN_LSH_HASHSIZE = 15; // up to 16
  19885. /** Use the Nightvision filter when in the scanning/training state? */
  19886. const SCAN_WITH_NIGHTVISION = true;
  19887. /** Nightvision filter: gain */
  19888. const NIGHTVISION_GAIN = 0.3; // 0.2;
  19889. /** Nightvision filter: offset */
  19890. const NIGHTVISION_OFFSET = 0.5;
  19891. /** Nightvision filter: decay */
  19892. const NIGHTVISION_DECAY = 0.0;
  19893. /** Nightvision filter: quality level */
  19894. const NIGHTVISION_QUALITY = 'low';
  19895. /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
  19896. const ORB_GAUSSIAN_KSIZE = 9;
  19897. /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
  19898. const ORB_GAUSSIAN_SIGMA = 2.0;
  19899. /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
  19900. const SUBPIXEL_GAUSSIAN_KSIZE = 5;
  19901. /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
  19902. const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
  19903. /** Subpixel refinement method */
  19904. const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
  19905. /** Minimum acceptable number of matched keypoints when in the tracking state */
  19906. const TRACK_MIN_MATCHES = 4; //10; //20;
  19907. /** Maximum number of keypoints to be analyzed in the tracking state */
  19908. const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
19909. /** Capacity of the keypoint detector used in the tracking state */
  19910. const TRACK_DETECTOR_CAPACITY = 2048; //4096;
  19911. /** Quality of the Harris/Shi-Tomasi corner detector */
  19912. const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
  19913. /** Use the Nightvision filter when in the tracking state? */
  19914. const TRACK_WITH_NIGHTVISION = false; // produces shaking?
  19915. /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
  19916. const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
  19917. /** Relative size (%) used to clip keypoints from the borders of the rectified image */
  19918. const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
  19919. /** Number of iterations used to refine the target image before tracking */
  19920. const TRACK_REFINEMENT_ITERATIONS = 3;
  19921. /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
  19922. const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
  19923. /** We use a N x N grid to spatially distribute the keypoints in order to compute a better homography */
  19924. const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
19925. /** Used to identify the best matches */
  19926. const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
  19927. /** Number of consecutive frames in which we tolerate a "target lost" situation */
  19928. const TRACK_LOST_TOLERANCE = 10;
  19929. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
  19930. /*
  19931. * MARTINS.js Free Edition
  19932. * GPU-accelerated Augmented Reality for the web
  19933. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  19934. * https://github.com/alemart/martins-js
  19935. *
  19936. * This program is free software: you can redistribute it and/or modify
  19937. * it under the terms of the GNU Affero General Public License version 3
  19938. * as published by the Free Software Foundation.
  19939. *
  19940. * This program is distributed in the hope that it will be useful,
  19941. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19942. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19943. * GNU Affero General Public License for more details.
  19944. *
  19945. * You should have received a copy of the GNU Affero General Public License
  19946. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19947. *
  19948. * state.ts
  19949. * Abstract state of the Image Tracker
  19950. */
  19951. /**
  19952. * Abstract state of the Image Tracker
  19953. */
  19954. class ImageTrackerState {
  19955. /**
  19956. * Constructor
  19957. * @param name
  19958. * @param imageTracker
  19959. */
  19960. constructor(name, imageTracker) {
  19961. this._name = name;
  19962. this._imageTracker = imageTracker;
  19963. this._pipeline = this._createPipeline();
  19964. }
  19965. /**
  19966. * State name
  19967. */
  19968. get name() {
  19969. return this._name;
  19970. }
  19971. /**
  19972. * AR screen size
  19973. */
  19974. get screenSize() {
  19975. const screen = this._pipeline.node('screen');
  19976. if (!screen)
  19977. throw new IllegalOperationError();
  19978. // this is available once this state has run at least once
  19979. return screen.size;
  19980. }
  19981. /**
  19982. * Initialize the state
  19983. */
  19984. init() {
  19985. }
  19986. /**
  19987. * Release resources
  19988. */
  19989. release() {
  19990. return this._pipeline.release();
  19991. }
  19992. /**
  19993. * Update the state
  19994. * @param media user media
  19995. * @param screenSize AR screen size for image processing
  19996. * @param state all states
  19997. * @returns promise
  19998. */
  19999. update(media, screenSize) {
  20000. const source = this._pipeline.node('source');
  20001. const screen = this._pipeline.node('screen');
  20002. // validate the pipeline
  20003. if (!source || !screen)
  20004. throw new IllegalOperationError();
  20005. // prepare the pipeline
  20006. source.media = media;
  20007. screen.size = screenSize;
  20008. // run the pipeline
  20009. return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
  20010. }
  20011. /**
  20012. * Called as soon as this becomes the active state, just before update() runs for the first time
  20013. * @param settings
  20014. */
  20015. onEnterState(settings) {
  20016. }
  20017. /**
  20018. * Called when leaving the state, after update()
  20019. */
  20020. onLeaveState() {
  20021. }
  20022. /**
  20023. * Called just before the GPU processing
  20024. * @returns promise
  20025. */
  20026. _beforeUpdate() {
  20027. return speedy_vision_default().Promise.resolve();
  20028. }
  20029. /**
  20030. * GPU processing
  20031. * @returns promise with the pipeline results
  20032. */
  20033. _gpuUpdate() {
  20034. return this._pipeline.run();
  20035. }
  20036. //
  20037. // Some utility methods common to various states
  20038. //
  20039. /**
  20040. * Find the coordinates of a polyline surrounding the target image
  20041. * @param homography maps the target image to the AR screen
  20042. * @param targetSize size of the target space
20043. * @returns promise that resolves to a 2x4 matrix holding the 4 corners in AR screen space
  20044. */
  20045. _findPolylineCoordinates(homography, targetSize) {
  20046. const w = targetSize.width, h = targetSize.height;
  20047. const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
  20048. 0, 0,
  20049. w, 0,
  20050. w, h,
  20051. 0, h,
  20052. ]);
  20053. const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
  20054. return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
  20055. }
  20056. /**
  20057. * Find a polyline surrounding the target image
  20058. * @param homography maps the target image to the AR screen
  20059. * @param targetSize size of the target space
  20060. * @returns promise that resolves to 4 points in AR screen space
  20061. */
  20062. _findPolyline(homography, targetSize) {
  20063. return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
  20064. const polydata = polylineCoordinates.read();
  20065. const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
  20066. return polyline;
  20067. });
  20068. }
  20069. /**
  20070. * Whether or not to rotate the warped image in order to best fit the AR screen
  20071. * @param media media associated with the reference image
  20072. * @param screenSize AR screen
  20073. * @returns boolean
  20074. */
  20075. _mustRotateWarpedImage(media, screenSize) {
  20076. const screenAspectRatio = screenSize.width / screenSize.height;
  20077. const mediaAspectRatio = media.width / media.height;
  20078. const eps = 0.1;
  20079. return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
  20080. }
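/*
Worked example (illustrative only): with eps = 0.1, a landscape reference image of
1920x1080 (aspect ratio ~1.78 >= 1.1) displayed on a portrait AR screen of 480x640
(aspect ratio 0.75 < 0.9) makes this method return true, so the warped image is
rotated by 90 degrees before rectification (see the pre-rectification step below).
*/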
  20081. /**
  20082. * Find a rectification matrix to be applied to an image fitting the entire AR screen
  20083. * @param media media associated with the reference image
  20084. * @param screenSize AR screen
  20085. * @returns promise that resolves to a rectification matrix
  20086. */
  20087. _findRectificationMatrixOfFullscreenImage(media, screenSize) {
  20088. const b = TRACK_RECTIFIED_BORDER;
  20089. const sw = screenSize.width, sh = screenSize.height;
  20090. const mediaAspectRatio = media.width / media.height;
  20091. const mustRotate = this._mustRotateWarpedImage(media, screenSize);
  20092. // compute the vertices of the target in screen space
  20093. // we suppose portrait or landscape mode for both screen & media
  20094. const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
  20095. const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
  20096. const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
  20097. const right = sw - left;
  20098. const bottom = sh - top;
  20099. const targetVertices = speedy_vision_default().Matrix(2, 4, [
  20100. left, top,
  20101. right, top,
  20102. right, bottom,
  20103. left, bottom,
  20104. ]);
  20105. const screenVertices = speedy_vision_default().Matrix(2, 4, [
  20106. 0, 0,
  20107. sw, 0,
  20108. sw, sh,
  20109. 0, sh
  20110. ]);
  20111. const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
  20112. const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
  20113. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  20114. return (mustRotate ? speedy_vision_default().Matrix.perspective(
20115. // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
  20116. preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
  20117. // alignment: align the target to the center of the screen
  20118. speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
  20119. // pre-rectify and then align
  20120. rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
  20121. }
  20122. /**
  20123. * Find a rectification matrix to be applied to the target image
  20124. * @param homography maps a reference image to the AR screen
  20125. * @param targetSize size of the target space
  20126. * @param media media associated with the reference image
  20127. * @param screenSize AR screen
  20128. * @returns promise that resolves to a rectification matrix
  20129. */
  20130. _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
  20131. const sw = screenSize.width, sh = screenSize.height;
  20132. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  20133. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  20134. return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
  20135. // from target space to (full)screen
  20136. speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
  20137. // from (full)screen to rectified coordinates
  20138. this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
  20139. // function composition
  20140. rectificationMatrix.setTo(mat.times(rectificationMatrix)));
  20141. }
  20142. }
  20143. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
  20144. /*
  20145. * MARTINS.js Free Edition
  20146. * GPU-accelerated Augmented Reality for the web
  20147. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  20148. * https://github.com/alemart/martins-js
  20149. *
  20150. * This program is free software: you can redistribute it and/or modify
  20151. * it under the terms of the GNU Affero General Public License version 3
  20152. * as published by the Free Software Foundation.
  20153. *
  20154. * This program is distributed in the hope that it will be useful,
  20155. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20156. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20157. * GNU Affero General Public License for more details.
  20158. *
  20159. * You should have received a copy of the GNU Affero General Public License
  20160. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20161. *
  20162. * initial.ts
  20163. * Initial state of the Image Tracker
  20164. */
  20165. /**
  20166. * The purpose of the initial state of the Image Tracker
  20167. * is to initialize the training state using the state machine
  20168. */
  20169. class ImageTrackerInitialState extends ImageTrackerState {
  20170. /**
  20171. * Constructor
  20172. * @param imageTracker
  20173. */
  20174. constructor(imageTracker) {
  20175. super('initial', imageTracker);
  20176. }
  20177. /**
  20178. * Called just before the GPU processing
  20179. * @returns promise
  20180. */
  20181. _beforeUpdate() {
  20182. const source = this._pipeline.node('source');
  20183. const media = source.media;
  20184. const mediaSize = media.size;
  20185. if (mediaSize.area() < this.screenSize.area())
  20186. Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
  20187. return speedy_vision_default().Promise.resolve();
  20188. }
  20189. /**
  20190. * Post processing that takes place just after the GPU processing
  20191. * @param result pipeline results
  20192. * @returns state output
  20193. */
  20194. _afterUpdate(result) {
  20195. return speedy_vision_default().Promise.resolve({
  20196. nextState: 'training',
  20197. trackerOutput: {},
  20198. });
  20199. }
  20200. /**
  20201. * Create & setup the pipeline
  20202. * @returns pipeline
  20203. */
  20204. _createPipeline() {
  20205. // this pipeline does nothing useful,
  20206. // but it does preload some shaders...
  20207. const pipeline = speedy_vision_default().Pipeline();
  20208. const source = speedy_vision_default().Image.Source('source');
  20209. const screen = speedy_vision_default().Transform.Resize('screen');
  20210. const greyscale = speedy_vision_default().Filter.Greyscale();
  20211. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
  20212. const nightvision = speedy_vision_default().Filter.Nightvision();
  20213. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  20214. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  20215. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20216. const blur = speedy_vision_default().Filter.GaussianBlur();
  20217. const clipper = speedy_vision_default().Keypoint.Clipper();
  20218. const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
  20219. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  20220. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  20221. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  20222. const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
  20223. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
  20224. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
  20225. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  20226. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
  20227. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  20228. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
  20229. source.media = null;
  20230. screen.size = speedy_vision_default().Size(0, 0);
  20231. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20232. nightvision.quality = NIGHTVISION_QUALITY;
  20233. subpixel.method = SUBPIXEL_METHOD;
  20234. //borderClipper.imageSize = screen.size;
  20235. borderClipper.imageSize = speedy_vision_default().Size(100, 100);
  20236. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  20237. matcher.k = 1; //2;
  20238. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  20239. keypointPortalSource.source = keypointPortalSink;
  20240. muxOfReferenceKeypoints.port = 0;
  20241. muxOfBufferOfReferenceKeypoints.port = 0;
  20242. bufferOfReferenceKeypoints.frozen = false;
  20243. keypointSink.turbo = false;
  20244. // prepare input
  20245. source.output().connectTo(screen.input());
  20246. screen.output().connectTo(greyscale.input());
  20247. // preprocess images
  20248. greyscale.output().connectTo(imageRectifier.input());
  20249. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  20250. imageRectifier.output().connectTo(nightvision.input());
  20251. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20252. nightvisionMux.output().connectTo(blur.input());
  20253. // keypoint detection & clipping
  20254. nightvisionMux.output().connectTo(detector.input());
  20255. detector.output().connectTo(borderClipper.input());
  20256. borderClipper.output().connectTo(clipper.input());
  20257. // keypoint refinement
  20258. imageRectifier.output().connectTo(denoiser.input());
  20259. denoiser.output().connectTo(subpixel.input('image'));
  20260. clipper.output().connectTo(subpixel.input('keypoints'));
  20261. // keypoint description
  20262. blur.output().connectTo(descriptor.input('image'));
  20263. subpixel.output().connectTo(descriptor.input('keypoints'));
  20264. // keypoint matching
  20265. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  20266. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  20267. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  20268. descriptor.output().connectTo(matcher.input('keypoints'));
  20269. // store reference keypoints
  20270. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  20271. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  20272. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  20273. // portals
  20274. descriptor.output().connectTo(keypointPortalSink.input());
  20275. // prepare output
  20276. descriptor.output().connectTo(keypointRectifier.input());
  20277. keypointRectifier.output().connectTo(keypointSink.input());
  20278. matcher.output().connectTo(keypointSink.input('matches'));
  20279. // done!
  20280. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  20281. /*
  20282. const run = pipeline.run.bind(pipeline);
  20283. pipeline.run = function() {
  20284. console.time("TIME");
  20285. return run().then(x => {
  20286. console.timeEnd("TIME");
  20287. return x;
  20288. });
  20289. };
  20290. */
  20291. return pipeline;
  20292. }
  20293. }
  20294. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
  20295. /*
  20296. * MARTINS.js Free Edition
  20297. * GPU-accelerated Augmented Reality for the web
  20298. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  20299. * https://github.com/alemart/martins-js
  20300. *
  20301. * This program is free software: you can redistribute it and/or modify
  20302. * it under the terms of the GNU Affero General Public License version 3
  20303. * as published by the Free Software Foundation.
  20304. *
  20305. * This program is distributed in the hope that it will be useful,
  20306. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20307. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20308. * GNU Affero General Public License for more details.
  20309. *
  20310. * You should have received a copy of the GNU Affero General Public License
  20311. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20312. *
  20313. * training.ts
  20314. * Training state of the Image Tracker
  20315. */
  20316. /**
  20317. * Training state of the Image Tracker
  20318. */
  20319. class ImageTrackerTrainingState extends ImageTrackerState {
  20320. /**
  20321. * Constructor
  20322. * @param imageTracker
  20323. */
  20324. constructor(imageTracker) {
  20325. super('training', imageTracker);
  20326. this._currentImageIndex = 0;
  20327. this._image = [];
  20328. // initialize the training map
  20329. this._trainingMap = {
  20330. referenceImageIndex: [],
  20331. referenceImage: [],
  20332. keypoints: []
  20333. };
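// Note (added): the training map is a set of parallel arrays. For example, if
// image #0 yields 3 keypoints and image #1 yields 2 (hypothetical counts), then
// after training:
//   referenceImage      = [ imgA, imgB ]
//   keypoints           = [ k0, k1, k2, k3, k4 ]
//   referenceImageIndex = [ 0, 0, 0, 1, 1 ]
// so referenceImageIndex[j] tells which reference image keypoint j came from.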
  20334. }
  20335. /**
  20336. * Called as soon as this becomes the active state, just before update() runs for the first time
  20337. * @param settings
  20338. */
  20339. onEnterState(settings) {
  20340. const database = this._imageTracker.database;
  20341. // validate
  20342. if (database.count == 0)
  20343. throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
  20344. // prepare to train...
  20345. this._currentImageIndex = 0;
  20346. this._image.length = 0;
  20347. this._trainingMap.referenceImageIndex.length = 0;
  20348. this._trainingMap.referenceImage.length = 0;
  20349. this._trainingMap.keypoints.length = 0;
  20350. // lock the database
  20351. Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
  20352. database._lock();
  20353. // collect all images
  20354. for (const referenceImage of database)
  20355. this._image.push(referenceImage);
  20356. }
  20357. /**
  20358. * Called just before the GPU processing
  20359. * @returns promise
  20360. */
  20361. _beforeUpdate() {
  20362. const arScreenSize = this.screenSize;
  20363. const source = this._pipeline.node('source');
  20364. const screen = this._pipeline.node('screen');
  20365. const keypointScaler = this._pipeline.node('keypointScaler');
  20366. // this shouldn't happen
  20367. if (this._currentImageIndex >= this._image.length)
  20368. return speedy_vision_default().Promise.reject(new IllegalOperationError());
  20369. // set the appropriate training media
  20370. const database = this._imageTracker.database;
  20371. const referenceImage = this._image[this._currentImageIndex];
  20372. const media = database._findMedia(referenceImage.name);
  20373. source.media = media;
  20374. // compute the appropriate size of the training image space
  20375. const resolution = this._imageTracker.resolution;
  20376. const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
  20377. const aspectRatioOfTrainingImage = media.width / media.height;
  20378. /*
  20379. let sin = 0, cos = 1;
  20380. if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
  20381. // training image and source video: both in landscape mode or both in portrait mode
  20382. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  20383. screen.size.width = Math.round(screen.size.width * scale);
  20384. screen.size.height = Math.round(screen.size.height * scale);
  20385. }
  20386. else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
  20387. // training image: portrait mode; source video: landscape mode
  20388. screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
  20389. screen.size.width = Math.round(screen.size.width * scale);
  20390. screen.size.height = Math.round(screen.size.height * scale);
  20391. sin = 1; cos = 0; // rotate 90deg
  20392. }
  20393. else {
  20394. // training image: landscape mode; source video: portrait mode
  20395. }
  20396. */
  20397. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  20398. screen.size.width = Math.round(screen.size.width * scale);
  20399. screen.size.height = Math.round(screen.size.height * scale);
  20400. // convert keypoints from the training image space to AR screen space
  20401. // let's pretend that trained keypoints belong to the AR screen space,
  20402. // regardless of the size of the target image. This will make things
  20403. // easier when computing the homography.
  20404. /*
  20405. const sw = arScreenSize.width / screen.size.width;
  20406. const sh = arScreenSize.height / screen.size.height;
  20407. */
  20408. const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
  20409. const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
  20410. keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
  20411. sw, 0, 0,
  20412. 0, sh, 0,
  20413. 0, 0, 1,
  20414. ]);
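// Worked example (added, hypothetical sizes): the transform above is the
// column-major matrix diag(sw, sh, 1), i.e., (x, y) -> (sw * x, sh * y).
// If the training image space is W x H pixels and N = TRAIN_TARGET_NORMALIZED_SIZE,
// then sw = N / W and sh = N / H, so the corner (W, H) maps to (N, N): every
// trained keypoint ends up in the same N x N normalized target space, regardless
// of the size of the reference image.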
  20415. // log
  20416. Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
  20417. // done!
  20418. return speedy_vision_default().Promise.resolve();
  20419. }
  20420. /**
  20421. * Post processing that takes place just after the GPU processing
  20422. * @param result pipeline results
  20423. * @returns state output
  20424. */
  20425. _afterUpdate(result) {
  20426. const referenceImage = this._image[this._currentImageIndex];
  20427. const keypoints = result.keypoints;
  20428. const image = result.image;
  20429. // log
  20430. Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
20431. // update the training map, so that we can map each keypoint back to the reference image it was extracted from
  20432. this._trainingMap.referenceImage.push(referenceImage);
  20433. for (let i = 0; i < keypoints.length; i++) {
  20434. this._trainingMap.keypoints.push(keypoints[i]);
  20435. this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
  20436. }
  20437. // the current image has been processed!
  20438. ++this._currentImageIndex;
  20439. // set output
  20440. if (this._currentImageIndex >= this._image.length) {
  20441. // finished training!
  20442. return speedy_vision_default().Promise.resolve({
  20443. //nextState: 'training',
  20444. nextState: 'scanning',
  20445. nextStateSettings: {
  20446. keypoints: this._trainingMap.keypoints,
  20447. },
  20448. trackerOutput: {},
  20449. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  20450. });
  20451. }
  20452. else {
  20453. // we're not done yet
  20454. return speedy_vision_default().Promise.resolve({
  20455. nextState: 'training',
  20456. trackerOutput: {},
  20457. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  20458. });
  20459. }
  20460. }
  20461. /**
  20462. * Create & setup the pipeline
  20463. * @returns pipeline
  20464. */
  20465. _createPipeline() {
  20466. const pipeline = speedy_vision_default().Pipeline();
  20467. const source = speedy_vision_default().Image.Source('source');
  20468. const screen = speedy_vision_default().Transform.Resize('screen');
  20469. const greyscale = speedy_vision_default().Filter.Greyscale();
  20470. const blur = speedy_vision_default().Filter.GaussianBlur();
  20471. const nightvision = speedy_vision_default().Filter.Nightvision();
  20472. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  20473. const pyramid = speedy_vision_default().Image.Pyramid();
  20474. const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
  20475. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20476. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  20477. const blurredPyramid = speedy_vision_default().Image.Pyramid();
  20478. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  20479. const clipper = speedy_vision_default().Keypoint.Clipper();
  20480. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  20481. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  20482. const imageSink = speedy_vision_default().Image.Sink('image');
  20483. source.media = null;
  20484. screen.size = speedy_vision_default().Size(0, 0);
  20485. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  20486. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  20487. nightvision.gain = NIGHTVISION_GAIN;
  20488. nightvision.offset = NIGHTVISION_OFFSET;
  20489. nightvision.decay = NIGHTVISION_DECAY;
  20490. nightvision.quality = NIGHTVISION_QUALITY;
  20491. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  20492. detector.levels = SCAN_PYRAMID_LEVELS;
  20493. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  20494. detector.threshold = SCAN_FAST_THRESHOLD;
  20495. detector.capacity = 8192;
  20496. subpixel.method = SUBPIXEL_METHOD;
  20497. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  20498. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  20499. clipper.size = TRAIN_MAX_KEYPOINTS;
  20500. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  20501. keypointSink.turbo = false;
  20502. // prepare input
  20503. source.output().connectTo(screen.input());
  20504. screen.output().connectTo(greyscale.input());
  20505. // preprocess image
  20506. greyscale.output().connectTo(nightvisionMux.input('in0'));
  20507. greyscale.output().connectTo(nightvision.input());
  20508. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20509. nightvisionMux.output().connectTo(pyramid.input());
  20510. // keypoint detection
  20511. pyramid.output().connectTo(detector.input());
  20512. detector.output().connectTo(clipper.input());
  20513. // keypoint refinement
  20514. greyscale.output().connectTo(denoiser.input()); // reduce noise
  20515. denoiser.output().connectTo(blurredPyramid.input());
  20516. clipper.output().connectTo(subpixel.input('keypoints'));
  20517. blurredPyramid.output().connectTo(subpixel.input('image'));
  20518. // keypoint description
  20519. greyscale.output().connectTo(blur.input());
  20520. blur.output().connectTo(descriptor.input('image'));
  20521. clipper.output().connectTo(descriptor.input('keypoints'));
  20522. // prepare output
  20523. descriptor.output().connectTo(keypointScaler.input());
  20524. keypointScaler.output().connectTo(keypointSink.input());
  20525. nightvisionMux.output().connectTo(imageSink.input());
  20526. // done!
  20527. pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
  20528. return pipeline;
  20529. }
  20530. /**
  20531. * Get reference image
20532. * @param keypointIndex index of a keypoint in the trained set
20533. * @returns the reference image, or null if not found
  20534. */
  20535. referenceImageOfKeypoint(keypointIndex) {
  20536. const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
  20537. if (imageIndex < 0)
  20538. return null;
  20539. return this._trainingMap.referenceImage[imageIndex];
  20540. }
  20541. /**
  20542. * Get reference image index
20543. * @param keypointIndex index of a keypoint in the trained set
  20544. * @returns reference image index, or -1 if not found
  20545. */
  20546. referenceImageIndexOfKeypoint(keypointIndex) {
  20547. const n = this._trainingMap.referenceImageIndex.length;
  20548. if (keypointIndex < 0 || keypointIndex >= n)
  20549. return -1;
  20550. const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
  20551. if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
  20552. return -1;
  20553. return imageIndex;
  20554. }
  20555. /**
  20556. * Get keypoint of the trained set
20557. * @param keypointIndex index of a keypoint in the trained set
20558. * @returns a keypoint, or null if not found
  20559. */
  20560. referenceKeypoint(keypointIndex) {
  20561. if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
  20562. return null;
  20563. return this._trainingMap.keypoints[keypointIndex];
  20564. }
  20565. }
  20566. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
  20567. /*
  20568. * MARTINS.js Free Edition
  20569. * GPU-accelerated Augmented Reality for the web
  20570. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  20571. * https://github.com/alemart/martins-js
  20572. *
  20573. * This program is free software: you can redistribute it and/or modify
  20574. * it under the terms of the GNU Affero General Public License version 3
  20575. * as published by the Free Software Foundation.
  20576. *
  20577. * This program is distributed in the hope that it will be useful,
  20578. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20579. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20580. * GNU Affero General Public License for more details.
  20581. *
  20582. * You should have received a copy of the GNU Affero General Public License
  20583. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20584. *
  20585. * scanning.ts
  20586. * Scanning state of the Image Tracker
  20587. */
  20588. /** Default target space size (used when training) */
  20589. const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  20590. /** Port of the portal multiplexer: get new data from the camera */
  20591. const PORT_CAMERA = 0;
  20592. /** Port of the portal multiplexer: get previously memorized data */
  20593. const PORT_MEMORY = 1;
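// Note (added): these ports drive the image memorization circuit built in
// _createPipeline(): imagePortalMux selects what the image portal stores on
// each frame. PORT_CAMERA writes the current (buffered) camera frame into the
// portal, while PORT_MEMORY re-feeds the portal with its own previous content,
// preserving the last memorized frame. _afterUpdate() keeps PORT_MEMORY by
// default and switches to PORT_CAMERA only when a better homography is found,
// so the snapshot handed to the pre-tracking state corresponds to the best
// homography seen during the scan.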
  20594. /**
  20595. * Scanning state of the Image Tracker
  20596. */
  20597. class ImageTrackerScanningState extends ImageTrackerState {
  20598. /**
  20599. * Constructor
  20600. * @param imageTracker
  20601. */
  20602. constructor(imageTracker) {
  20603. super('scanning', imageTracker);
  20604. this._counter = 0;
  20605. this._bestScore = 0;
  20606. this._bestHomography = speedy_vision_default().Matrix.Eye(3);
  20607. }
  20608. /**
  20609. * Called as soon as this becomes the active state, just before update() runs for the first time
  20610. * @param settings
  20611. */
  20612. onEnterState(settings) {
  20613. const imagePortalMux = this._pipeline.node('imagePortalMux');
  20614. const lshTables = this._pipeline.node('lshTables');
  20615. const keypoints = settings.keypoints;
  20616. // set attributes
  20617. this._counter = 0;
  20618. this._bestScore = 0;
  20619. // reset the image memorization circuit
  20620. imagePortalMux.port = PORT_CAMERA;
  20621. // prepare the keypoint matcher
  20622. if (keypoints !== undefined)
  20623. lshTables.keypoints = keypoints;
  20624. }
  20625. /**
  20626. * Post processing that takes place just after the GPU processing
  20627. * @param result pipeline results
  20628. * @returns state output
  20629. */
  20630. _afterUpdate(result) {
  20631. const imagePortalMux = this._pipeline.node('imagePortalMux');
  20632. const keypoints = result.keypoints;
  20633. const matchedKeypoints = this._goodMatches(keypoints);
  20634. // tracker output
  20635. const trackerOutput = {
  20636. keypoints: keypoints,
  20637. screenSize: this.screenSize
  20638. };
  20639. // keep the last memorized image
  20640. imagePortalMux.port = PORT_MEMORY;
  20641. // have we found enough matches...?
  20642. if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
  20643. return this._findHomography(matchedKeypoints).then(([homography, score]) => {
  20644. // have we found the best homography so far?
  20645. if (score >= this._bestScore) {
  20646. // store it only if we'll be running the pipeline again
  20647. if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
  20648. this._bestScore = score;
  20649. this._bestHomography = homography;
  20650. // memorize the last image, corresponding to the best homography(*)
  20651. imagePortalMux.port = PORT_CAMERA;
  20652. /*
  20653. (*) technically speaking, this is not exactly the case. Since we're
  20654. using turbo to download the keypoints, there's a slight difference
  20655. between the data used to compute the homography and the last image.
  20656. Still, assuming continuity of the video stream, this logic is
  20657. good enough.
  20658. */
  20659. }
  20660. }
  20661. // find a polyline surrounding the target
  20662. return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
  20663. }).then(polyline => {
  20664. // continue a little longer in the scanning state
  20665. if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
  20666. return {
  20667. nextState: this.name,
  20668. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  20669. };
  20670. }
  20671. // this image should correspond to the best homography
  20672. const snapshot = this._pipeline.node('imagePortalSink');
  20673. // the reference image that we'll track
  20674. const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
  20675. // let's track the target!
  20676. return {
  20677. nextState: 'pre-tracking',
  20678. nextStateSettings: {
  20679. homography: this._bestHomography,
  20680. snapshot: snapshot,
  20681. referenceImage: referenceImage,
  20682. },
  20683. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  20684. };
  20685. }).catch(() => {
  20686. // continue in the scanning state
  20687. return {
  20688. nextState: this.name,
  20689. trackerOutput: trackerOutput,
  20690. };
  20691. });
  20692. }
  20693. else {
  20694. // not enough matches...!
  20695. this._counter = 0;
  20696. this._bestScore = 0;
  20697. }
  20698. // we'll continue to scan the scene
  20699. return speedy_vision_default().Promise.resolve({
  20700. nextState: this.name,
  20701. trackerOutput: trackerOutput,
  20702. });
  20703. }
  20704. /**
  20705. * Find "high quality" matches of a single reference image
  20706. * @param keypoints
  20707. * @returns high quality matches
  20708. */
  20709. _goodMatches(keypoints) {
  20710. const matchedKeypointsPerImageIndex = Object.create(null);
  20711. // filter "good matches"
  20712. for (let j = keypoints.length - 1; j >= 0; j--) {
  20713. const keypoint = keypoints[j];
  20714. if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
  20715. const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
  20716. // the best match should be "much better" than the second best match,
  20717. // which means that they are "distinct enough"
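// Example (added, hypothetical numbers): this is the usual ratio test. If the
// nearest descriptor is at distance d1 = 30 and the second nearest is at
// d2 = 80, then with a ratio of 0.7 we require 30 <= 0.7 * 80 = 56, so the
// match is kept; if instead d1 = 70, the match is ambiguous and discarded.
// (The actual threshold used here is SCAN_MATCH_RATIO.)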
  20718. if (d1 <= SCAN_MATCH_RATIO * d2) {
  20719. const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
  20720. //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
  20721. //if(idx1 == idx2 && idx1 >= 0) {
  20722. if (idx1 >= 0) {
  20723. if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
  20724. matchedKeypointsPerImageIndex[idx1] = [];
  20725. matchedKeypointsPerImageIndex[idx1].push(keypoint);
  20726. }
  20727. }
  20728. }
  20729. }
  20730. // find the image with the most matches
  20731. let matchedKeypoints = [];
  20732. for (const imageIndex in matchedKeypointsPerImageIndex) {
  20733. if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
  20734. matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
  20735. }
  20736. // done!
  20737. return matchedKeypoints;
  20738. }
  20739. /**
  20740. * Find a homography matrix using matched keypoints
  20741. * @param matchedKeypoints "good" matches only
  20742. * @returns homography from reference image space to AR screen space & homography "quality" score
  20743. */
  20744. _findHomography(matchedKeypoints) {
  20745. const srcCoords = [];
  20746. const dstCoords = [];
  20747. // find matching coordinates of the keypoints
  20748. for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
  20749. const matchedKeypoint = matchedKeypoints[i];
  20750. const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
  20751. if (referenceKeypoint != null) {
  20752. srcCoords.push(referenceKeypoint.x);
  20753. srcCoords.push(referenceKeypoint.y);
  20754. dstCoords.push(matchedKeypoint.x);
  20755. dstCoords.push(matchedKeypoint.y);
  20756. }
  20757. else {
  20758. // this shouldn't happen
  20759. return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
  20760. }
  20761. }
  20762. // too few points?
  20763. const n = srcCoords.length / 2;
  20764. if (n < 4) {
  20765. return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
  20766. }
  20767. // compute a homography
  20768. const src = speedy_vision_default().Matrix(2, n, srcCoords);
  20769. const dst = speedy_vision_default().Matrix(2, n, dstCoords);
  20770. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  20771. const homography = speedy_vision_default().Matrix.Zeros(3);
  20772. return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
  20773. method: 'pransac',
  20774. reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
  20775. numberOfHypotheses: 512,
  20776. bundleSize: 128,
  20777. mask: mask,
  20778. }).then(homography => {
  20779. // check if this is a valid homography
  20780. const a00 = homography.at(0, 0);
  20781. if (Number.isNaN(a00))
  20782. throw new DetectionError(`Can't compute homography`);
  20783. // count the number of inliers
  20784. const inliers = mask.read();
  20785. let inlierCount = 0;
  20786. for (let i = inliers.length - 1; i >= 0; i--)
  20787. inlierCount += inliers[i];
  20788. const score = inlierCount / inliers.length;
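// Example (added, hypothetical numbers): the score is the inlier ratio reported
// by PRANSAC through the mask. If 80 correspondences were used and 60 of them
// agree with the homography within the reprojection error, the score is
// 60 / 80 = 0.75. A higher score means a more trustworthy homography.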
  20789. // done!
  20790. return [homography, score];
  20791. });
  20792. }
  20793. /**
  20794. * Create & setup the pipeline
  20795. * @returns pipeline
  20796. */
  20797. _createPipeline() {
  20798. const pipeline = speedy_vision_default().Pipeline();
  20799. const source = speedy_vision_default().Image.Source('source');
  20800. const screen = speedy_vision_default().Transform.Resize('screen');
  20801. const greyscale = speedy_vision_default().Filter.Greyscale();
  20802. const blur = speedy_vision_default().Filter.GaussianBlur();
  20803. const nightvision = speedy_vision_default().Filter.Nightvision();
  20804. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  20805. const pyramid = speedy_vision_default().Image.Pyramid();
  20806. const detector = speedy_vision_default().Keypoint.Detector.FAST();
  20807. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  20808. const clipper = speedy_vision_default().Keypoint.Clipper();
  20809. const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
  20810. const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
  20811. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  20812. const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
  20813. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  20814. const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
  20815. const imagePortalBuffer = speedy_vision_default().Image.Buffer();
  20816. const imagePortalCopy = speedy_vision_default().Transform.Resize();
  20817. //const imageSink = Speedy.Image.Sink('image');
  20818. source.media = null;
  20819. screen.size = speedy_vision_default().Size(0, 0);
  20820. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  20821. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  20822. nightvision.gain = NIGHTVISION_GAIN;
  20823. nightvision.offset = NIGHTVISION_OFFSET;
  20824. nightvision.decay = NIGHTVISION_DECAY;
  20825. nightvision.quality = NIGHTVISION_QUALITY;
  20826. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  20827. detector.levels = SCAN_PYRAMID_LEVELS;
  20828. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  20829. detector.threshold = SCAN_FAST_THRESHOLD;
  20830. detector.capacity = 2048;
  20831. clipper.size = SCAN_MAX_KEYPOINTS;
  20832. lshTables.keypoints = [];
  20833. lshTables.numberOfTables = SCAN_LSH_TABLES;
  20834. lshTables.hashSize = SCAN_LSH_HASHSIZE;
  20835. knn.k = 2;
  20836. knn.quality = 'default';
  20837. //knn.quality = 'fastest';
  20838. imagePortalSource.source = imagePortalSink;
  20839. imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
  20840. imagePortalCopy.size = speedy_vision_default().Size(0, 0);
  20841. imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
  20842. keypointSink.turbo = true;
  20843. // prepare input
  20844. source.output().connectTo(screen.input());
  20845. screen.output().connectTo(greyscale.input());
  20846. // preprocess image
  20847. greyscale.output().connectTo(blur.input());
  20848. greyscale.output().connectTo(nightvisionMux.input('in0'));
  20849. greyscale.output().connectTo(nightvision.input());
  20850. nightvision.output().connectTo(nightvisionMux.input('in1'));
  20851. nightvisionMux.output().connectTo(pyramid.input());
  20852. // keypoint detection
  20853. pyramid.output().connectTo(detector.input());
  20854. detector.output().connectTo(clipper.input());
  20855. // keypoint description
  20856. blur.output().connectTo(descriptor.input('image'));
  20857. clipper.output().connectTo(descriptor.input('keypoints'));
  20858. // keypoint matching
  20859. descriptor.output().connectTo(knn.input('keypoints'));
  20860. lshTables.output().connectTo(knn.input('lsh'));
  20861. // prepare output
  20862. clipper.output().connectTo(keypointSink.input());
  20863. knn.output().connectTo(keypointSink.input('matches'));
  20864. //pyramid.output().connectTo(imageSink.input());
  20865. // memorize image
  20866. source.output().connectTo(imagePortalBuffer.input());
  20867. imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
  20868. imagePortalSource.output().connectTo(imagePortalCopy.input());
  20869. imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
  20870. imagePortalMux.output().connectTo(imagePortalSink.input());
  20871. // done!
  20872. pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
  20873. return pipeline;
  20874. }
  20875. }
  20876. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
  20877. /*
  20878. * MARTINS.js Free Edition
  20879. * GPU-accelerated Augmented Reality for the web
  20880. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  20881. * https://github.com/alemart/martins-js
  20882. *
  20883. * This program is free software: you can redistribute it and/or modify
  20884. * it under the terms of the GNU Affero General Public License version 3
  20885. * as published by the Free Software Foundation.
  20886. *
  20887. * This program is distributed in the hope that it will be useful,
  20888. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20889. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20890. * GNU Affero General Public License for more details.
  20891. *
  20892. * You should have received a copy of the GNU Affero General Public License
  20893. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20894. *
  20895. * pre-tracking.ts
  20896. * Pre-tracking state of the Image Tracker
  20897. */
  20898. /** Default target space size (used when training) */
  20899. const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  20900. /** Use the camera stream as the input of the pipeline */
  20901. const PORT_CAMERA_IMAGE = 1;
  20902. /** Use the reference image as the input of the pipeline */
  20903. const PORT_REFERENCE_IMAGE = 0;
  20904. /**
  20905. * The pre-tracking state of the Image Tracker is a new training
20906. * phase for the specific target that we are going to track
  20907. */
  20908. class ImageTrackerPreTrackingState extends ImageTrackerState {
  20909. /**
  20910. * Constructor
  20911. * @param imageTracker
  20912. */
  20913. constructor(imageTracker) {
  20914. super('pre-tracking', imageTracker);
  20915. this._homography = speedy_vision_default().Matrix.Eye(3);
  20916. this._referenceImage = null;
  20917. this._step = 'read-reference-image';
  20918. this._referenceKeypoints = [];
  20919. this._iterations = 0;
  20920. }
  20921. /**
  20922. * Called as soon as this becomes the active state, just before update() runs for the first time
  20923. * @param settings
  20924. */
  20925. onEnterState(settings) {
  20926. const imagePortalSource = this._pipeline.node('imagePortalSource');
  20927. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  20928. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  20929. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  20930. const homography = settings.homography;
  20931. const referenceImage = settings.referenceImage;
  20932. const snapshot = settings.snapshot;
  20933. // this shouldn't happen
  20934. if (!referenceImage)
  20935. throw new TrackingError(`Can't track a null reference image`);
  20936. // set attributes
  20937. this._homography = homography;
  20938. this._referenceImage = referenceImage;
  20939. this._step = 'read-reference-image';
  20940. this._referenceKeypoints = [];
  20941. this._iterations = 0;
  20942. // setup the pipeline
  20943. imagePortalSource.source = snapshot;
  20944. muxOfReferenceKeypoints.port = 0;
  20945. muxOfBufferOfReferenceKeypoints.port = 0;
  20946. bufferOfReferenceKeypoints.frozen = false;
  20947. }
  20948. /**
  20949. * Called just before the GPU processing
  20950. * @returns promise
  20951. */
  20952. _beforeUpdate() {
  20953. const referenceImage = this._referenceImage;
  20954. const source = this._pipeline.node('source');
  20955. const sourceMux = this._pipeline.node('sourceMux');
  20956. const imageRectifier = this._pipeline.node('imageRectifier');
  20957. const keypointRectifier = this._pipeline.node('keypointRectifier');
  20958. const borderClipper = this._pipeline.node('borderClipper');
  20959. const screenSize = this.screenSize;
  20960. // set the source media to the reference image we're going to track
  20961. const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
  20962. source.media = targetMedia;
  20963. // setup the source multiplexer
  20964. if (this._step == 'read-reference-image')
  20965. sourceMux.port = PORT_REFERENCE_IMAGE;
  20966. else
  20967. sourceMux.port = PORT_CAMERA_IMAGE;
  20968. // clip keypoints from the borders of the target image
  20969. borderClipper.imageSize = screenSize;
  20970. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  20971. // rectify the image
  20972. const rectify = (this._step == 'read-reference-image') ?
  20973. this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
  20974. this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
  20975. return rectify.then(rectificationMatrix => {
  20976. imageRectifier.transform = rectificationMatrix;
  20977. });
  20978. }
  20979. /**
  20980. * Post processing that takes place just after the GPU processing
  20981. * @param result pipeline results
  20982. * @returns state output
  20983. */
  20984. _afterUpdate(result) {
  20985. const referenceImage = this._referenceImage;
  20986. const imagePortalSink = this._pipeline.node('imagePortal');
  20987. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  20988. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  20989. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  20990. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  20991. const keypoints = result.keypoints;
  20992. const image = result.image;
  20993. // tracker output
  20994. const trackerOutput = {
  20995. keypoints: image !== undefined ? keypoints : undefined,
  20996. image: image,
  20997. screenSize: this.screenSize,
  20998. };
  20999. // decide what to do next
  21000. switch (this._step) {
  21001. case 'read-reference-image': {
  21002. // enable matching
  21003. muxOfReferenceKeypoints.port = 1;
  21004. // store reference keypoints
  21005. this._referenceKeypoints = keypoints;
  21006. // next step
  21007. this._step = 'warp-camera-image';
  21008. return speedy_vision_default().Promise.resolve({
  21009. nextState: 'pre-tracking',
  21010. trackerOutput: trackerOutput,
  21011. });
  21012. }
  21013. case 'warp-camera-image': {
  21014. // freeze reference keypoints
  21015. bufferOfReferenceKeypoints.frozen = true;
  21016. muxOfBufferOfReferenceKeypoints.port = 1;
  21017. // refine warp?
  21018. if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
  21019. this._step = 'warp-camera-image';
  21020. else
  21021. this._step = 'train-camera-image';
  21022. // warp image & go to next step
  21023. return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
  21024. nextState: 'pre-tracking',
  21025. trackerOutput: trackerOutput,
  21026. })).catch(err => {
  21027. Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
  21028. return {
  21029. nextState: 'scanning',
  21030. trackerOutput: trackerOutput,
  21031. };
  21032. });
  21033. }
  21034. case 'train-camera-image': {
  21035. // log
  21036. Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
  21037. // change the coordinates
  21038. return this._changeSpace(this._homography, this.screenSize).then(homography => {
  21039. // we're ready to track the target!
  21040. return speedy_vision_default().Promise.resolve({
  21041. //nextState: 'pre-tracking',
  21042. nextState: 'tracking',
  21043. trackerOutput: trackerOutput,
  21044. nextStateSettings: {
  21045. homography: homography,
  21046. referenceImage: referenceImage,
  21047. templateKeypoints: keypoints,
  21048. keypointPortalSink: keypointPortalSink,
  21049. imagePortalSink: imagePortalSink,
  21050. screenSize: this.screenSize,
  21051. },
  21052. });
  21053. });
  21054. }
  21055. }
  21056. }
  21057. /**
  21058. * Find an adjustment warp between the camera image and the reference image
  21059. * @param dstKeypoints destination
  21060. * @param srcKeypoints source
  21061. * @returns a promise that resolves to a 3x3 homography
  21062. */
  21063. _findWarp(dstKeypoints, srcKeypoints) {
  21064. //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
  21065. const srcCoords = [];
  21066. const dstCoords = [];
  21067. // find matching coordinates of the keypoints
  21068. for (let i = 0; i < dstKeypoints.length; i++) {
  21069. const dstKeypoint = dstKeypoints[i];
  21070. if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
  21071. const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
  21072. // the best match should be "much better" than the second best match,
  21073. // which means that they are "distinct enough"
  21074. if (d1 <= TRACK_MATCH_RATIO * d2) {
  21075. const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
  21076. srcCoords.push(srcKeypoint.x);
  21077. srcCoords.push(srcKeypoint.y);
  21078. dstCoords.push(dstKeypoint.x);
  21079. dstCoords.push(dstKeypoint.y);
  21080. }
  21081. }
  21082. }
  21083. // too few points?
  21084. const n = srcCoords.length / 2;
  21085. if (n < 4) {
  21086. return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
  21087. }
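// Note (added): the matched coordinates are packed into a single 2 x 2n
// column-major matrix: columns 0..n-1 hold the source (reference) keypoints and
// columns n..2n-1 hold the destination (camera) keypoints. Both halves are
// rectified by the keypoint warp below before the affine model is fit, which is
// why the column blocks (0..n-1) and (n..2n-1) are split apart afterwards.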
  21088. // compute warp
  21089. const model = speedy_vision_default().Matrix.Eye(3);
  21090. return this._findKeypointWarp().then(transform =>
  21091. // rectify keypoints
  21092. speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
  21093. // find warp
  21094. speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
  21095. method: 'pransac',
  21096. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21097. numberOfHypotheses: 512 * 4,
  21098. bundleSize: 128,
  21099. })).then(_ => {
  21100. // validate the model
  21101. const a00 = model.at(0, 0);
  21102. if (Number.isNaN(a00))
  21103. throw new TrackingError(`Can't compute warp: bad keypoints`);
  21104. // done!
  21105. return model;
  21106. });
  21107. }
  21108. /**
  21109. * Find a warp to be applied to the keypoints
  21110. * @returns affine transform
  21111. */
  21112. _findKeypointWarp() {
  21113. const referenceImage = this._referenceImage;
  21114. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21115. const screenSize = this.screenSize;
  21116. // no rotation is needed
  21117. if (!this._mustRotateWarpedImage(media, screenSize))
  21118. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
  21119. // rotate by 90 degrees clockwise around the pivot
  21120. const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
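// Sanity check (added): the column-major matrix below maps (x, y) to
// (-y + px + py, x + py - px). The pivot (px, py) maps to itself, and a point
// directly above the pivot maps to a point directly to its right, i.e., a
// 90-degree clockwise rotation in image coordinates (where y grows downwards).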
  21121. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
  21122. 0, 1, 0,
  21123. -1, 0, 0,
  21124. py + px, py - px, 1,
  21125. ]));
  21126. }
  21127. /**
  21128. * Change the space of the homography in order to improve tracking quality
  21129. * @param homography mapping coordinates from normalized target space to AR screen space
  21130. * @param screenSize AR screen size
  21131. * @returns homography mapping coordinates from AR screen space to AR screen space
  21132. */
  21133. _changeSpace(homography, screenSize) {
  21134. const sw = screenSize.width, sh = screenSize.height;
  21135. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
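// Note (added): "screen" holds the four corners of the AR screen, one per
// column: (0,0), (sw,0), (sw,sh), (0,sh). The polyline computed below is the
// image of the normalized target's corners under the current homography, so
// Matrix.perspective() yields a homography that maps the AR screen rectangle
// onto the tracked target, as described in the JSDoc above.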
  21136. const mat = speedy_vision_default().Matrix.Zeros(3);
  21137. return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
  21138. }
  21139. /**
  21140. * Create & setup the pipeline
  21141. * @returns pipeline
  21142. */
  21143. _createPipeline() {
  21144. const pipeline = speedy_vision_default().Pipeline();
  21145. const source = speedy_vision_default().Image.Source('source');
  21146. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  21147. const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
  21148. const screen = speedy_vision_default().Transform.Resize('screen');
  21149. const greyscale = speedy_vision_default().Filter.Greyscale();
  21150. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  21151. const nightvision = speedy_vision_default().Filter.Nightvision();
  21152. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  21153. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  21154. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  21155. const blur = speedy_vision_default().Filter.GaussianBlur();
  21156. const clipper = speedy_vision_default().Keypoint.Clipper();
  21157. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  21158. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  21159. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  21160. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  21161. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  21162. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  21163. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  21164. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
  21165. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
  21166. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
  21167. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  21168. const imageSink = speedy_vision_default().Image.Sink('image');
  21169. source.media = null;
  21170. screen.size = speedy_vision_default().Size(0, 0);
  21171. imagePortalSource.source = null;
  21172. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21173. sourceMux.port = PORT_REFERENCE_IMAGE;
  21174. nightvision.gain = NIGHTVISION_GAIN;
  21175. nightvision.offset = NIGHTVISION_OFFSET;
  21176. nightvision.decay = NIGHTVISION_DECAY;
  21177. nightvision.quality = NIGHTVISION_QUALITY;
  21178. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  21179. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  21180. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  21181. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  21182. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  21183. detector.quality = TRACK_HARRIS_QUALITY;
  21184. detector.capacity = TRACK_DETECTOR_CAPACITY;
  21185. subpixel.method = SUBPIXEL_METHOD;
  21186. clipper.size = TRACK_MAX_KEYPOINTS;
  21187. borderClipper.imageSize = screen.size;
  21188. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  21189. matcher.k = 2;
  21190. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21191. keypointPortalSource.source = keypointPortalSink;
  21192. muxOfReferenceKeypoints.port = 0;
  21193. muxOfBufferOfReferenceKeypoints.port = 0;
  21194. bufferOfReferenceKeypoints.frozen = false;
  21195. keypointSink.turbo = false;
  21196. // prepare input
  21197. source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
  21198. imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
  21199. sourceMux.output().connectTo(screen.input());
  21200. screen.output().connectTo(greyscale.input());
  21201. // preprocess images
  21202. greyscale.output().connectTo(imageRectifier.input());
  21203. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  21204. imageRectifier.output().connectTo(nightvision.input());
  21205. nightvision.output().connectTo(nightvisionMux.input('in1'));
  21206. nightvisionMux.output().connectTo(blur.input());
  21207. // keypoint detection & clipping
  21208. nightvisionMux.output().connectTo(detector.input());
  21209. detector.output().connectTo(borderClipper.input());
  21210. borderClipper.output().connectTo(clipper.input());
  21211. // keypoint refinement
  21212. imageRectifier.output().connectTo(denoiser.input());
  21213. denoiser.output().connectTo(subpixel.input('image'));
  21214. clipper.output().connectTo(subpixel.input('keypoints'));
  21215. // keypoint description
  21216. blur.output().connectTo(descriptor.input('image'));
  21217. subpixel.output().connectTo(descriptor.input('keypoints'));
  21218. // keypoint matching
  21219. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  21220. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  21221. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  21222. descriptor.output().connectTo(matcher.input('keypoints'));
  21223. // store reference keypoints
  21224. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  21225. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  21226. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  21227. // portals
  21228. descriptor.output().connectTo(keypointPortalSink.input());
  21229. // prepare output
  21230. descriptor.output().connectTo(keypointRectifier.input());
  21231. keypointRectifier.output().connectTo(keypointSink.input());
  21232. matcher.output().connectTo(keypointSink.input('matches'));
  21233. //imageRectifier.output().connectTo(imageSink.input());
  21234. // done!
  21235. pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  21236. return pipeline;
  21237. }
  21238. }
  21239. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
  21240. /*
  21241. * MARTINS.js Free Edition
  21242. * GPU-accelerated Augmented Reality for the web
  21243. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  21244. * https://github.com/alemart/martins-js
  21245. *
  21246. * This program is free software: you can redistribute it and/or modify
  21247. * it under the terms of the GNU Affero General Public License version 3
  21248. * as published by the Free Software Foundation.
  21249. *
  21250. * This program is distributed in the hope that it will be useful,
  21251. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21252. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21253. * GNU Affero General Public License for more details.
  21254. *
  21255. * You should have received a copy of the GNU Affero General Public License
  21256. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21257. *
  21258. * image-tracker-event.ts
  21259. * Events emitted by an Image Tracker
  21260. */
  21261. /**
  21262. * An event emitted by an Image Tracker
  21263. */
  21264. class ImageTrackerEvent extends AREvent {
  21265. /**
  21266. * Constructor
  21267. * @param type event type
  21268. * @param referenceImage optional reference image
  21269. */
  21270. constructor(type, referenceImage) {
  21271. super(type);
  21272. this._referenceImage = referenceImage;
  21273. }
  21274. /**
  21275. * Reference image
  21276. */
  21277. get referenceImage() {
  21278. return this._referenceImage;
  21279. }
  21280. }
  21281. ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
  21282. /*
  21283. * MARTINS.js Free Edition
  21284. * GPU-accelerated Augmented Reality for the web
  21285. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  21286. * https://github.com/alemart/martins-js
  21287. *
  21288. * This program is free software: you can redistribute it and/or modify
  21289. * it under the terms of the GNU Affero General Public License version 3
  21290. * as published by the Free Software Foundation.
  21291. *
  21292. * This program is distributed in the hope that it will be useful,
  21293. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21294. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21295. * GNU Affero General Public License for more details.
  21296. *
  21297. * You should have received a copy of the GNU Affero General Public License
  21298. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21299. *
  21300. * camera-model.ts
  21301. * Camera model
  21302. */
  21303. /** Number of samples we'll be keeping to help calibrate the camera */
  21304. const INTRISICS_SAMPLES = 401; //201; //31; // odd number
  21305. /** Whether or not to auto-calibrate the camera */
  21306. const FOVY_AUTODETECT = false; //true;
  21307. /** A guess of the vertical field-of-view of a generic camera, in degrees */
  21308. const FOVY_GUESS = 45; //50; // will be part of the viewing frustum
  21309. /** Number of iterations used to refine the estimated pose */
  21310. const POSE_ITERATIONS = 30;
  21311. /** Number of samples used in the rotation filter */
  21312. const ROTATION_FILTER_SAMPLES = 10;
  21313. /** Number of samples used in the translation filter */
  21314. const TRANSLATION_FILTER_SAMPLES = 10;
  21315. /** Convert degrees to radians */
  21316. const DEG2RAD = 0.017453292519943295; // pi / 180
  21317. /** Convert radians to degrees */
  21318. const RAD2DEG = 57.29577951308232; // 180 / pi
  21319. /** Numerical tolerance */
  21320. const EPSILON = 1e-6;
  21321. /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
  21322. const FX = 0;
  21323. /** Index of the vertical focal length in the camera intrinsics matrix */
  21324. const FY = 4;
  21325. /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
  21326. const U0 = 6;
  21327. /** Index of the vertical position of the principal point in the camera intrinsics matrix */
  21328. const V0 = 7;
  21329. /** Translation refinement: predefined buffers for efficiency */
  21330. const TRANSLATION_REFINEMENT_BUFFERS = (() => {
  21331. const l = 1.0;
  21332. const x = [0, l, 0, -l, 0];
  21333. const y = [-l, 0, l, 0, 0];
  21334. const n = x.length;
  21335. return Object.freeze({
  21336. x, y,
  21337. a1: new Array(n),
  21338. a2: new Array(n),
  21339. a3: new Array(n),
  21340. m: new Array(3 * n * 3),
  21341. v: new Array(3 * n),
  21342. t: new Array(3),
  21343. r: new Array(3 * n),
  21344. c: new Array(3),
  21345. Mc: new Array(3 * n),
  21346. });
  21347. })();
  21348. /** Translation refinement: number of iterations */
  21349. const TRANSLATION_REFINEMENT_ITERATIONS = 3; // 1; // 5;
  21350. /** Translation refinement: number of samples */
  21351. const TRANSLATION_REFINEMENT_SAMPLES = 5; // TRANSLATION_REFINEMENT_BUFFERS.x.length;
  21352. /** Translation refinement: the triple of the number of samples */
  21353. const TRANSLATION_REFINEMENT_SAMPLES_3X = 15; //3 * TRANSLATION_REFINEMENT_SAMPLES;
  21354. /**
  21355. * Camera model
  21356. */
  21357. class CameraModel {
  21358. /**
  21359. * Constructor
  21360. */
  21361. constructor() {
  21362. this._screenSize = speedy_vision_default().Size(0, 0);
  21363. this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
  21364. this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // identity matrix
  21365. this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // no rotation & no translation [ R | t ] = [ I | 0 ]
  21366. this._f = (new Array(INTRISICS_SAMPLES)).fill(this._intrinsics[FY]);
  21367. this._fp = 0;
  21368. this._partialRotationBuffer = [];
  21369. this._translationBuffer = [];
  21370. }
  21371. /**
  21372. * Initialize the model
  21373. * @param screenSize
  21374. */
  21375. init(screenSize) {
  21376. // validate
  21377. if (screenSize.area() == 0)
  21378. throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
  21379. // set the screen size
  21380. this._screenSize.width = screenSize.width;
  21381. this._screenSize.height = screenSize.height;
  21382. // reset the model
  21383. this._resetIntrinsics();
  21384. this._resetExtrinsics();
  21385. // log
  21386. Utils.log(`Initializing the camera model...`);
  21387. }
  21388. /**
  21389. * Release the model
  21390. */
  21391. release() {
  21392. this.reset();
  21393. return null;
  21394. }
  21395. /**
  21396. * Update the camera model
  21397. * @param homography 3x3 perspective transform
  21398. * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
  21399. * @returns promise that resolves to a camera matrix
  21400. */
  21401. update(homography, screenSize) {
  21402. // validate the shape of the homography
  21403. if (homography.rows != 3 || homography.columns != 3)
  21404. throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
  21405. // validate screenSize
  21406. if (screenSize.area() == 0)
  21407. throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
  21408. // changed screen size?
  21409. if (!this._screenSize.equals(screenSize)) {
  21410. Utils.log(`Camera model: detected a change in screen size...`);
  21411. // update the screen size
  21412. this._screenSize.width = screenSize.width;
  21413. this._screenSize.height = screenSize.height;
  21414. // reset camera
  21415. this.reset();
  21416. }
  21417. // read the entries of the homography
  21418. const h = homography.read();
  21419. const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
  21420. // validate the homography (homography matrices aren't singular)
  21421. const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
  21422. if (Math.abs(det) < EPSILON) {
  21423. Utils.warning(`Can't update the camera model using an invalid homography matrix`);
  21424. return speedy_vision_default().Promise.resolve(this._matrix);
  21425. }
  21426. // estimate the focal length (auto-calibration)
  21427. const f = this._estimateFocal(homography);
  21428. if (f > 0)
  21429. this._storeFocal(f);
  21430. //console.log(this.fovy * RAD2DEG);
  21431. // estimate the pose
  21432. const pose = this._estimatePose(homography);
  21433. this._storePose(pose);
  21434. // compute the camera matrix
  21435. const C = this.denormalizer();
  21436. const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
  21437. const E = speedy_vision_default().Matrix(3, 4, this._extrinsics);
  21438. this._matrix.setToSync(K.times(E).times(C));
  21439. //console.log("intrinsics -----------", K.toString());
  21440. //console.log("matrix ----------------",this._matrix.toString());
  21441. return speedy_vision_default().Promise.resolve(this._matrix);
  21442. }
  21443. /**
  21444. * Reset camera model
  21445. */
  21446. reset() {
  21447. this._resetIntrinsics();
  21448. this._resetExtrinsics();
  21449. }
  21450. /**
  21451. * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
  21452. * 2D AR screen space (measured in pixels)
  21453. * @returns 3x4 camera matrix
  21454. */
  21455. get matrix() {
  21456. return this._matrix;
  21457. }
  21458. /**
  21459. * Camera intrinsics matrix
  21460. * @returns 3x3 intrinsics matrix in column-major format
  21461. */
  21462. get intrinsics() {
  21463. return this._intrinsics;
  21464. }
  21465. /**
  21466. * Camera extrinsics matrix
  21467. * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
  21468. */
  21469. get extrinsics() {
  21470. return this._extrinsics;
  21471. }
  21472. /**
  21473. * Convert coordinates from normalized space [-1,1]^3 to a
  21474. * "3D pixel space" based on the dimensions of the AR screen.
  21475. *
21476. * We perform a 180-degree rotation around the x-axis so that
  21477. * it looks nicer (the y-axis grows downwards in image space).
  21478. *
  21479. * The final camera matrix is P = K * [ R | t ] * C, where
  21480. * C is this conversion matrix. The intent behind this is to
  21481. * make tracking independent of target and screen sizes.
  21482. *
  21483. * Reminder: we use a right-handed coordinate system in 3D!
  21484. * In 2D image space the coordinate system is left-handed.
  21485. *
  21486. * @returns 4x4 conversion matrix C
  21487. */
  21488. denormalizer() {
  21489. const w = this._screenSize.width / 2; // half width, in pixels
  21490. const h = this._screenSize.height / 2; // half height, in pixels
  21491. const d = Math.min(w, h); // virtual unit length, in pixels
  21492. /*
  21493. return Speedy.Matrix(4, 4, [
  21494. 1, 0, 0, 0,
  21495. 0,-1, 0, 0,
  21496. 0, 0,-1, 0,
  21497. w/d, h/d, 0, 1/d
  21498. ]);
  21499. */
  21500. return speedy_vision_default().Matrix(4, 4, [
  21501. d, 0, 0, 0,
  21502. 0, -d, 0, 0,
  21503. 0, 0, -d, 0,
  21504. w, h, 0, 1,
  21505. ]);
  21506. }
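/*
A small worked example (illustrative values only): for a hypothetical
640x480 AR screen we get w = 320, h = 240 and d = 240, so C acts as
    (x, y, z, 1) |-> (d*x + w, -d*y + h, -d*z, 1)
i.e., the normalized point (0,0,0) goes to the screen center (320, 240)
and (1,1,0) goes to (560, 0): the y-flip is the 180-degree rotation
mentioned above, so that y grows downwards in image space.
*/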
  21507. /**
  21508. * Size of the AR screen space, in pixels
  21509. * @returns size in pixels
  21510. */
  21511. get screenSize() {
  21512. return this._screenSize;
  21513. }
  21514. /**
  21515. * Focal length in pixel units (projection distance in the pinhole camera model)
21516. * equal to (focal length in mm) * (number of pixels per mm on the sensor)
  21517. * @returns focal length
  21518. */
  21519. get focalLength() {
  21520. return this._intrinsics[FY]; // fx == fy
  21521. }
  21522. /**
  21523. * Horizontal field-of-view, given in radians
21524. * @returns horizontal field-of-view
  21525. */
  21526. get fovx() {
  21527. return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
  21528. }
  21529. /**
  21530. * Vertical field-of-view, given in radians
  21531. * @returns vertical field-of-view
  21532. */
  21533. get fovy() {
  21534. return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
  21535. }
  21536. /**
  21537. * Principal point
  21538. * @returns principal point, in pixel coordinates
  21539. */
  21540. principalPoint() {
  21541. return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
  21542. }
  21543. /**
  21544. * Reset camera extrinsics
  21545. */
  21546. _resetExtrinsics() {
  21547. // set the rotation matrix to the identity
  21548. this._extrinsics.fill(0);
  21549. this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
  21550. // reset filters
  21551. this._partialRotationBuffer.length = 0;
  21552. this._translationBuffer.length = 0;
  21553. }
  21554. /**
  21555. * Reset camera intrinsics
  21556. */
  21557. _resetIntrinsics() {
  21558. const u0 = this._screenSize.width / 2;
  21559. const v0 = this._screenSize.height / 2;
  21560. const f = v0 / Math.tan(DEG2RAD * FOVY_GUESS / 2);
  21561. this._intrinsics[FX] = f;
  21562. this._intrinsics[FY] = f;
  21563. this._intrinsics[U0] = u0;
  21564. this._intrinsics[V0] = v0;
  21565. this._f.fill(this._intrinsics[FY]);
  21566. this._fp = 0;
  21567. }
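/*
Illustrative numbers only (FOVY_GUESS is a constant defined elsewhere in
this file; 45 degrees is assumed here purely for the example): on a
hypothetical 1280x720 screen, u0 = 640, v0 = 360 and
    f = 360 / tan(45° / 2) ≈ 869 pixels
which corresponds to fovy = 45° and fovx = 2 * atan(640 / 869) ≈ 72.7°.
This guess is used until _estimateFocal() refines it via auto-calibration.
*/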
  21568. /**
  21569. * Estimate the focal length
  21570. * @param homography valid homography
21571. * @returns estimated focal length; the current focal length if it can't be estimated, or 0 if auto-detection is disabled
  21572. */
  21573. _estimateFocal(homography) {
  21574. // auto-detect the focal length?
  21575. if (!FOVY_AUTODETECT)
  21576. return 0;
  21577. // read the entries of the homography
  21578. const h = homography.read();
  21579. const h11 = h[0], h12 = h[3]; //, h13 = h[6];
  21580. const h21 = h[1], h22 = h[4]; //, h23 = h[7];
  21581. const h31 = h[2], h32 = h[5]; //, h33 = h[8];
  21582. // read the principal point
  21583. const u0 = this._intrinsics[U0];
  21584. const v0 = this._intrinsics[V0];
  21585. // estimate the focal length based on the orthogonality
  21586. // constraint r1'r2 = 0 of a rotation matrix
  21587. const f2 = -((h11 / h31 - u0) * (h12 / h32 - u0) + (h21 / h31 - v0) * (h22 / h32 - v0));
  21588. // can't estimate it?
  21589. if (f2 < 0)
  21590. return this._intrinsics[FY];
  21591. //return 0;
  21592. // done!
  21593. return Math.sqrt(f2);
  21594. }
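/*
Where the expression for f2 comes from (a sketch of the derivation): up to
scale, H = K [ r1 | r2 | t ] with K = [ f 0 u0 ; 0 f v0 ; 0 0 1 ], so r1 and
r2 are proportional to K^(-1) h1 and K^(-1) h2:
    r1 ~ [ (h11 - u0 h31)/f , (h21 - v0 h31)/f , h31 ]'
    r2 ~ [ (h12 - u0 h32)/f , (h22 - v0 h32)/f , h32 ]'
Imposing r1'r2 = 0 and solving for f^2:
    f^2 = -[ (h11 - u0 h31)(h12 - u0 h32) + (h21 - v0 h31)(h22 - v0 h32) ] / (h31 h32)
        = -[ (h11/h31 - u0)(h12/h32 - u0) + (h21/h31 - v0)(h22/h32 - v0) ]
which is exactly the value computed above. When the right-hand side is
negative, the constraint yields no real solution and the current focal
length is kept.
*/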
  21595. /**
  21596. * Store an estimated focal length
  21597. * @param f estimated focal length
  21598. */
  21599. _storeFocal(f) {
  21600. // store the focal length
  21601. this._f[this._fp] = f;
  21602. this._fp = (this._fp + 1) % INTRISICS_SAMPLES;
  21603. // take the median of the estimated focal lengths
  21604. const sorted = this._f.concat([]).sort((a, b) => a - b);
  21605. const median = sorted[sorted.length >>> 1];
  21606. // update the intrinsics matrix
  21607. this._intrinsics[FX] = this._intrinsics[FY] = median;
  21608. /*
  21609. // test
  21610. const u0 = this._intrinsics[U0];
  21611. const v0 = this._intrinsics[V0];
  21612. const fovx = 2 * Math.atan(u0 / median) * RAD2DEG;
  21613. const fovy = 2 * Math.atan(v0 / median) * RAD2DEG;
  21614. console.log('---------------');
  21615. console.log("fov:",fovx,fovy);
  21616. console.log("f:",median);
  21617. */
  21618. }
  21619. /**
  21620. * Compute a normalized homography H' = K^(-1) * H for an
  21621. * ideal pinhole with f = 1 and principal point = (0,0)
  21622. * @param homography homography H to be normalized
  21623. * @param f focal length
  21624. * @returns normalized homography H'
  21625. */
  21626. _normalizeHomography(homography, f = this._intrinsics[FY]) {
  21627. const h = homography.read();
  21628. const u0 = this._intrinsics[U0];
  21629. const v0 = this._intrinsics[V0];
  21630. const h11 = h[0] - u0 * h[2], h12 = h[3] - u0 * h[5], h13 = h[6] - u0 * h[8];
  21631. const h21 = h[1] - v0 * h[2], h22 = h[4] - v0 * h[5], h23 = h[7] - v0 * h[8];
  21632. const h31 = h[2] * f, h32 = h[5] * f, h33 = h[8] * f;
  21633. return speedy_vision_default().Matrix(3, 3, [
  21634. h11, h21, h31,
  21635. h12, h22, h32,
  21636. h13, h23, h33,
  21637. ]);
  21638. }
  21639. /**
  21640. * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
  21641. * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
  21642. * @returns a 3x3 matrix
  21643. */
  21644. _estimatePartialPose(normalizedHomography) {
  21645. const h = normalizedHomography.read();
  21646. const h11 = h[0], h12 = h[3], h13 = h[6];
  21647. const h21 = h[1], h22 = h[4], h23 = h[7];
  21648. const h31 = h[2], h32 = h[5], h33 = h[8];
  21649. // select the sign so that t3 = tz > 0
  21650. const sign = h33 >= 0 ? 1 : -1;
  21651. // compute the scale factor
  21652. const h1norm = Math.sqrt(h11 * h11 + h21 * h21 + h31 * h31);
  21653. const h2norm = Math.sqrt(h12 * h12 + h22 * h22 + h32 * h32);
  21654. //const scale = sign * 2 / (h1norm + h2norm);
  21655. //const scale = sign / h1norm;
  21656. //const scale = sign / h2norm;
  21657. const scale = sign / Math.max(h1norm, h2norm); // this seems to work. why?
  21658. // invalid homography?
  21659. if (Number.isNaN(scale))
  21660. return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
  21661. // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
  21662. // if h1norm is not approximately h2norm, it means that the first two columns of
  21663. // the normalized homography are not really encoding a rotation (up to a scale)
  21664. // what is causing this? does h3 (and h33) tell us anything about it?
  21665. // what about the intrinsics matrix? the principal point...? the fov...?
  21666. //console.log("h1,h2",h1norm,h2norm);
  21667. //console.log(normalizedHomography.toString());
  21668. // recover the translation and the rotation
  21669. const t1 = scale * h13;
  21670. const t2 = scale * h23;
  21671. const t3 = scale * h33;
  21672. const r11 = scale * h11;
  21673. const r21 = scale * h21;
  21674. const r31 = scale * h31;
  21675. const r12 = scale * h12;
  21676. const r22 = scale * h22;
  21677. const r32 = scale * h32;
  21678. // refine the pose
  21679. const r = this._refineRotation(r11, r21, r31, r12, r22, r32);
  21680. const t = this._refineTranslation(normalizedHomography, r, [t1, t2, t3]);
  21681. //const t = [t1, t2, t3]; // faster, but less accurate
  21682. // done!
  21683. return speedy_vision_default().Matrix(3, 3, r.concat(t)); // this is possibly NaN... why? homography...
  21684. }
  21685. /**
  21686. * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
  21687. * @param r11 x of r1
  21688. * @param r21 y of r1
  21689. * @param r31 z of r1
  21690. * @param r12 x of r2
  21691. * @param r22 y of r2
  21692. * @param r32 z of r2
  21693. * @returns a 3x2 matrix R such that R'R = I (column-major format)
  21694. */
  21695. _refineRotation(r11, r21, r31, r12, r22, r32) {
  21696. /*
  21697. A little technique I figured out to correct the rotation vectors
  21698. ----------------------------------------------------------------
  21699. We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
  21700. R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
  21701. because vectors r1 and r2 are not perfectly orthonormal due to noise.
  21702. Let's first notice that R'R is symmetric. You can easily check that its
  21703. two eigenvalues are both real and positive (as long as r1, r2 != 0 and
  21704. r1 is not parallel to r2, but we never take such vectors as input).
  21705. R'R = [ r1'r1 r1'r2 ] is of rank 2, positive-definite
  21706. [ r1'r2 r2'r2 ]
  21707. We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
  21708. chosen to be orthogonal and D is a diagonal matrix whose entries are
  21709. the eigenvalues of R'R.
  21710. Let LL' be the Cholesky decomposition of D. Such decomposition exists
  21711. and is trivially computed: just take the square roots of the entries of
  21712. D. Since L is diagonal, we have L = L'. Its inverse is also trivially
  21713. computed - call it Linv.
  21714. Now, define a 2x2 correction matrix C as follows:
  21715. C = Q * Linv * Q'
  21716. This matrix rotates the input vector, scales it by some amount, and
  21717. then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
  21718. We compute RC in order to correct the rotation vectors. We take its
  21719. two columns as the corrected vectors.
  21720. In order to show that the two columns of RC are orthonormal, we can
  21721. show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
  21722. expand the expression:
  21723. (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
  21724. Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
  21725. Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
  21726. I have provided below a closed formula to correct the rotation vectors.
  21727. What C does to R is very interesting: it makes the singular values
  21728. become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
  21729. values of R are the square roots of the eigenvalues of R'R. Letting
  21730. S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
  21731. means that RC is equivalent to the correction "trick" using the SVD
  21732. found in the computer vision literature (i.e., compute the SVD and
  21733. return U V'). That "trick" is known to return the rotation matrix that
  21734. minimizes the Frobenius norm of the difference between the input and
  21735. the output. Consequently, the technique I have just presented is also
  21736. optimal in that sense!
  21737. By the way, the input matrix R does not need to be 3x2.
  21738. */
  21739. // compute the entries of R'R
  21740. const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
  21741. const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
  21742. const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
  21743. // compute the two real eigenvalues of R'R
  21744. const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
  21745. const sqrt = Math.sqrt(delta); // delta >= 0 always
  21746. const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
  21747. const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
  21748. // compute two unit eigenvectors qi = (xi,yi) of R'R
  21749. const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
  21750. const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
  21751. const y1 = x1 / alpha1;
  21752. const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
  21753. const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
  21754. const y2 = x2 / alpha2;
  21755. // compute the Cholesky decomposition LL' of the diagonal matrix D
  21756. // whose entries are the two eigenvalues of R'R and then invert L
  21757. const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
  21758. const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
  21759. // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
  21760. // is orthogonal and Linv is computed as above
  21761. const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
  21762. const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
  21763. const C = Q.times(Linv).times(Qt);
  21764. // correct the rotation vectors r1 and r2 using C
  21765. const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
  21766. return speedy_vision_default().Matrix(R.times(C)).read();
  21767. }
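/*
// A quick sanity check (illustrative, not part of the class): feed two
// slightly non-orthonormal vectors and verify that the corrected columns
// satisfy R'R ~ I. The input values below are arbitrary.
const r = this._refineRotation(1.02, 0.01, -0.03, 0.02, 0.98, 0.05);
const dot = r[0]*r[3] + r[1]*r[4] + r[2]*r[5];   // expected: ~0
const len1 = Math.hypot(r[0], r[1], r[2]);       // expected: ~1
const len2 = Math.hypot(r[3], r[4], r[5]);       // expected: ~1
console.log('r1.r2 =', dot, '|r1| =', len1, '|r2| =', len2);
*/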
  21768. /**
  21769. * Compute a refined translation vector
  21770. * @param normalizedHomography ideal pinhole K = I
  21771. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  21772. * @param t0 initial estimate for the translation vector
  21773. * @returns 3x1 translation vector in column-major format
  21774. */
  21775. _refineTranslation(normalizedHomography, rot, t0) {
  21776. /*
  21777. Given a normalized homography H, the rotation vectors r1, r2, and a
  21778. translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
  21779. scale factor s.
  21780. If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
  21781. [ r1 | r2 | t ] u is parallel to H u, which means that their cross
  21782. product is zero:
  21783. [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
  21784. The following code finds an optimal translation vector t based on the
  21785. above observation. H, r1, r2 are known.
  21786. */
  21787. const B = TRANSLATION_REFINEMENT_BUFFERS;
  21788. const n = TRANSLATION_REFINEMENT_SAMPLES;
  21789. const n3 = TRANSLATION_REFINEMENT_SAMPLES_3X;
  21790. Utils.assert(B.x.length === n);
  21791. const h = normalizedHomography.read();
  21792. const h11 = h[0], h12 = h[3], h13 = h[6];
  21793. const h21 = h[1], h22 = h[4], h23 = h[7];
  21794. const h31 = h[2], h32 = h[5], h33 = h[8];
  21795. const r11 = rot[0], r12 = rot[3];
  21796. const r21 = rot[1], r22 = rot[4];
  21797. const r31 = rot[2], r32 = rot[5];
  21798. // get sample points (xi, yi), 0 <= i < n
  21799. const x = B.x, y = B.y;
  21800. // set auxiliary values: ai = H [ xi yi 1 ]'
  21801. const a1 = B.a1, a2 = B.a2, a3 = B.a3;
  21802. for (let i = 0; i < n; i++) {
  21803. a1[i] = x[i] * h11 + y[i] * h12 + h13;
  21804. a2[i] = x[i] * h21 + y[i] * h22 + h23;
  21805. a3[i] = x[i] * h31 + y[i] * h32 + h33;
  21806. }
  21807. // solve M t = v for t; M: 3n x 3, v: 3n x 1, t: 3 x 1 (linear least squares)
  21808. const m = B.m, v = B.v;
  21809. for (let i = 0, k = 0; k < n; i += 3, k++) {
  21810. m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
  21811. m[i + n3] = -(m[i + 1] = a3[k]);
  21812. m[i + 2] = -(m[i + n3 + n3] = a2[k]);
  21813. m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
  21814. v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
  21815. v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
  21816. v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
  21817. }
  21818. /*
21819. // this works, but I want something more lightweight
  21820. const M = Speedy.Matrix(n3, 3, m);
  21821. const v_ = Speedy.Matrix(n3, 1, v);
  21822. return Speedy.Matrix(M.ldiv(v_)).read();
  21823. */
  21824. /*
  21825. Gradient descent with optimal step size / learning rate
  21826. -------------------------------------------------------
  21827. Let's find the column-vector x that minimizes the error function
  21828. E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
  21829. least squares. We want to find x easily, QUICKLY and iteratively.
  21830. The update rule of gradient descent is set to:
  21831. x := x - w * grad(E)
  21832. where w is the learning rate and grad(E) is the gradient of E(x):
  21833. grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
  21834. Let's adjust w to make x "converge quickly". Define function S(w) as:
  21835. S(w) = x - w * grad(E) (step)
  21836. and another function F(w) as:
  21837. F(w) = E(S(w))
  21838. which is the error of the step. We minimize F by setting its derivative
  21839. to zero:
21840. 0 = dF/dw = (dF/dS) (dS/dw)
  21842. What follows is a fair amount of algebra. Do the math and you'll find
  21843. the following optimal update rule:
21844. x := x - [ (c'c) / ((Ac)'(Ac)) ] c
21845. where c = A'r = A'(Ax - b)
  21848. */
  21849. // initial guess
  21850. const t = B.t;
  21851. t[0] = t0[0];
  21852. t[1] = t0[1];
  21853. t[2] = t0[2];
  21854. // gradient descent: super lightweight implementation
  21855. const r = B.r, c = B.c, Mc = B.Mc;
  21856. for (let it = 0; it < TRANSLATION_REFINEMENT_ITERATIONS; it++) {
  21857. // compute residual r = Mt - v
  21858. for (let i = 0; i < n3; i++) {
  21859. r[i] = 0;
  21860. for (let j = 0; j < 3; j++)
  21861. r[i] += m[j * n3 + i] * t[j];
  21862. r[i] -= v[i];
  21863. }
  21864. // compute c = M'r
  21865. for (let i = 0; i < 3; i++) {
  21866. c[i] = 0;
  21867. for (let j = 0; j < n3; j++)
  21868. c[i] += m[i * n3 + j] * r[j];
  21869. }
  21870. // compute Mc
  21871. for (let i = 0; i < n3; i++) {
  21872. Mc[i] = 0;
  21873. for (let j = 0; j < 3; j++)
  21874. Mc[i] += m[j * n3 + i] * c[j];
  21875. }
  21876. // compute num = c'c and den = (Mc)'(Mc)
  21877. let num = 0, den = 0;
  21878. for (let i = 0; i < 3; i++)
  21879. num += c[i] * c[i];
  21880. for (let i = 0; i < n3; i++)
  21881. den += Mc[i] * Mc[i];
  21882. // compute num / den
  21883. const frc = num / den;
  21884. if (Number.isNaN(frc))
  21885. break;
  21886. // iterate: t = t - (num / den) * c
  21887. for (let i = 0; i < 3; i++)
  21888. t[i] -= frc * c[i];
  21889. }
  21890. //console.log("OLD t:\n\n",t0.join('\n'));
  21891. //console.log("new t:\n\n",t.join('\n'));
  21892. // done!
  21893. return t;
  21894. }
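/*
// A minimal standalone sketch of the optimal-step gradient descent used
// above, applied to a tiny least-squares problem (illustrative values):
// minimize |Ax - b|^2 with a 3x2 matrix A and a 3x1 vector b.
const A = [[1, 0], [0, 2], [1, 1]], b = [1, 2, 3];
let x = [0, 0];
for (let it = 0; it < 50; it++) {
    const r = A.map((row, i) => row[0] * x[0] + row[1] * x[1] - b[i]); // r = Ax - b
    const c = [0, 1].map(j => A.reduce((s, row, i) => s + row[j] * r[i], 0)); // c = A'r
    const Ac = A.map(row => row[0] * c[0] + row[1] * c[1]);
    const num = c[0] * c[0] + c[1] * c[1];
    const den = Ac.reduce((s, v) => s + v * v, 0);
    if (den === 0) break;
    x = [x[0] - (num / den) * c[0], x[1] - (num / den) * c[1]]; // x := x - (c'c / (Ac)'(Ac)) c
}
// x converges to the least-squares solution of Ax = b
*/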
  21895. /**
  21896. * Apply a smoothing filter to the partial pose
  21897. * @param partialPose 3x3 [ r1 | r2 | t ]
  21898. * @returns filtered partial pose
  21899. */
  21900. _filterPartialPose(partialPose) {
  21901. const avg = new Array(9).fill(0);
  21902. const entries = partialPose.read();
  21903. const rotationBlock = entries.slice(0, 6);
  21904. const translationBlock = entries.slice(6, 9);
  21905. // how many samples should we store, at most?
  21906. const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
  21907. const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
  21908. const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
  21909. // is it a valid partial pose?
  21910. if (!Number.isNaN(entries[0])) {
  21911. // store samples
  21912. this._partialRotationBuffer.unshift(rotationBlock);
  21913. if (this._partialRotationBuffer.length > N)
  21914. this._partialRotationBuffer.length = N;
  21915. this._translationBuffer.unshift(translationBlock);
  21916. if (this._translationBuffer.length > M)
  21917. this._translationBuffer.length = M;
  21918. }
  21919. else if (this._partialRotationBuffer.length == 0) {
  21920. // invalid pose, no samples
  21921. return speedy_vision_default().Matrix.Eye(3);
  21922. }
  21923. // average *nearby* rotations
  21924. const n = this._partialRotationBuffer.length;
  21925. for (let i = 0; i < n; i++) {
  21926. const r = this._partialRotationBuffer[i];
  21927. for (let j = 0; j < 6; j++)
  21928. avg[j] += r[j] / n;
  21929. }
  21930. const r = this._refineRotation(avg[0], avg[1], avg[2], avg[3], avg[4], avg[5]);
  21931. // average translations
  21932. const m = this._translationBuffer.length;
  21933. for (let i = 0; i < m; i++) {
  21934. const t = this._translationBuffer[i];
  21935. for (let j = 0; j < 3; j++)
  21936. avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
  21937. //avg[6 + j] += t[j] / m;
  21938. }
  21939. const t = [avg[6], avg[7], avg[8]];
  21940. // done!
  21941. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  21942. }
  21943. /**
  21944. * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
  21945. * @param partialPose
  21946. * @returns 3x4 matrix
  21947. */
  21948. _estimateFullPose(partialPose) {
  21949. const p = partialPose.read();
  21950. const r11 = p[0], r12 = p[3], t1 = p[6];
  21951. const r21 = p[1], r22 = p[4], t2 = p[7];
  21952. const r31 = p[2], r32 = p[5], t3 = p[8];
  21953. // r3 = +- ( r1 x r2 )
  21954. let r13 = r21 * r32 - r31 * r22;
  21955. let r23 = r31 * r12 - r11 * r32;
  21956. let r33 = r11 * r22 - r21 * r12;
  21957. // let's make sure that det R = +1 (keep the orientation)
  21958. const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
  21959. if (det < 0) {
  21960. r13 = -r13;
  21961. r23 = -r23;
  21962. r33 = -r33;
  21963. }
  21964. // done!
  21965. return speedy_vision_default().Matrix(3, 4, [
  21966. r11, r21, r31,
  21967. r12, r22, r32,
  21968. r13, r23, r33,
  21969. t1, t2, t3,
  21970. ]);
  21971. }
  21972. /**
  21973. * Estimate the pose [ R | t ] given a homography in AR screen space
  21974. * @param homography must be valid
  21975. * @param f focal length
  21976. * @returns 3x4 matrix
  21977. */
  21978. _estimatePose(homography, f = this._intrinsics[FY]) {
  21979. const normalizedHomography = this._normalizeHomography(homography, f);
  21980. const partialPose = speedy_vision_default().Matrix.Eye(3);
  21981. // we want the estimated partial pose [ r1 | r2 | t ] to be as close
  21982. // as possible to the normalized homography, up to a scale factor;
  21983. // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
  21984. // it won't be a perfect equality due to noise in the homography
  21985. const residual = speedy_vision_default().Matrix(normalizedHomography);
  21986. for (let k = 0; k < POSE_ITERATIONS; k++) {
  21987. // incrementally improve the partial pose
  21988. const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
  21989. partialPose.setToSync(rt.times(partialPose));
  21990. residual.setToSync(residual.times(rt.inverse()));
  21991. //console.log("residual",residual.toString());
  21992. }
  21993. //console.log('-----------');
  21994. /*
  21995. // test
  21996. const result = Speedy.Matrix.Zeros(3);
  21997. result.setToSync(partialPose.times(normalizedHomography.inverse()));
  21998. const m11 = result.at(0,0);
  21999. result.setToSync(result.times(1/m11));
  22000. console.log("Pose * NORMALIZED HOM^-1", result.toString());
  22001. */
  22002. /*
  22003. const rt = partialPose.read();
  22004. const r = rt.slice(0, 6);
  22005. const t = this._refineTranslation(normalizedHomography, r, rt.slice(6, 9));
  22006. const refinedPartialPose = Speedy.Matrix(3, 3, r.concat(t));
  22007. const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
  22008. */
  22009. // filter the partial pose
  22010. const filteredPartialPose = this._filterPartialPose(partialPose);
  22011. // estimate the full pose
  22012. return this._estimateFullPose(filteredPartialPose);
  22013. }
  22014. /**
  22015. * Store an estimated pose
  22016. * @param pose 3x4 matrix
  22017. */
  22018. _storePose(pose) {
  22019. this._extrinsics = pose.read();
  22020. }
  22021. }
  22022. ;// CONCATENATED MODULE: ./src/geometry/pose.ts
  22023. /*
  22024. * MARTINS.js Free Edition
  22025. * GPU-accelerated Augmented Reality for the web
  22026. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22027. * https://github.com/alemart/martins-js
  22028. *
  22029. * This program is free software: you can redistribute it and/or modify
  22030. * it under the terms of the GNU Affero General Public License version 3
  22031. * as published by the Free Software Foundation.
  22032. *
  22033. * This program is distributed in the hope that it will be useful,
  22034. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22035. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22036. * GNU Affero General Public License for more details.
  22037. *
  22038. * You should have received a copy of the GNU Affero General Public License
  22039. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22040. *
  22041. * pose.ts
  22042. * A pose represents a position and an orientation in a 3D space
  22043. */
  22044. /**
  22045. * A pose represents a position and an orientation in a 3D space
  22046. * (and sometimes a scale, too...)
  22047. */
  22048. class Pose {
  22049. /**
  22050. * Constructor
  22051. * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
  22052. */
  22053. constructor(transform) {
  22054. this._transform = transform;
  22055. }
  22056. /**
  22057. * A transform describing the position and the orientation
  22058. * of the pose relative to the 3D space to which it belongs
  22059. */
  22060. get transform() {
  22061. return this._transform;
  22062. }
  22063. }
  22064. ;// CONCATENATED MODULE: ./src/geometry/transform.ts
  22065. /*
  22066. * MARTINS.js Free Edition
  22067. * GPU-accelerated Augmented Reality for the web
  22068. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22069. * https://github.com/alemart/martins-js
  22070. *
  22071. * This program is free software: you can redistribute it and/or modify
  22072. * it under the terms of the GNU Affero General Public License version 3
  22073. * as published by the Free Software Foundation.
  22074. *
  22075. * This program is distributed in the hope that it will be useful,
  22076. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22077. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22078. * GNU Affero General Public License for more details.
  22079. *
  22080. * You should have received a copy of the GNU Affero General Public License
  22081. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22082. *
  22083. * transform.ts
  22084. * 3D geometrical transforms
  22085. */
  22086. /**
  22087. * A 3D transformation
  22088. */
  22089. class BaseTransform {
  22090. /**
  22091. * Constructor
  22092. * @param matrix a 4x4 matrix
  22093. */
  22094. constructor(matrix) {
  22095. if (matrix.rows != 4 || matrix.columns != 4)
  22096. throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
  22097. this._matrix = matrix;
  22098. }
  22099. /**
  22100. * The 4x4 transformation matrix (read-only)
  22101. */
  22102. get matrix() {
  22103. return this._matrix;
  22104. }
  22105. }
  22106. /**
  22107. * An invertible 3D transformation
  22108. */
  22109. class InvertibleTransform extends BaseTransform {
  22110. /**
  22111. * Constructor
  22112. * @param matrix a 4x4 matrix
  22113. */
  22114. constructor(matrix) {
  22115. // WARNING: we do not check if the matrix actually encodes an invertible transform!
  22116. super(matrix);
  22117. }
  22118. /**
  22119. * The inverse of the transform
  22120. */
  22121. get inverse() {
  22122. const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
  22123. return new InvertibleTransform(inverseMatrix);
  22124. }
  22125. }
  22126. /**
  22127. * A 3D transformation described by translation, rotation and scale
  22128. */
  22129. class StandardTransform extends InvertibleTransform {
  22130. // TODO: position, rotation and scale attributes
  22131. /**
  22132. * Constructor
  22133. * @param matrix a 4x4 matrix
  22134. */
  22135. constructor(matrix) {
  22136. // WARNING: we do not check if the matrix actually encodes a standard transform!
  22137. super(matrix);
  22138. }
  22139. /**
  22140. * The inverse of the transform
  22141. */
  22142. get inverse() {
  22143. /*
  22144. The inverse of a 4x4 standard transform T * R * S...
  22145. [ RS t ] is [ ZR' -ZR't ]
  22146. [ 0' 1 ] [ 0' 1 ]
  22147. where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
  22148. */
  22149. return super.inverse;
  22150. }
  22151. }
  22152. /**
  22153. * A 3D transformation described by position and orientation
  22154. */
  22155. class RigidTransform extends StandardTransform {
  22156. // TODO: position and rotation attributes (need to decompose the matrix)
  22157. /**
  22158. * Constructor
  22159. * @param matrix a 4x4 matrix
  22160. */
  22161. constructor(matrix) {
  22162. // WARNING: we do not check if the matrix actually encodes a rigid transform!
  22163. super(matrix);
  22164. }
  22165. /**
  22166. * The inverse of the transform
  22167. */
  22168. get inverse() {
  22169. /*
  22170. The inverse of a 4x4 rigid transform
  22171. [ R t ] is [ R' -R't ]
  22172. [ 0' 1 ] [ 0' 1 ]
  22173. where R is 3x3, t is 3x1 and 0' is 1x3
  22174. */
  22175. const m = this._matrix.read();
  22176. if (m[15] == 0) // error? abs()??
  22177. throw new IllegalOperationError('Not a rigid transform');
  22178. const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
  22179. const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
  22180. const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
  22181. const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
  22182. const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
  22183. const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
  22184. const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
  22185. const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
  22186. const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
  22187. r11, r12, r13, 0,
  22188. r21, r22, r23, 0,
  22189. r31, r32, r33, 0,
  22190. -rt1, -rt2, -rt3, 1
  22191. ]);
  22192. return new RigidTransform(inverseMatrix);
  22193. }
  22194. }
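/*
// A small usage sketch (illustrative only): the closed-form inverse above
// can be checked against the definition. A 90-degree rotation around the
// z-axis plus a translation, in column-major format:
const M = Speedy.Matrix(4, 4, [
    0, 1, 0, 0,    // column 1
   -1, 0, 0, 0,    // column 2
    0, 0, 1, 0,    // column 3
    5, 7, 2, 1     // column 4 (translation + homogeneous 1)
]);
const T = new RigidTransform(M);
const I = Speedy.Matrix(T.matrix.times(T.inverse.matrix)); // expected: ~identity
*/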
  22195. ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
  22196. /*
  22197. * MARTINS.js Free Edition
  22198. * GPU-accelerated Augmented Reality for the web
  22199. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22200. * https://github.com/alemart/martins-js
  22201. *
  22202. * This program is free software: you can redistribute it and/or modify
  22203. * it under the terms of the GNU Affero General Public License version 3
  22204. * as published by the Free Software Foundation.
  22205. *
  22206. * This program is distributed in the hope that it will be useful,
  22207. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22208. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22209. * GNU Affero General Public License for more details.
  22210. *
  22211. * You should have received a copy of the GNU Affero General Public License
  22212. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22213. *
  22214. * viewer-pose.ts
  22215. * The pose of a virtual camera in 3D world space at a moment in time
  22216. */
  22217. /**
  22218. * The pose of a virtual camera in 3D world space at a moment in time
  22219. */
  22220. class ViewerPose extends Pose {
  22221. /**
  22222. * Constructor
  22223. * @param camera camera model
  22224. */
  22225. constructor(camera) {
  22226. // compute the view matrix and its inverse in AR screen space
  22227. const viewMatrix = ViewerPose._computeViewMatrix(camera);
  22228. const inverseTransform = new RigidTransform(viewMatrix);
  22229. super(inverseTransform.inverse);
  22230. this._viewMatrix = viewMatrix;
  22231. }
  22232. /**
  22233. * This 4x4 matrix moves 3D points from world space to viewer space. We
  22234. * assume that the camera is looking in the direction of the negative
  22235. * z-axis (WebGL-friendly)
  22236. */
  22237. get viewMatrix() {
  22238. return this._viewMatrix;
  22239. }
  22240. /**
  22241. * Compute the view matrix in AR screen space, measured in pixels
  22242. * @param camera
  22243. * @returns a 4x4 matrix describing a rotation and a translation
  22244. */
  22245. static _computeViewMatrix(camera) {
  22246. /*
  22247. // this is the view matrix in AR screen space, measured in pixels
  22248. // we augment the extrinsics matrix, making it 4x4 by adding a
  22249. // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
  22250. const V = Speedy.Matrix(4, 4, [
  22251. E[0], E[1], E[2], 0,
  22252. E[3], E[4], E[5], 0,
  22253. E[6], E[7], E[8], 0,
  22254. E[9], E[10], E[11], 1
  22255. ]);
  22256. // we premultiply V by F, which performs a rotation around the
  22257. // x-axis by 180 degrees, so that we get the 3D objects in front
  22258. // of the camera pointing in the direction of the negative z-axis
  22259. const F = Speedy.Matrix(4, 4, [
  22260. 1, 0, 0, 0,
  22261. 0,-1, 0, 0,
  22262. 0, 0,-1, 0,
  22263. 0, 0, 0, 1
  22264. ]);
  22265. Matrix F * V is matrix V with the second and third rows negated
  22266. */
  22267. const E = camera.extrinsics;
  22268. return speedy_vision_default().Matrix(4, 4, [
  22269. E[0], -E[1], -E[2], 0,
  22270. E[3], -E[4], -E[5], 0,
  22271. E[6], -E[7], -E[8], 0,
  22272. E[9], -E[10], -E[11], 1
  22273. ]);
  22274. }
  22275. }
  22276. ;// CONCATENATED MODULE: ./src/geometry/view.ts
  22277. /*
  22278. * MARTINS.js Free Edition
  22279. * GPU-accelerated Augmented Reality for the web
  22280. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22281. * https://github.com/alemart/martins-js
  22282. *
  22283. * This program is free software: you can redistribute it and/or modify
  22284. * it under the terms of the GNU Affero General Public License version 3
  22285. * as published by the Free Software Foundation.
  22286. *
  22287. * This program is distributed in the hope that it will be useful,
  22288. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22289. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22290. * GNU Affero General Public License for more details.
  22291. *
  22292. * You should have received a copy of the GNU Affero General Public License
  22293. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22294. *
  22295. * view.ts
  22296. * A view of the 3D world at a moment in time,
  22297. * featuring the means to project points into clip space
  22298. */
  22299. /** Default distance in pixels of the near plane to the optical center of the camera */
  22300. const DEFAULT_NEAR = 1;
  22301. /** Default distance in pixels of the far plane to the optical center of the camera */
  22302. const DEFAULT_FAR = 20000;
  22303. /**
  22304. * A PerspectiveView is a View defining a symmetric frustum around the z-axis
  22305. * (perspective projection)
  22306. */
  22307. class PerspectiveView {
  22308. /**
  22309. * Constructor
  22310. * @param camera camera model
  22311. * @param near distance of the near plane
  22312. * @param far distance of the far plane
  22313. */
  22314. constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  22315. const intrinsics = camera.intrinsics;
  22316. const screenSize = camera.screenSize;
  22317. this._near = Math.max(0, +near);
  22318. this._far = Math.max(0, +far);
  22319. if (this._near >= this._far)
  22320. throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
  22321. this._aspect = screenSize.width / screenSize.height;
  22322. this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
  22323. this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
  22324. }
  22325. /**
  22326. * A 4x4 projection matrix for WebGL
  22327. */
  22328. get projectionMatrix() {
  22329. return this._projectionMatrix;
  22330. }
  22331. /**
  22332. * Aspect ratio of the frustum
  22333. */
  22334. get aspect() {
  22335. return this._aspect;
  22336. }
  22337. /**
  22338. * Vertical field-of-view of the frustum, measured in radians
  22339. */
  22340. get fovy() {
  22341. return 2 * Math.atan(this._tanOfHalfFovy);
  22342. }
  22343. /**
  22344. * Distance of the near plane
  22345. */
  22346. get near() {
  22347. return this._near;
  22348. }
  22349. /**
  22350. * Distance of the far plane
  22351. */
  22352. get far() {
  22353. return this._far;
  22354. }
  22355. /**
  22356. * Compute a perspective projection matrix for WebGL
  22357. * @param K camera intrinsics
  22358. * @param near distance of the near plane
  22359. * @param far distance of the far plane
  22360. */
  22361. static _computeProjectionMatrix(K, near, far) {
  22362. // we assume that the principal point is at the center of the image
  22363. const top = near * (K[V0] / K[FY]);
  22364. const right = near * (K[U0] / K[FX]);
  22365. const bottom = -top, left = -right; // symmetric frustum
  22366. // a derivation of this projection matrix can be found at
  22367. // https://www.songho.ca/opengl/gl_projectionmatrix.html
  22368. // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
  22369. return speedy_vision_default().Matrix(4, 4, [
  22370. 2 * near / (right - left), 0, 0, 0,
  22371. 0, 2 * near / (top - bottom), 0, 0,
  22372. (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
  22373. 0, 0, -2 * far * near / (far - near), 0
  22374. ]);
  22375. }
  22376. }
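/*
// Relation to the classic WebGL perspective matrix (a sketch; it relies on
// the principal point being at the image center, as assumed above). With
// column-major entries p = projectionMatrix.read():
//   p[5]  = FY / V0 = 1 / tan(fovy / 2)
//   p[0]  = FX / U0 = p[5] / aspect            (since fx == fy)
//   p[10] = -(far + near) / (far - near)
//   p[14] = -2 * far * near / (far - near)
// i.e., the same matrix produced by gluPerspective(fovy, aspect, near, far).
*/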
  22377. ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
  22378. /*
  22379. * MARTINS.js Free Edition
  22380. * GPU-accelerated Augmented Reality for the web
  22381. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22382. * https://github.com/alemart/martins-js
  22383. *
  22384. * This program is free software: you can redistribute it and/or modify
  22385. * it under the terms of the GNU Affero General Public License version 3
  22386. * as published by the Free Software Foundation.
  22387. *
  22388. * This program is distributed in the hope that it will be useful,
  22389. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22390. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22391. * GNU Affero General Public License for more details.
  22392. *
  22393. * You should have received a copy of the GNU Affero General Public License
  22394. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22395. *
22396. * viewer.ts
  22397. * A viewer represents a virtual camera in 3D world space
  22398. */
  22399. /**
  22400. * A viewer represents a virtual camera in 3D world space
  22401. */
  22402. class Viewer {
  22403. /**
  22404. * Constructor
  22405. * @param camera camera model
  22406. */
  22407. constructor(camera) {
  22408. this._pose = new ViewerPose(camera);
  22409. this._views = [new PerspectiveView(camera)];
  22410. }
  22411. /**
  22412. * The pose of this viewer
  22413. */
  22414. get pose() {
  22415. return this._pose;
  22416. }
  22417. /**
  22418. * The view of this viewer (only for monoscopic rendering)
  22419. */
  22420. get view() {
  22421. /*
  22422. if(this._views.length > 1)
  22423. throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
  22424. */
  22425. return this._views[0];
  22426. }
  22427. /**
  22428. * The views of this viewer
  22429. */
  22430. /*
  22431. get views(): View[]
  22432. {
  22433. return this._views.concat([]);
  22434. }
  22435. */
  22436. /**
  22437. * Convert a pose from world space to viewer space
  22438. * @param pose a pose in world space
  22439. * @returns a pose in viewer space
  22440. */
  22441. convertToViewerSpace(pose) {
  22442. const modelMatrix = pose.transform.matrix;
  22443. const viewMatrix = this._pose.viewMatrix;
  22444. const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
  22445. const transform = new StandardTransform(modelViewMatrix);
  22446. return new Pose(transform);
  22447. }
  22448. }
  22449. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
  22450. /*
  22451. * MARTINS.js Free Edition
  22452. * GPU-accelerated Augmented Reality for the web
  22453. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  22454. * https://github.com/alemart/martins-js
  22455. *
  22456. * This program is free software: you can redistribute it and/or modify
  22457. * it under the terms of the GNU Affero General Public License version 3
  22458. * as published by the Free Software Foundation.
  22459. *
  22460. * This program is distributed in the hope that it will be useful,
  22461. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22462. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22463. * GNU Affero General Public License for more details.
  22464. *
  22465. * You should have received a copy of the GNU Affero General Public License
  22466. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22467. *
  22468. * tracking.ts
  22469. * Tracking state of the Image Tracker
  22470. */
22471. /** Whether or not we want to accelerate GPU-CPU transfers. Using turbo adds a slight delay to the tracking */
  22472. const USE_TURBO = true;
  22473. /** Number of PBOs; meaningful only when using turbo */
  22474. const NUMBER_OF_PBOS = 2;
  22475. /** Frame skipping; meaningful only when using turbo */
  22476. const TURBO_SKIP = 2;
  22477. /**
  22478. * The tracking state of the Image Tracker tracks
  22479. * keypoints of the image target and updates the
  22480. * rectification matrix
  22481. */
  22482. class ImageTrackerTrackingState extends ImageTrackerState {
  22483. /**
  22484. * Constructor
  22485. * @param imageTracker
  22486. */
  22487. constructor(imageTracker) {
  22488. super('tracking', imageTracker);
  22489. this._referenceImage = null;
  22490. this._warpHomography = speedy_vision_default().Matrix.Eye(3);
  22491. this._poseHomography = speedy_vision_default().Matrix.Eye(3);
  22492. this._initialHomography = speedy_vision_default().Matrix.Eye(3);
  22493. this._initialKeypoints = [];
  22494. this._counter = 0;
  22495. this._camera = new CameraModel();
  22496. this._predictedKeypoints = [];
  22497. this._lastPipelineOutput = { keypoints: [] };
  22498. this._pipelineCounter = 0;
  22499. this._lastOutput = {};
  22500. this._lostCounter = 0;
  22501. // we need at least 4 correspondences of points to compute a homography matrix
  22502. Utils.assert(TRACK_MIN_MATCHES >= 4);
  22503. }
  22504. /**
  22505. * Called as soon as this becomes the active state, just before update() runs for the first time
  22506. * @param settings
  22507. */
  22508. onEnterState(settings) {
  22509. const homography = settings.homography;
  22510. const referenceImage = settings.referenceImage;
  22511. const templateKeypoints = settings.templateKeypoints;
  22512. const keypointPortalSink = settings.keypointPortalSink;
  22513. const screenSize = settings.screenSize; // this.screenSize is not yet set
  22514. const keypointPortalSource = this._pipeline.node('keypointPortalSource');
  22515. // this shouldn't happen
  22516. if (!referenceImage)
  22517. throw new IllegalOperationError(`Can't track a null reference image`);
  22518. // set attributes
  22519. this._referenceImage = referenceImage;
  22520. this._warpHomography = speedy_vision_default().Matrix(homography);
  22521. this._poseHomography = speedy_vision_default().Matrix(homography);
  22522. this._initialHomography = speedy_vision_default().Matrix(homography);
  22523. this._initialKeypoints = templateKeypoints;
  22524. this._counter = 0;
  22525. this._predictedKeypoints = [];
  22526. this._lastPipelineOutput = { keypoints: [] };
  22527. this._pipelineCounter = 0;
  22528. this._lastOutput = {};
  22529. this._lostCounter = 0;
  22530. // setup portals
  22531. keypointPortalSource.source = keypointPortalSink;
  22532. // setup camera
  22533. this._camera.init(screenSize);
  22534. // emit event
  22535. const ev = new ImageTrackerEvent('targetfound', referenceImage);
  22536. this._imageTracker.dispatchEvent(ev);
  22537. // log
  22538. Utils.log(`Tracking image "${referenceImage.name}"...`);
  22539. }
  22540. /**
  22541. * Called when leaving the state
  22542. */
  22543. onLeaveState() {
  22544. const referenceImage = this._referenceImage;
  22545. // release the camera
  22546. this._camera.release();
  22547. // emit event
  22548. const ev = new ImageTrackerEvent('targetlost', referenceImage);
  22549. this._imageTracker.dispatchEvent(ev);
  22550. }
  22551. /**
  22552. * Called just before the GPU processing
  22553. * @returns promise
  22554. */
  22555. _beforeUpdate() {
  22556. const imageRectifier = this._pipeline.node('imageRectifier');
  22557. const borderClipper = this._pipeline.node('borderClipper');
  22558. const keypointRectifier = this._pipeline.node('keypointRectifier');
  22559. const screenSize = this.screenSize;
  22560. /*
  22561. // pause media (test)
  22562. const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
  22563. const media = source.media as SpeedyMedia;
  22564. (media.source as HTMLVideoElement).pause();
  22565. */
  22566. // clip keypoints from the borders of the target image
  22567. borderClipper.imageSize = screenSize;
  22568. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  22569. // rectify the image
  22570. return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
  22571. imageRectifier.transform = warp;
  22572. });
  22573. }
  22574. /**
  22575. * GPU processing
  22576. * @returns promise with the pipeline results
  22577. */
  22578. _gpuUpdate() {
  22579. //return super._gpuUpdate();
  22580. // No turbo?
  22581. if (!USE_TURBO || Settings.powerPreference == 'low-power')
  22582. return super._gpuUpdate();
  22583. // When using turbo, we reduce the GPU usage by skipping every other frame
  22584. const counter = this._pipelineCounter;
  22585. this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
  22586. // Skip frame
  22587. if (counter != 0) {
  22588. if (this._lastPipelineOutput.keypoints !== undefined) {
  22589. this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
  22590. }
  22591. else
  22592. this._predictedKeypoints.length = 0;
  22593. this._lastPipelineOutput.keypoints = this._predictedKeypoints;
  22594. return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
  22595. }
  22596. // Run the pipeline and store the results
  22597. return super._gpuUpdate().then(results => {
  22598. this._lastPipelineOutput = results;
  22599. return results;
  22600. });
  22601. }
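/*
// Standalone sketch of the skip schedule (assuming the defaults above,
// TURBO_SKIP = 2): the GPU pipeline runs on every other frame and the
// in-between frames reuse predicted keypoints.
let pipelineCounter = 0;
for (let frame = 0; frame < 6; frame++) {
    const runPipeline = (pipelineCounter === 0);
    pipelineCounter = (pipelineCounter + 1) % 2; // TURBO_SKIP
    console.log(`frame ${frame}: ${runPipeline ? 'run the GPU pipeline' : 'predict keypoints'}`);
}
// frames 0, 2, 4 run the pipeline; frames 1, 3, 5 call _predictKeypoints()
*/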
  22602. /**
  22603. * Post processing that takes place just after the GPU processing
  22604. * @param result pipeline results
  22605. * @returns state output
  22606. */
  22607. _afterUpdate(result) {
  22608. const imageRectifier = this._pipeline.node('imageRectifier');
  22609. const keypoints = result.keypoints;
  22610. const image = result.image;
  22611. const referenceImage = this._referenceImage;
  22612. // find the best keypoint matches
  22613. return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
  22614. // find motion models
  22615. return speedy_vision_default().Promise.all([
  22616. this._findAffineMotion(matches),
  22617. this._findPerspectiveMotion(matches)
  22618. ]);
  22619. }).then(([affineMotion, perspectiveMotion]) => {
  22620. const lowPower = (Settings.powerPreference == 'low-power');
  22621. const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
  22622. // update warp homography
  22623. const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
  22624. const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
  22625. if (!USE_TURBO || this._counter % delay == remainder)
  22626. this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
  22627. // update pose homography
  22628. if (!frozen)
  22629. this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
  22630. // update counter
  22631. this._counter = (this._counter + 1) % delay;
  22632. // update the camera
  22633. if (!frozen)
  22634. return this._camera.update(this._poseHomography, this.screenSize);
  22635. else
  22636. return this._camera.matrix;
  22637. }).then(_ => {
  22638. // find the inverse of the rectification matrix
  22639. const rectificationMatrix = imageRectifier.transform;
  22640. const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
  22641. // move keypoints from rectified space back to image space
  22642. const n = keypoints.length;
  22643. const coords = new Array(2 * n);
  22644. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22645. coords[j] = keypoints[i].position.x;
  22646. coords[j + 1] = keypoints[i].position.y;
  22647. }
  22648. return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
  22649. /*
  22650. // test image center
  22651. const coords2: number[] = new Array(2 * n);
  22652. for(let i = 0, j = 0; i < n; i++, j += 2) {
  22653. coords2[j] = this._imageTracker.screenSize.width / 2;
  22654. coords2[j+1] = this._imageTracker.screenSize.height / 2;
  22655. if(i % 2 == 0) {
  22656. coords2[j] = this._imageTracker.screenSize.width / 4;
  22657. coords2[j+1] = this._imageTracker.screenSize.height / 4;
  22658. }
  22659. }
  22660. return Speedy.Matrix.applyPerspectiveTransform(
  22661. Speedy.Matrix.Zeros(2, n),
  22662. Speedy.Matrix(2, n, coords2),
  22663. this._poseHomography
  22664. //this._warpHomography
  22665. );
  22666. */
  22667. }).then(mat => {
  22668. /*
  22669. const n = keypoints.length;
  22670. const coords = mat.read();
  22671. // ** this will interfere with the calculations when frame skipping is on **
  22672. // get keypoints in image space
  22673. for(let i = 0, j = 0; i < n; i++, j += 2) {
  22674. keypoints[i].position.x = coords[j];
  22675. keypoints[i].position.y = coords[j+1];
  22676. }
  22677. */
  22678. // find a polyline surrounding the target
  22679. return this._findPolyline(this._poseHomography, this.screenSize);
  22680. //return this._findPolyline(this._warpHomography, this.screenSize);
  22681. }).then(polyline => {
  22682. // we let the target object be at the origin of the world space
  22683. // (identity transform). We also perform a change of coordinates,
  22684. // so that we move out from pixel space and into normalized space
  22685. const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
  22686. const transform = new StandardTransform(modelMatrix);
  22687. const pose = new Pose(transform);
  22688. // given the current state of the camera model, we get a viewer
  22689. // compatible with the pose of the target
  22690. const viewer = new Viewer(this._camera);
  22691. // the trackable object
  22692. const trackable = {
  22693. pose: pose,
  22694. referenceImage: referenceImage
  22695. };
  22696. // the result generated by the image tracker
  22697. const result = {
  22698. tracker: this._imageTracker,
  22699. trackables: [trackable],
  22700. viewer: viewer
  22701. };
  22702. // build and save the output
  22703. this._lastOutput = {
  22704. exports: result,
  22705. cameraMatrix: this._camera.matrix,
  22706. homography: this._warpHomography,
  22707. //keypoints: keypoints,
  22708. screenSize: this.screenSize,
  22709. image: image,
  22710. polyline: polyline,
  22711. };
  22712. // we have successfully tracked the target in this frame
  22713. this._lostCounter = 0;
  22714. // done!
  22715. return {
  22716. nextState: 'tracking',
  22717. trackerOutput: this._lastOutput
  22718. };
  22719. }).catch(err => {
  22720. // give some tolerance to tracking errors
  22721. if (err instanceof TrackingError) {
  22722. if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
  22723. //console.log("ABSORB",this._lostCounter,err.toString())
  22724. // absorb the error
  22725. return {
  22726. nextState: 'tracking',
  22727. trackerOutput: this._lastOutput
  22728. };
  22729. }
  22730. }
  22731. // lost tracking
  22732. Utils.warning(`The target has been lost! ${err.toString()}`);
  22733. this._camera.reset();
  22734. // go back to the scanning state
  22735. return {
  22736. nextState: 'scanning',
  22737. trackerOutput: {
  22738. image: image,
  22739. screenSize: this.screenSize,
  22740. },
  22741. };
  22742. });
  22743. }
  22744. /**
  22745. * Find quality matches between two sets of keypoints
  22746. * @param currKeypoints keypoints of the current frame
  22747. * @param prevKeypoints keypoints of the previous frame
  22748. * @returns quality matches
  22749. */
  22750. _findQualityMatches(currKeypoints, prevKeypoints) {
  22751. const result = [[], []];
  22752. const n = currKeypoints.length;
  22753. for (let i = 0; i < n; i++) {
  22754. const currKeypoint = currKeypoints[i];
  22755. if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
  22756. const d1 = currKeypoint.matches[0].distance;
  22757. const d2 = currKeypoint.matches[1].distance;
  22758. if (d1 <= TRACK_MATCH_RATIO * d2) {
  22759. const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
  22760. result[0].push(currKeypoint);
  22761. result[1].push(prevKeypoint);
  22762. }
  22763. }
  22764. }
  22765. return result;
  22766. }
  22767. /**
  22768. * Find a better spatial distribution of the input matches
  22769. * @param matches quality matches
  22770. * @returns refined quality matches
  22771. */
  22772. _refineQualityMatches(matches) {
  22773. const currKeypoints = matches[0];
  22774. const prevKeypoints = matches[1];
  22775. // find a better spatial distribution of the keypoints
  22776. const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
  22777. const n = indices.length; // number of refined matches
  22778. // assemble output
  22779. const result = [new Array(n), new Array(n)];
  22780. for (let i = 0; i < n; i++) {
  22781. result[0][i] = currKeypoints[indices[i]];
  22782. result[1][i] = prevKeypoints[indices[i]];
  22783. }
  22784. // done!
  22785. return result;
  22786. }
  22787. /**
  22788. * Spatially distribute keypoints over a grid
  22789. * @param keypoints keypoints to be distributed
  22790. * @param gridCells number of grid elements in each axis
  22791. * @returns a list of indices of keypoints[]
  22792. */
  22793. _distributeKeypoints(keypoints, gridCells) {
  22794. // get the coordinates of the keypoints
  22795. const n = keypoints.length;
  22796. const points = new Array(2 * n);
  22797. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22798. points[j] = keypoints[i].x;
  22799. points[j + 1] = keypoints[i].y;
  22800. }
  22801. // normalize the coordinates to [0,1] x [0,1]
  22802. this._normalizePoints(points);
  22803. // distribute the keypoints over a grid
  22804. const numberOfCells = gridCells * gridCells;
  22805. const grid = (new Array(numberOfCells)).fill(-1);
  22806. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22807. // find the grid location of the i-th point
  22808. const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
  22809. const yg = Math.floor(points[j + 1] * gridCells);
  22810. // store the index of the i-th point in the grid
  22811. grid[yg * gridCells + xg] = i;
  22812. }
  22813. // retrieve points of the grid
  22814. const indices = [];
  22815. for (let g = 0; g < numberOfCells; g++) {
  22816. if (grid[g] >= 0) {
  22817. const i = grid[g];
  22818. indices.push(i);
  22819. }
  22820. }
  22821. // done!
  22822. return indices;
  22823. }
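/*
// A small illustration (hypothetical keypoints): with gridCells = 2 the
// normalized screen is split into a 2x2 grid and at most one keypoint
// survives per cell (the last one assigned to it).
const keypoints = [
    { x: 10, y: 10 }, { x: 12, y: 11 },  // both fall into the top-left cell
    { x: 90, y: 15 },                    // top-right cell
    { x: 20, y: 80 }                     // bottom-left cell
];
const indices = this._distributeKeypoints(keypoints, 2);
// indices == [1, 2, 3]: the duplicate in the top-left cell was dropped
*/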
  22824. /**
  22825. * Normalize points to [0,1)^2
  22826. * @param points 2 x n matrix of points in column-major format
  22827. * @returns points
  22828. */
  22829. _normalizePoints(points) {
  22830. Utils.assert(points.length % 2 == 0);
  22831. const n = points.length / 2;
  22832. if (n == 0)
  22833. return points;
  22834. let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
  22835. let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
  22836. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22837. const x = points[j], y = points[j + 1];
  22838. xmin = x < xmin ? x : xmin;
  22839. ymin = y < ymin ? y : ymin;
  22840. xmax = x > xmax ? x : xmax;
  22841. ymax = y > ymax ? y : ymax;
  22842. }
  22843. const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
  22844. const ylen = ymax - ymin + 1;
  22845. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22846. points[j] = (points[j] - xmin) / xlen;
  22847. points[j + 1] = (points[j + 1] - ymin) / ylen;
  22848. }
  22849. return points;
  22850. }
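/*
 * Worked example (illustrative): for points (10, 20) and (31, 50), we get
 * xmin = 10, xmax = 31, ymin = 20, ymax = 50, hence xlen = 22 and ylen = 31.
 * The points are mapped to (0, 0) and (21/22, 30/31), so every normalized
 * coordinate lies in [0, 1).
 */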
  22851. /**
  22852. * Find a matrix with the coordinates of quality matches
  22853. * @param matches n quality matches
  22854. * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
  22855. */
  22856. _findMatrixOfMatches(matches) {
  22857. const n = matches[0].length;
  22858. Utils.assert(n > 0);
  22859. // sets of keypoints
  22860. const currKeypoints = matches[0];
  22861. const prevKeypoints = matches[1];
  22862. // get the coordinates of the keypoints of the set of refined matches
  22863. const src = new Array(2 * n);
  22864. const dst = new Array(2 * n);
  22865. for (let i = 0, j = 0; i < n; i++, j += 2) {
  22866. src[j] = prevKeypoints[i].x;
  22867. src[j + 1] = prevKeypoints[i].y;
  22868. dst[j] = currKeypoints[i].x;
  22869. dst[j + 1] = currKeypoints[i].y;
  22870. }
  22871. // assemble the matrix
  22872. return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
  22873. }
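/*
 * Layout sketch (illustrative): for n = 2 matches with previous keypoints p0, p1
 * and current keypoints c0, c1, the data is laid out column by column as
 *
 *   [ p0.x  p1.x | c0.x  c1.x ]
 *   [ p0.y  p1.y | c0.y  c1.y ]
 *
 * i.e., the left 2 x n block holds the source (previous) coordinates and the
 * right 2 x n block holds the destination (current) coordinates.
 */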
  22874. /**
  22875. * Preprocess keypoint matches
  22876. * @param currKeypoints keypoints of the current frame
  22877. * @param prevKeypoints keypoints of the previous frame
  22878. * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
  22879. * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
  22880. */
  22881. _preprocessMatches(currKeypoints, prevKeypoints) {
  22882. // find and refine quality matches
  22883. const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
  22884. const refinedMatches = this._refineQualityMatches(qualityMatches);
  22885. // not enough matches?
  22886. const n = refinedMatches[0].length;
  22887. if (n < TRACK_MIN_MATCHES)
  22888. return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
  22889. // find matrix of matches
  22890. const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
  22891. // warp matrix of matches
  22892. const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
  22893. return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
  22894. }
  22895. /**
  22896. * Find an affine motion model of the target image
  22897. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  22898. * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
  22899. */
  22900. _findAffineMotion(preprocessedMatches) {
  22901. const model = speedy_vision_default().Matrix.Eye(3);
  22902. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  22903. // find motion model
  22904. return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  22905. method: 'pransac',
  22906. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  22907. numberOfHypotheses: 512,
  22908. bundleSize: 128,
  22909. }).then(_ => {
  22910. // validate the model
  22911. const a00 = model.at(0, 0);
  22912. if (Number.isNaN(a00))
  22913. throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
  22914. // done!
  22915. return model;
  22916. });
  22917. }
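/*
 * Note (illustrative): assuming block(firstRow, lastRow, firstColumn, lastColumn)
 * semantics, block(0, 1, 0, n - 1) selects the left 2 x n block (source points)
 * and block(0, 1, n, 2 * n - 1) selects the right 2 x n block (destination
 * points), matching the [ src | dest ] layout described in the JSDoc above.
 */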
  22918. /**
  22919. * Find a perspective motion model of the target image
  22920. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  22921. * @returns a promise that resolves to a 3x3 perspective motion model
  22922. */
  22923. _findPerspectiveMotion(preprocessedMatches) {
  22924. /*
  22925. We can probably get more accurate motion estimates if we
  22926. work in 3D rather than in 2D. We're currently estimating
  22927. an affine transform in image space. What if we projected
  22928. the keypoints into world space, estimated the camera motion
22929. (rotation and translation) that best describes the observed
22930. motion of the keypoints, and then projected things
  22931. back to image space? Need to figure this out; we'll get a
  22932. homography matrix.
  22933. Note: keypoints are in rectified image space.
  22934. Note: work with a 6 DoF perspective transform instead of 8.
  22935. */
  22936. const model = speedy_vision_default().Matrix.Zeros(3);
  22937. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  22938. // find motion model
  22939. return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  22940. method: 'pransac',
  22941. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  22942. numberOfHypotheses: 512 * 2,
22943. bundleSize: 128 * 4,
  22944. }).then(_ => {
  22945. // validate the model
  22946. const a00 = model.at(0, 0);
  22947. if (Number.isNaN(a00))
  22948. throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
  22949. // done!
  22950. return model;
  22951. });
  22952. }
  22953. /**
  22954. * Find a rectification matrix to be applied to the target image
  22955. * @param homography maps a reference image to the AR screen
22956. * @param screenSize size of the AR screen, in pixels
22957. * (note: the target media is retrieved internally from the reference image database)
  22958. * @returns promise that resolves to a rectification matrix
  22959. */
  22960. _findImageWarp(homography, screenSize) {
  22961. const referenceImage = this._referenceImage;
  22962. const media = this._imageTracker.database._findMedia(referenceImage.name);
  22963. const mat = speedy_vision_default().Matrix.Zeros(3);
  22964. return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
  22965. }
  22966. /**
  22967. * Find a warp to be applied to the keypoints
  22968. * @returns affine transform
  22969. */
  22970. _findKeypointWarp() {
  22971. const referenceImage = this._referenceImage;
  22972. const media = this._imageTracker.database._findMedia(referenceImage.name);
  22973. const screenSize = this.screenSize;
  22974. const sw = screenSize.width, sh = screenSize.height;
  22975. const mat = speedy_vision_default().Matrix.Eye(3, 3);
  22976. // no rotation is needed
  22977. if (!this._mustRotateWarpedImage(media, screenSize))
  22978. return speedy_vision_default().Promise.resolve(mat);
  22979. // rotate by 90 degrees clockwise and scale
  22980. return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
  22981. }
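/*
 * Sanity check (illustrative): the affine transform above is defined by the three
 * correspondences (0, sh) -> (0, 0), (0, 0) -> (sw, 0) and (sw, 0) -> (sw, sh).
 * Solving them gives (x, y) -> (sw - (sw/sh) * y, (sh/sw) * x), i.e. a 90-degree
 * clockwise rotation (with y pointing down) combined with a rescale that keeps
 * the result inside the sw x sh screen.
 */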
  22982. /**
  22983. * Predict the keypoints without actually looking at the image
  22984. * @param curr keypoints at time t (will modify the contents)
  22985. * @param initial keypoints at time t-1 (not just t = 0)
  22986. * @returns keypoints at time t+1
  22987. */
  22988. _predictKeypoints(curr, initial) {
  22989. // the target image is likely to be moving roughly in
  22990. // the same manner as it was in the previous frame
  22991. const next = [];
  22992. const n = curr.length;
  22993. for (let i = 0; i < n; i++) {
  22994. const cur = curr[i];
  22995. if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
  22996. continue;
  22997. /*
  22998. else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
  22999. continue;
  23000. */
  23001. const ini = initial[cur.matches[0].index];
  23002. const dx = cur.position.x - ini.position.x;
  23003. const dy = cur.position.y - ini.position.y;
  23004. // a better mathematical model is needed
  23005. const alpha = 0.8; //0.2;
  23006. cur.position.x = ini.position.x + alpha * dx;
  23007. cur.position.y = ini.position.y + alpha * dy;
  23008. next.push(cur);
  23009. }
  23010. // done!
  23011. return next;
  23012. }
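/*
 * Illustrative example: with alpha = 0.8, a keypoint whose x-coordinate moved from
 * 100 (previous frame) to 110 (current frame) is damped to
 * 100 + 0.8 * (110 - 100) = 108, i.e. the predicted displacement is a scaled-down
 * copy of the last observed displacement.
 */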
  23013. /**
  23014. * Create & setup the pipeline
  23015. * @returns pipeline
  23016. */
  23017. _createPipeline() {
  23018. const pipeline = speedy_vision_default().Pipeline();
  23019. const source = speedy_vision_default().Image.Source('source');
  23020. const screen = speedy_vision_default().Transform.Resize('screen');
  23021. const greyscale = speedy_vision_default().Filter.Greyscale();
  23022. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  23023. const nightvision = speedy_vision_default().Filter.Nightvision();
  23024. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  23025. const blur = speedy_vision_default().Filter.GaussianBlur();
  23026. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  23027. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  23028. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  23029. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  23030. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  23031. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  23032. const clipper = speedy_vision_default().Keypoint.Clipper();
  23033. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  23034. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  23035. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  23036. const imageSink = speedy_vision_default().Image.Sink('image');
  23037. source.media = null;
  23038. screen.size = speedy_vision_default().Size(0, 0);
  23039. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  23040. nightvision.gain = NIGHTVISION_GAIN;
  23041. nightvision.offset = NIGHTVISION_OFFSET;
  23042. nightvision.decay = NIGHTVISION_DECAY;
  23043. nightvision.quality = NIGHTVISION_QUALITY;
  23044. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  23045. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  23046. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  23047. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  23048. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  23049. detector.quality = TRACK_HARRIS_QUALITY;
  23050. detector.capacity = TRACK_DETECTOR_CAPACITY;
  23051. subpixel.method = SUBPIXEL_METHOD;
  23052. clipper.size = TRACK_MAX_KEYPOINTS;
  23053. borderClipper.imageSize = screen.size;
  23054. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  23055. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  23056. matcher.k = 2;
  23057. keypointPortalSource.source = null;
  23058. keypointSink.turbo = USE_TURBO;
  23059. // prepare input
  23060. source.output().connectTo(screen.input());
  23061. screen.output().connectTo(greyscale.input());
  23062. // preprocess images
  23063. greyscale.output().connectTo(imageRectifier.input());
  23064. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  23065. imageRectifier.output().connectTo(nightvision.input());
  23066. nightvision.output().connectTo(nightvisionMux.input('in1'));
  23067. // keypoint detection & clipping
  23068. nightvisionMux.output().connectTo(detector.input());
  23069. detector.output().connectTo(borderClipper.input());
  23070. borderClipper.output().connectTo(clipper.input());
  23071. // keypoint refinement
  23072. imageRectifier.output().connectTo(denoiser.input());
  23073. denoiser.output().connectTo(subpixel.input('image'));
  23074. clipper.output().connectTo(subpixel.input('keypoints'));
  23075. // keypoint description
  23076. imageRectifier.output().connectTo(blur.input());
  23077. blur.output().connectTo(descriptor.input('image'));
  23078. subpixel.output().connectTo(descriptor.input('keypoints'));
  23079. // keypoint matching
  23080. keypointPortalSource.output().connectTo(matcher.input('database'));
  23081. descriptor.output().connectTo(matcher.input('keypoints'));
  23082. // prepare output
  23083. descriptor.output().connectTo(keypointRectifier.input());
  23084. //preMatcher.output().connectTo(keypointRectifier.input());
  23085. keypointRectifier.output().connectTo(keypointSink.input());
  23086. matcher.output().connectTo(keypointSink.input('matches'));
  23087. //imageRectifier.output().connectTo(imageSink.input());
  23088. // done!
  23089. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
  23090. return pipeline;
  23091. }
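/*
 * Rough dataflow of the pipeline assembled above (summary, inferred from the
 * connections): the input media is resized to the AR screen and converted to
 * greyscale, then rectified; the rectified image feeds (a) an optional nightvision
 * enhancer selected by the multiplexer, followed by Harris detection, border
 * clipping and quantity clipping, (b) a denoised copy used for subpixel refinement,
 * and (c) a blurred copy used for ORB description; descriptors are matched with a
 * brute-force kNN matcher (k = 2) against keypoints arriving through the keypoint
 * portal, and the matched keypoints are rectified and emitted through the sink.
 */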
  23092. }
  23093. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
  23094. /*
  23095. * MARTINS.js Free Edition
  23096. * GPU-accelerated Augmented Reality for the web
  23097. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23098. * https://github.com/alemart/martins-js
  23099. *
  23100. * This program is free software: you can redistribute it and/or modify
  23101. * it under the terms of the GNU Affero General Public License version 3
  23102. * as published by the Free Software Foundation.
  23103. *
  23104. * This program is distributed in the hope that it will be useful,
  23105. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23106. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23107. * GNU Affero General Public License for more details.
  23108. *
  23109. * You should have received a copy of the GNU Affero General Public License
  23110. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23111. *
  23112. * image-tracker.ts
  23113. * Image Tracker
  23114. */
23115. /** A helper that formats a size as a "width x height" string */
  23116. const formatSize = (size) => `${size.width}x${size.height}`;
  23117. /**
  23118. * The ImageTracker tracks an image (one at a time)
  23119. */
  23120. class ImageTracker extends AREventTarget {
  23121. /**
  23122. * Constructor
  23123. */
  23124. constructor() {
  23125. super();
  23126. // the states
  23127. this._state = {
  23128. 'initial': new ImageTrackerInitialState(this),
  23129. 'training': new ImageTrackerTrainingState(this),
  23130. 'scanning': new ImageTrackerScanningState(this),
  23131. 'pre-tracking': new ImageTrackerPreTrackingState(this),
  23132. 'tracking': new ImageTrackerTrackingState(this),
  23133. };
  23134. // initial setup
  23135. this._session = null;
  23136. this._activeStateName = 'initial';
  23137. this._lastOutput = {};
  23138. this._database = new ReferenceImageDatabase();
  23139. // user settings
  23140. this._resolution = DEFAULT_TRACKING_RESOLUTION;
  23141. }
  23142. /**
  23143. * The type of the tracker
  23144. */
  23145. get type() {
  23146. return 'image-tracker';
  23147. }
  23148. /**
  23149. * Current state name
  23150. */
  23151. get state() {
  23152. return this._activeStateName;
  23153. }
  23154. /**
  23155. * Reference Image Database
  23156. * Must be configured before training the tracker
  23157. */
  23158. get database() {
  23159. return this._database;
  23160. }
  23161. /**
  23162. * Resolution of the AR screen space
  23163. */
  23164. get resolution() {
  23165. return this._resolution;
  23166. }
  23167. /**
  23168. * Resolution of the AR screen space
  23169. */
  23170. set resolution(resolution) {
  23171. this._resolution = resolution;
  23172. }
  23173. /**
  23174. * Size of the AR screen space, in pixels
  23175. * @internal
  23176. */
  23177. get screenSize() {
  23178. return this._state[this._activeStateName].screenSize;
  23179. }
  23180. /**
  23181. * Last emitted output
  23182. * @internal
  23183. */
  23184. get _output() {
  23185. return this._lastOutput;
  23186. }
  23187. /**
  23188. * Stats related to this tracker
  23189. * @internal
  23190. */
  23191. get _stats() {
  23192. return `${formatSize(this.screenSize)} ${this.state}`;
  23193. }
  23194. /**
  23195. * Initialize this tracker
  23196. * @param session
  23197. * @returns promise that resolves after the tracker has been initialized
  23198. * @internal
  23199. */
  23200. _init(session) {
  23201. // store the session
  23202. this._session = session;
  23203. // initialize states
  23204. for (const state of Object.values(this._state))
  23205. state.init();
  23206. // done!
  23207. return speedy_vision_default().Promise.resolve();
  23208. }
  23209. /**
  23210. * Release this tracker
  23211. * @returns promise that resolves after the tracker has been released
  23212. * @internal
  23213. */
  23214. _release() {
  23215. // release states
  23216. for (const state of Object.values(this._state))
  23217. state.release();
  23218. // unlink session
  23219. this._session = null;
  23220. // done!
  23221. return speedy_vision_default().Promise.resolve();
  23222. }
  23223. /**
  23224. * Update the tracker
  23225. * @returns promise
  23226. * @internal
  23227. */
  23228. _update() {
  23229. // validate
  23230. if (this._session == null)
  23231. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
  23232. // compute the screen size for image processing purposes
  23233. // note: this may change over time...!
  23234. const media = this._session.media;
  23235. const aspectRatio = media.width / media.height;
  23236. const screenSize = Utils.resolution(this._resolution, aspectRatio);
  23237. // run the active state
  23238. const activeState = this._state[this._activeStateName];
  23239. return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
  23240. // update the output of the tracker
  23241. this._lastOutput = trackerOutput;
  23242. // need to change the state?
  23243. if (this._activeStateName != nextState) {
  23244. activeState.onLeaveState();
  23245. this._activeStateName = nextState;
  23246. this._state[nextState].onEnterState(nextStateSettings || {});
  23247. }
  23248. });
  23249. }
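/*
 * Note (inferred from the code above): each state's update() resolves to an object
 * of the form { trackerOutput, nextState, nextStateSettings? }. When nextState
 * differs from the active state, the tracker calls onLeaveState() on the current
 * state and onEnterState() on the next one, passing nextStateSettings (or an empty
 * object) along.
 */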
  23250. /**
  23251. * Get reference image
23252. * @param keypointIndex index of the keypoint, or -1 if not found
  23253. * @returns reference image
  23254. * @internal
  23255. */
  23256. _referenceImageOfKeypoint(keypointIndex) {
  23257. const training = this._state.training;
  23258. return training.referenceImageOfKeypoint(keypointIndex);
  23259. }
  23260. /**
  23261. * Get reference image index
23262. * @param keypointIndex index of the keypoint, or -1 if not found
  23263. * @returns reference image index, or -1 if not found
  23264. * @internal
  23265. */
  23266. _referenceImageIndexOfKeypoint(keypointIndex) {
  23267. const training = this._state.training;
  23268. return training.referenceImageIndexOfKeypoint(keypointIndex);
  23269. }
  23270. /**
  23271. * Get a keypoint of the trained set
  23272. * @param keypointIndex
  23273. * @returns a keypoint
  23274. * @internal
  23275. */
  23276. _referenceKeypoint(keypointIndex) {
  23277. const training = this._state.training;
  23278. return training.referenceKeypoint(keypointIndex);
  23279. }
  23280. }
  23281. ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
  23282. /*
  23283. * MARTINS.js Free Edition
  23284. * GPU-accelerated Augmented Reality for the web
  23285. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23286. * https://github.com/alemart/martins-js
  23287. *
  23288. * This program is free software: you can redistribute it and/or modify
  23289. * it under the terms of the GNU Affero General Public License version 3
  23290. * as published by the Free Software Foundation.
  23291. *
  23292. * This program is distributed in the hope that it will be useful,
  23293. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23294. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23295. * GNU Affero General Public License for more details.
  23296. *
  23297. * You should have received a copy of the GNU Affero General Public License
  23298. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23299. *
  23300. * tracker-factory.ts
  23301. * Tracker factory
  23302. */
  23303. /**
  23304. * Tracker factory
  23305. */
  23306. class TrackerFactory {
  23307. /**
  23308. * Create an Image Tracker
  23309. */
  23310. static ImageTracker() {
  23311. return new ImageTracker();
  23312. }
  23313. }
  23314. ;// CONCATENATED MODULE: ./src/sources/media-source.ts
  23315. /*
  23316. * MARTINS.js Free Edition
  23317. * GPU-accelerated Augmented Reality for the web
  23318. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23319. * https://github.com/alemart/martins-js
  23320. *
  23321. * This program is free software: you can redistribute it and/or modify
  23322. * it under the terms of the GNU Affero General Public License version 3
  23323. * as published by the Free Software Foundation.
  23324. *
  23325. * This program is distributed in the hope that it will be useful,
  23326. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23327. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23328. * GNU Affero General Public License for more details.
  23329. *
  23330. * You should have received a copy of the GNU Affero General Public License
  23331. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23332. *
  23333. * media-source.ts
  23334. * SpeedyMedia-based source of data
  23335. */
  23336. /**
  23337. * SpeedyMedia-based source of data
  23338. */
  23339. class MediaSource {
  23340. /**
  23341. * Constructor
  23342. */
  23343. constructor(source) {
  23344. this._media = null;
  23345. this._source = source;
  23346. }
  23347. /**
  23348. * A type-identifier of the source of data
  23349. * @internal
  23350. */
  23351. get _type() {
  23352. return 'video';
  23353. }
  23354. /**
  23355. * Get media
  23356. * @internal
  23357. */
  23358. get _data() {
  23359. if (this._media == null)
  23360. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  23361. return this._media;
  23362. }
  23363. /**
  23364. * Initialize this source of data
  23365. * @returns a promise that resolves as soon as this source of data is initialized
  23366. * @internal
  23367. */
  23368. _init() {
  23369. return speedy_vision_default().load(this._source).then(media => {
  23370. Utils.log(`Source of data is ${media.width}x${media.height}`);
  23371. this._media = media;
  23372. });
  23373. }
  23374. /**
  23375. * Release this source of data
  23376. * @returns a promise that resolves as soon as this source of data is released
  23377. * @internal
  23378. */
  23379. _release() {
  23380. if (this._media)
  23381. this._media.release();
  23382. this._media = null;
  23383. return speedy_vision_default().Promise.resolve();
  23384. }
  23385. /**
  23386. * A string featuring the size of the media, in pixels
  23387. */
  23388. get _size() {
  23389. const media = this._media;
  23390. if (media != null)
  23391. return `${media.width}x${media.height}`;
  23392. else
  23393. return '-';
  23394. }
  23395. }
  23396. ;// CONCATENATED MODULE: ./src/sources/video-source.ts
  23397. /*
  23398. * MARTINS.js Free Edition
  23399. * GPU-accelerated Augmented Reality for the web
  23400. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23401. * https://github.com/alemart/martins-js
  23402. *
  23403. * This program is free software: you can redistribute it and/or modify
  23404. * it under the terms of the GNU Affero General Public License version 3
  23405. * as published by the Free Software Foundation.
  23406. *
  23407. * This program is distributed in the hope that it will be useful,
  23408. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23409. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23410. * GNU Affero General Public License for more details.
  23411. *
  23412. * You should have received a copy of the GNU Affero General Public License
  23413. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23414. *
  23415. * video-source.ts
  23416. * <video>-based source of data
  23417. */
  23418. /**
  23419. * <video>-based source of data
  23420. */
  23421. class VideoSource extends MediaSource {
  23422. /**
  23423. * Constructor
  23424. */
  23425. constructor(video) {
  23426. Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
  23427. super(video);
  23428. }
  23429. /**
  23430. * Stats related to this source of data
  23431. * @internal
  23432. */
  23433. get _stats() {
  23434. return `${this._size} video`;
  23435. }
  23436. }
  23437. ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
  23438. /*
  23439. * MARTINS.js Free Edition
  23440. * GPU-accelerated Augmented Reality for the web
  23441. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23442. * https://github.com/alemart/martins-js
  23443. *
  23444. * This program is free software: you can redistribute it and/or modify
  23445. * it under the terms of the GNU Affero General Public License version 3
  23446. * as published by the Free Software Foundation.
  23447. *
  23448. * This program is distributed in the hope that it will be useful,
  23449. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23450. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23451. * GNU Affero General Public License for more details.
  23452. *
  23453. * You should have received a copy of the GNU Affero General Public License
  23454. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23455. *
  23456. * canvas-source.ts
  23457. * <canvas>-based source of data
  23458. */
  23459. /**
  23460. * <canvas>-based source of data
  23461. */
  23462. class CanvasSource extends MediaSource {
  23463. /**
  23464. * Constructor
  23465. */
  23466. constructor(canvas) {
  23467. Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
  23468. super(canvas);
  23469. }
  23470. /**
  23471. * Stats related to this source of data
  23472. * @internal
  23473. */
  23474. get _stats() {
  23475. return `${this._size} canvas`;
  23476. }
  23477. }
  23478. ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
  23479. /*
  23480. * MARTINS.js Free Edition
  23481. * GPU-accelerated Augmented Reality for the web
  23482. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23483. * https://github.com/alemart/martins-js
  23484. *
  23485. * This program is free software: you can redistribute it and/or modify
  23486. * it under the terms of the GNU Affero General Public License version 3
  23487. * as published by the Free Software Foundation.
  23488. *
  23489. * This program is distributed in the hope that it will be useful,
  23490. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23491. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23492. * GNU Affero General Public License for more details.
  23493. *
  23494. * You should have received a copy of the GNU Affero General Public License
  23495. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23496. *
  23497. * camera-source.ts
  23498. * Webcam-based source of data
  23499. */
  23500. /** Default options for camera sources */
  23501. const DEFAULT_CAMERA_OPTIONS = {
  23502. resolution: 'md',
  23503. aspectRatio: 16 / 9,
  23504. constraints: { facingMode: 'environment' },
  23505. };
  23506. /**
  23507. * Webcam-based source of data
  23508. */
  23509. class CameraSource extends VideoSource {
  23510. /**
  23511. * Constructor
  23512. */
  23513. constructor(options) {
  23514. const video = document.createElement('video');
  23515. super(video);
  23516. this._video = video;
  23517. this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
  23518. }
  23519. /**
  23520. * Camera resolution
  23521. */
  23522. get resolution() {
  23523. return this._options.resolution;
  23524. }
  23525. /**
  23526. * Stats related to this source of data
  23527. * @internal
  23528. */
  23529. get _stats() {
  23530. return `${this._size} webcam`;
  23531. }
  23532. /**
  23533. * Initialize this source of data
  23534. * @returns a promise that resolves as soon as this source of data is initialized
  23535. * @internal
  23536. */
  23537. _init() {
  23538. Utils.log('Accessing the webcam...');
  23539. // validate
  23540. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  23541. throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
  23542. // set up media constraints
  23543. const options = this._options;
  23544. const size = Utils.resolution(options.resolution, options.aspectRatio);
  23545. const constraints = {
  23546. audio: false,
  23547. video: Object.assign({ width: size.width, height: size.height }, options.constraints)
  23548. };
  23549. // load camera stream
  23550. return new (speedy_vision_default()).Promise((resolve, reject) => {
  23551. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  23552. const video = this._video;
  23553. video.onloadedmetadata = () => {
  23554. video.play();
  23555. Utils.log('Access to the webcam has been granted.');
  23556. resolve(video);
  23557. };
  23558. video.srcObject = stream;
  23559. video.muted = true;
  23560. }).catch(err => {
  23561. reject(new AccessDeniedError('Please give access to the webcam and reload the page.', err));
  23562. });
  23563. }).then(_ => super._init());
  23564. }
  23565. /**
  23566. * Release this source of data
  23567. * @returns a promise that resolves as soon as this source of data is released
  23568. * @internal
  23569. */
  23570. _release() {
  23571. const stream = this._video.srcObject;
  23572. const tracks = stream.getTracks();
  23573. // stop camera feed
  23574. tracks.forEach(track => track.stop());
  23575. this._video.onloadedmetadata = null;
  23576. this._video.srcObject = null;
  23577. // release the media
  23578. return super._release();
  23579. }
  23580. }
  23581. ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
  23582. /*
  23583. * MARTINS.js Free Edition
  23584. * GPU-accelerated Augmented Reality for the web
  23585. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23586. * https://github.com/alemart/martins-js
  23587. *
  23588. * This program is free software: you can redistribute it and/or modify
  23589. * it under the terms of the GNU Affero General Public License version 3
  23590. * as published by the Free Software Foundation.
  23591. *
  23592. * This program is distributed in the hope that it will be useful,
  23593. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23594. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23595. * GNU Affero General Public License for more details.
  23596. *
  23597. * You should have received a copy of the GNU Affero General Public License
  23598. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23599. *
  23600. * source-factory.ts
  23601. * Factory of sources of data
  23602. */
  23603. /**
  23604. * Factory of sources of data
  23605. */
  23606. class SourceFactory {
  23607. /**
  23608. * Create a <video>-based source of data
  23609. * @param video video element
  23610. */
  23611. static Video(video) {
  23612. return new VideoSource(video);
  23613. }
  23614. /**
  23615. * Create a <canvas>-based source of data
  23616. * @param canvas canvas element
  23617. */
  23618. static Canvas(canvas) {
  23619. return new CanvasSource(canvas);
  23620. }
  23621. /**
  23622. * Create a Webcam-based source of data
  23623. * @param options optional options object
  23624. */
  23625. static Camera(options = {}) {
  23626. return new CameraSource(options);
  23627. }
  23628. }
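/*
 * Usage sketch (illustrative, not executed by this bundle; option names taken from
 * DEFAULT_CAMERA_OPTIONS above, accessed via the public namespace defined later in
 * this file):
 *
 *   const source = Martins.Source.Camera({
 *       resolution: 'md',
 *       aspectRatio: 16 / 9,
 *       constraints: { facingMode: 'environment' }
 *   });
 *   // or: Martins.Source.Video(document.querySelector('video'));
 *   // or: Martins.Source.Canvas(document.querySelector('canvas'));
 */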
  23629. ;// CONCATENATED MODULE: ./src/main.ts
  23630. /*
  23631. * MARTINS.js Free Edition
  23632. * GPU-accelerated Augmented Reality for the web
  23633. * Copyright (C) 2022 Alexandre Martins <alemartf(at)gmail.com>
  23634. * https://github.com/alemart/martins-js
  23635. *
  23636. * This program is free software: you can redistribute it and/or modify
  23637. * it under the terms of the GNU Affero General Public License version 3
  23638. * as published by the Free Software Foundation.
  23639. *
  23640. * This program is distributed in the hope that it will be useful,
  23641. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23642. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  23643. * GNU Affero General Public License for more details.
  23644. *
  23645. * You should have received a copy of the GNU Affero General Public License
  23646. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  23647. *
  23648. * main.ts
  23649. * Entry point
  23650. */
  23651. /**
  23652. * GPU-accelerated Augmented Reality for the web
  23653. */
  23654. class Martins {
  23655. /**
  23656. * Start a new session
  23657. * @param options
  23658. * @returns a promise that resolves to a new session
  23659. */
  23660. static startSession(options) {
  23661. return Session.instantiate(options);
  23662. }
  23663. /**
  23664. * Trackers
  23665. */
  23666. static get Tracker() {
  23667. return TrackerFactory;
  23668. }
  23669. /**
  23670. * Sources of data
  23671. */
  23672. static get Source() {
  23673. return SourceFactory;
  23674. }
  23675. /**
  23676. * Create a viewport
  23677. * @param settings
  23678. * @returns a new viewport with the specified settings
  23679. */
  23680. static Viewport(settings) {
  23681. return new BaseViewport(settings);
  23682. }
  23683. /**
  23684. * Global Settings
  23685. */
  23686. static get Settings() {
  23687. return Settings;
  23688. }
  23689. /**
  23690. * Engine version
  23691. */
  23692. static get version() {
23693. return "0.1.2-wip";
  23697. }
  23698. /**
  23699. * Engine edition
  23700. */
  23701. static get edition() {
  23702. return 'Free Edition';
  23703. }
  23704. /**
  23705. * Speedy Vision
  23706. */
  23707. static get Speedy() {
  23708. return (speedy_vision_default());
  23709. }
  23710. /**
  23711. * Checks if the engine can be run in the browser the client is using
  23712. * @returns true if the engine is compatible with the browser
  23713. */
  23714. static isSupported() {
  23715. return Session.isSupported();
  23716. }
  23717. }
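/*
 * Usage sketch (illustrative, not executed by this bundle; the exact shape of the
 * options object accepted by startSession() is defined elsewhere, so the field
 * names suggested below are assumptions):
 *
 *   if (Martins.isSupported()) {
 *       const tracker = Martins.Tracker.ImageTracker();
 *       const source = Martins.Source.Camera();
 *       const viewport = Martins.Viewport({ ... });   // viewport settings
 *       Martins.startSession({ ... })                 // e.g. trackers, sources, viewport
 *           .then(session => { ... });
 *   }
 */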
  23718. // Freeze the namespace
  23719. Object.freeze(Martins);
  23720. // Add Speedy Vision to global scope
  23721. ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
  23722. // Display a notice
  23723. Utils.log(`MARTINS.js ${Martins.edition} version ${Martins.version}. ` +
  23724. `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
  23725. "https://github.com/alemart/martins-js");
  23726. })();
  23727. __webpack_exports__ = __webpack_exports__["default"];
  23728. /******/ return __webpack_exports__;
  23729. /******/ })()
  23730. ;
  23731. });