
Update scripts

customisations
alemart, 9 months ago
commit e5d0debc2d
2 changed files with 266 additions and 54 deletions:
  1. dist/encantar.js (+264, -52)
  2. dist/encantar.min.js (+2, -2)

dist/encantar.js (+264, -52)

@@ -5,7 +5,7 @@
  * https://github.com/alemart/encantar-js
  *
  * @license LGPL-3.0-or-later
- * Date: 2024-11-25T01:46:40.728Z
+ * Date: 2024-12-17T18:08:41.979Z
  */
 (function webpackUniversalModuleDefinition(root, factory) {
 	if(typeof exports === 'object' && typeof module === 'object')
@@ -19952,6 +19952,8 @@ const POWER_ICON = Object.freeze({
     'low-power': '&#x1F50B',
     'high-performance': '&#x26A1'
 });
+/** Button icons (atlas) */
+const BUTTON_ICONS = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAVUlEQVRIS2NkGGDAOMD2M4w6YDQE8IbAfyBgBAJSEipIDy712MzCaTiyQdRwBC4zsDoAmy8ocQQ+vRgOIDUI8UUPMVFIUvySkhaIVTvqgNEQGPAQAABSNiARgz5LggAAAABJRU5ErkJggg==';
 /**
  * Stats panel used for development purposes
  */
@@ -19975,15 +19977,16 @@ class StatsPanel {
     /**
      * A method to be called in the update loop
      * @param time current time in ms
-     * @param trackers the trackers attached to the session
      * @param sources the sources of media linked to the session
+     * @param trackers the trackers attached to the session
+     * @param viewport the viewport
      * @param gpu GPU cycles per second
      * @param fps frames per second
      */
-    update(time, trackers, sources, gpu, fps) {
+    update(time, sources, trackers, viewport, gpu, fps) {
         if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
             this._lastUpdate = time;
-            this._update(trackers, sources, fps, gpu);
+            this._update(sources, trackers, viewport, fps, gpu);
         }
     }
     /**
@@ -20000,12 +20003,13 @@ class StatsPanel {
     }
     /**
      * Update the contents of the panel
-     * @param trackers the trackers attached to the session
      * @param sources the sources of media linked to the session
+     * @param trackers the trackers attached to the session
+     * @param viewport the viewport
      * @param fps frames per second
      * @param gpu GPU cycles per second
      */
-    _update(trackers, sources, fps, gpu) {
+    _update(sources, trackers, viewport, fps, gpu) {
         // all sanitized
         const lfps = this._label('_ar_fps');
         if (lfps !== null) {
@@ -20030,6 +20034,11 @@ class StatsPanel {
             const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
             lout.innerText = trackerStats;
         }
+        const lview = this._label('_ar_view');
+        if (lview !== null) {
+            const size = viewport.virtualSize;
+            lview.innerText = `${size.width}x${size.height} rendering`;
+        }
     }
     /**
      * Get a label of the panel
@@ -20071,13 +20080,33 @@ class StatsPanel {
      */
     _createTitle() {
         const title = document.createElement('div');
+        const button = document.createElement('button');
+        title.style.display = 'flex';
         title.style.backgroundColor = '#7e56c2';
         title.style.color = 'white';
         title.style.fontFamily = 'monospace';
         title.style.fontSize = '14px';
         title.style.fontWeight = 'bold';
-        title.style.padding = '2px';
+        title.style.paddingRight = '4px';
         title.innerText = 'encantar.js ' + AR.version;
+        button.style.width = '18px';
+        button.style.height = '18px';
+        button.style.marginRight = '4px';
+        button.style.backgroundColor = '#7e56c2';
+        button.style.backgroundImage = 'url(' + BUTTON_ICONS + ')';
+        button.style.backgroundRepeat = 'no-repeat';
+        button.style.backgroundPosition = '0 0';
+        button.style.borderWidth = '2px';
+        button.style.borderColor = '#b588fb #46346a #46346a #b588fb';
+        title.insertBefore(button, title.firstChild);
+        button.addEventListener('click', () => {
+            const container = title.parentNode;
+            const details = container && container.querySelector('._ar_details');
+            if (!details)
+                return;
+            details.hidden = !details.hidden;
+            button.style.backgroundPosition = details.hidden ? '0 0 ' : '-16px 0';
+        });
         return title;
     }
     /**
@@ -20086,21 +20115,25 @@ class StatsPanel {
      */
     _createContent() {
         const content = document.createElement('div');
-        const print = (html) => content.insertAdjacentHTML('beforeend', html);
+        const details = document.createElement('div');
         content.style.backgroundColor = 'rgba(0,0,0,0.5)';
         content.style.color = 'white';
         content.style.fontFamily = 'monospace';
         content.style.fontSize = '14px';
         content.style.padding = '2px';
         content.style.whiteSpace = 'pre-line';
+        details.classList.add('_ar_details');
+        details.hidden = true;
         // all sanitized
-        print('FPS: <span class="_ar_fps"></span> | ');
-        print('GPU: <span class="_ar_gpu"></span> ');
-        print('<span class="_ar_power"></span>');
-        print('<br>');
-        print('IN: <span class="_ar_in"></span>');
-        print('<br>');
-        print('OUT: <span class="_ar_out"></span>');
+        const append = (div, html) => div.insertAdjacentHTML('beforeend', html);
+        append(content, 'FPS: <span class="_ar_fps"></span> | ');
+        append(content, 'GPU: <span class="_ar_gpu"></span> ');
+        append(content, '<span class="_ar_power"></span>');
+        append(details, 'IN: <span class="_ar_in"></span><br>');
+        append(details, 'OUT: <span class="_ar_out"></span><br>');
+        append(details, 'VIEW: <span class="_ar_view"></span>');
+        // done!
+        content.appendChild(details);
         return content;
     }
 }
@@ -20148,7 +20181,7 @@ const SCAN_MIN_MATCHES = 20; //30;
 /** When in the scanning state, we require the image to be matched during a few consecutive frames before accepting it */
 const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
 /** Reprojection error, in NIS pixels, used when estimating a motion model (scanning state) */
-const SCAN_RANSAC_REPROJECTIONERROR_NIS = (NIS_SIZE * 0.02) | 0;
+const SCAN_RANSAC_REPROJECTIONERROR_NIS = (NIS_SIZE * 0.0125) | 0;
 /** Reprojection error, in NDC, used when estimating a motion model (scanning state) */
 const SCAN_RANSAC_REPROJECTIONERROR_NDC = SCAN_RANSAC_REPROJECTIONERROR_NIS / (NIS_SIZE / 2);
 /** Number of tables used in the LSH-based keypoint matching */
@@ -20177,6 +20210,12 @@ const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
 const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
 /** Minimum acceptable number of matched keypoints when in a pre-tracking state */
 const PRE_TRACK_MIN_MATCHES = 4;
+/** Maximum number of iterations in Pre-tracking B */
+const PRE_TRACK_MAX_ITERATIONS = 3;
+/** Reprojection error, in NIS pixels, used when pre-tracking */
+const PRE_TRACK_RANSAC_REPROJECTIONERROR_NIS = (NIS_SIZE * 0.0125 * 0.5) | 0;
+/** Reprojection error, in NDC, used when pre-tracking */
+const PRE_TRACK_RANSAC_REPROJECTIONERROR_NDC = PRE_TRACK_RANSAC_REPROJECTIONERROR_NIS / (NIS_SIZE / 2);
 /** Minimum acceptable number of matched keypoints when in the tracking state */
 const TRACK_MIN_MATCHES = 4; //10; //20;
 /** Maximum number of keypoints to be analyzed in the tracking state */
@@ -21124,7 +21163,7 @@ class Session extends AREventTarget {
                     rafQueue[i][1].call(undefined, time, frame);
                 // update internals
                 this._renderStats.update();
-                this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
+                this._statsPanel.update(time, this._sources, this._trackers, this._viewport, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
                 this._frameReady = false;
             }
             else {
@@ -22524,7 +22563,7 @@ class ImageTrackerScanningState extends ImageTrackerState {
         return ImageTrackerUtils.findPerspectiveWarpNDC(points, {
             method: 'pransac',
             reprojectionError: SCAN_RANSAC_REPROJECTIONERROR_NDC,
-            numberOfHypotheses: 512,
+            numberOfHypotheses: 512 * 2,
             bundleSize: 128,
         });
     }
@@ -22850,6 +22889,10 @@ class ImageTrackerPreTrackingAState extends ImageTrackerState {
 
 
 
+/** Port of the source image multiplexer: get data from the portal */
+const PORT_PORTAL = 0;
+/** Port of the source image multiplexer: get data from the camera */
+const pre_tracking_b_PORT_CAMERA = 1;
 /**
  * In Pre-Tracking B, we refine the homography obtained at the scanning state.
  * We find a transformation that warps the snapshot obtained from the scanning
@@ -22866,6 +22909,7 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         this._referenceImage = null;
         this._snapshot = null;
         this._referenceKeypointPortalSink = null;
+        this._iterations = 0;
     }
     /**
      * Called as soon as this becomes the active state, just before update() runs for the first time
@@ -22876,11 +22920,17 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         const referenceImage = settings.referenceImage;
         const snapshot = settings.snapshot;
         const referenceKeypointPortalSink = settings.referenceKeypointPortalSink;
+        const sourceMux = this._pipeline.node('sourceMux');
+        const sourceBuffer = this._pipeline.node('sourceBuffer');
         // set attributes
         this._homography = homography;
         this._referenceImage = referenceImage;
         this._snapshot = snapshot;
         this._referenceKeypointPortalSink = referenceKeypointPortalSink;
+        this._iterations = 0;
+        // reset nodes
+        sourceMux.port = PORT_PORTAL;
+        sourceBuffer.frozen = false;
     }
     /**
      * Called just before the GPU processing
@@ -22922,6 +22972,8 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         const keypoints = result.keypoints; // from Pre-Tracking B
         const image = result.image;
         const keypointPortalSink = this._pipeline.node('keypointPortalSink');
+        const sourceMux = this._pipeline.node('sourceMux');
+        const sourceBuffer = this._pipeline.node('sourceBuffer');
         // tracker output
         const trackerOutput = {
             keypointsNIS: image !== undefined ? keypoints : undefined,
@@ -22936,19 +22988,21 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
                 throw new TrackingError('Not enough data points');
             // find a warp
             const points = ImageTrackerUtils.compilePairsOfKeypointsNDC(pairs);
-            return this._findAffineMotionNDC(points);
+            return this._findMotionNDC(points);
         })
             .then(warp => {
+            // get the camera image in the next iteration
+            // the warped snapshot from the scanning state is occasionally very blurry
+            sourceMux.port = pre_tracking_b_PORT_CAMERA;
+            sourceBuffer.frozen = true;
             // refine the homography
             return this._homography.setTo(warp.times(this._homography));
         })
             .then(_ => ({
-            nextState: 'tracking',
-            //nextState: 'pre-tracking-b',
+            nextState: (++this._iterations < PRE_TRACK_MAX_ITERATIONS) ? 'pre-tracking-b' : 'tracking',
             trackerOutput: trackerOutput,
             nextStateSettings: {
                 // we export keypoints obtained in Pre-Tracking B, not in A.
-                // lighting conditions match, but what if the snapshot is too blurry?
                 templateKeypoints: keypoints,
                 templateKeypointPortalSink: keypointPortalSink,
                 referenceImage: this._referenceImage,
@@ -22965,16 +23019,17 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         });
     }
     /**
-     * Find an affine motion model in NDC between pairs of keypoints in NDC
+     * Find a motion model in NDC between pairs of keypoints in NDC
      * given as a 2 x 2n [ src | dest ] matrix
      * @param points compiled pairs of keypoints in NDC
      * @returns a promise that resolves to a 3x3 warp in NDC that maps source to destination
      */
-    _findAffineMotionNDC(points) {
-        return ImageTrackerUtils.findAffineWarpNDC(points, {
+    _findMotionNDC(points) {
+        //return ImageTrackerUtils.findAffineWarpNDC(points, {
+        return ImageTrackerUtils.findPerspectiveWarpNDC(points, {
             method: 'pransac',
-            reprojectionError: TRACK_RANSAC_REPROJECTIONERROR_NDC,
-            numberOfHypotheses: 512 * 4,
+            reprojectionError: PRE_TRACK_RANSAC_REPROJECTIONERROR_NDC,
+            numberOfHypotheses: 512 * 8,
             bundleSize: 128,
             mask: undefined // score is not needed
         }).then(([warp, score]) => {
@@ -23023,6 +23078,8 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         const pipeline = speedy_vision_default().Pipeline();
         const source = speedy_vision_default().Image.Source('source');
         const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
+        const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
+        const sourceBuffer = speedy_vision_default().Image.Buffer('sourceBuffer');
         const referenceKeypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('referenceKeypointPortalSource');
         const screen = speedy_vision_default().Transform.Resize('screen');
         const greyscale = speedy_vision_default().Filter.Greyscale();
@@ -23044,6 +23101,8 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         //const imageSink = Speedy.Image.Sink('image');
         source.media = null;
         imagePortalSource.source = null;
+        sourceMux.port = PORT_PORTAL;
+        sourceBuffer.frozen = false;
         referenceKeypointPortalSource.source = null;
         imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
         screen.size = speedy_vision_default().Size(0, 0);
@@ -23066,8 +23125,10 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
         keypointSink.turbo = false;
         // prepare input
-        //source.output(); // ignore, but keep it in the pipeline
-        imagePortalSource.output().connectTo(screen.input());
+        imagePortalSource.output().connectTo(sourceMux.input('in0'));
+        source.output().connectTo(sourceBuffer.input());
+        sourceBuffer.output().connectTo(sourceMux.input('in1'));
+        sourceMux.output().connectTo(screen.input());
         screen.output().connectTo(greyscale.input());
         // preprocess images
         greyscale.output().connectTo(imageRectifier.input());
@@ -23097,7 +23158,7 @@ class ImageTrackerPreTrackingBState extends ImageTrackerState {
         referenceKeypointPortalSource.output().connectTo(referenceKeypointSink.input());
         //imageRectifier.output().connectTo(imageSink.input());
         // done!
-        pipeline.init(source, screen, imagePortalSource, referenceKeypointPortalSource, greyscale, imageRectifier, nightvision, nightvisionMux, detector, borderClipper, clipper, denoiser, subpixel, blur, descriptor, matcher, keypointScaler, keypointSink, keypointPortalSink, referenceKeypointSink);
+        pipeline.init(source, imagePortalSource, sourceBuffer, sourceMux, screen, referenceKeypointPortalSource, greyscale, imageRectifier, nightvision, nightvisionMux, detector, borderClipper, clipper, denoiser, subpixel, blur, descriptor, matcher, keypointScaler, keypointSink, keypointPortalSink, referenceKeypointSink);
         return pipeline;
     }
 }
@@ -23599,6 +23660,37 @@ class Vector3 {
         return new Vector3(x, y, z);
     }
     /**
+     * Compute a unit vector with the same direction as this
+     * @returns a new unit vector with the same direction as this
+     */
+    normalized() {
+        return this._clone()._normalize();
+    }
+    /**
+     * Compute the sum between this vector and v
+     * @param v a vector
+     * @returns a new vector equal to the sum between this and v
+     */
+    plus(v) {
+        return this._clone()._add(v);
+    }
+    /**
+     * Compute the difference between this vector and v
+     * @param v a vector
+     * @returns a new vector equal to the difference this - v
+     */
+    minus(v) {
+        return this._clone()._subtract(v);
+    }
+    /**
+     * Compute the multiplication between this vector and a scale factor
+     * @param scale scalar quantity
+     * @returns a new vector equal to the multiplication between this and the scale factor
+     */
+    times(scale) {
+        return this._clone()._scale(scale);
+    }
+    /**
      * Check if this and v have the same coordinates
      * @param v a vector
      * @returns true if this and v have the same coordinates
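
The normalized(), plus(), minus() and times() helpers added above to Vector3 (and, further below in this diff, to Vector2) are non-mutating: each one clones the vector before applying the internal _normalize/_add/_subtract/_scale routine. A minimal usage sketch with made-up numbers:

    const a = new Vector3(1, 2, 2);
    const b = new Vector3(3, 0, 4);
    a.plus(b);       // new vector (4, 2, 6); a and b are unchanged
    a.minus(b);      // new vector (-2, 2, -2)
    b.times(0.5);    // new vector (1.5, 0, 2)
    b.normalized();  // new unit vector (0.6, 0, 0.8)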
@@ -25508,6 +25600,10 @@ class ImageTrackerTrackingState extends ImageTrackerState {
                only affect the rendering of virtual elements positioned at the
                local space linked to the target ("ar.root").
 
+               (that's right, and movements of the real camera in physical space
+               shouldn't affect the world space either. Note: the real camera is
+               expected to be shaky. Example: a user holding a mobile phone.)
+
             */
             // the target moves and the camera stays fixed at the origin
             const modelMatrix = this._camera.computeViewMatrix(); // p_view = V M p_model
@@ -25595,7 +25691,7 @@ class ImageTrackerTrackingState extends ImageTrackerState {
         return ImageTrackerUtils.findAffineWarpNDC(points, {
             method: 'pransac',
             reprojectionError: TRACK_RANSAC_REPROJECTIONERROR_NDC,
-            numberOfHypotheses: 512 * 4,
+            numberOfHypotheses: 512 * 2,
             bundleSize: 128,
             mask: undefined // score is not needed
         }).then(([warp, score]) => {
@@ -25812,8 +25908,6 @@
 
 
 
-/** A helper */
-const formatSize = (size) => `${size.width}x${size.height}`;
 /** Default options for instantiating an ImageTracker */
 const image_tracker_DEFAULT_OPTIONS = {
     resolution: 'sm'
@@ -25867,16 +25961,18 @@ class ImageTracker extends AREventTarget {
         return this._database;
     }
     /**
-     * Resolution of the AR screen space
+     * Resolution of the tracker
      */
     get resolution() {
         return this._resolution;
     }
     /**
-     * Resolution of the AR screen space
+     * Resolution of the tracker
+     * @readonly
      */
     set resolution(resolution) {
-        this._resolution = resolution;
+        // this property is readonly, but this setter has been kept because
+        // it wasn't readonly in previous versions of the engine. FIXME
     }
     /**
      * Size of the AR screen space, in pixels
@@ -25897,7 +25993,8 @@ class ImageTracker extends AREventTarget {
      * @internal
      */
     get _stats() {
-        return `${formatSize(this.screenSize)} ${this.state}`;
+        const screenSize = this.screenSize;
+        return `${screenSize.width}x${screenSize.height} ${this.state}`;
     }
     /**
      * Initialize this tracker
@@ -26110,6 +26207,37 @@ class Vector2 {
         return v._clone()._subtract(this)._normalize();
     }
     /**
+     * Compute a unit vector with the same direction as this
+     * @returns a new unit vector with the same direction as this
+     */
+    normalized() {
+        return this._clone()._normalize();
+    }
+    /**
+     * Compute the sum between this vector and v
+     * @param v a vector
+     * @returns a new vector equal to the sum between this and v
+     */
+    plus(v) {
+        return this._clone()._add(v);
+    }
+    /**
+     * Compute the difference between this vector and v
+     * @param v a vector
+     * @returns a new vector equal to the difference this - v
+     */
+    minus(v) {
+        return this._clone()._subtract(v);
+    }
+    /**
+     * Compute the multiplication between this vector and a scale factor
+     * @param scale scalar quantity
+     * @returns a new vector equal to the multiplication between this and the scale factor
+     */
+    times(scale) {
+        return this._clone()._scale(scale);
+    }
+    /**
      * Check if this and v have the same coordinates
      * @param v a vector
      * @returns true if this and v have the same coordinates
@@ -26240,16 +26368,23 @@ const EVENTTYPE2PHASE = {
     'pointerleave': 'ended',
     'pointerenter': 'began',
 };
+/** Default options for instantiating a PointerTracker */
+const pointer_tracker_DEFAULT_OPTIONS = {
+    space: 'normalized'
+};
 /**
  * A tracker of pointer-based input such as mouse, touch or pen
  */
 class PointerTracker {
     /**
      * Constructor
+     * @param options
      */
-    constructor() {
+    constructor(options) {
+        const settings = this._buildSettings(options);
         this._source = null;
         this._viewport = null;
+        this._space = settings.space;
         this._activePointers = new Map();
         this._newPointers = new Map();
         this._idMap = new Map();
@@ -26260,6 +26395,17 @@ class PointerTracker {
         this._resetInTheNextUpdate = this._resetInTheNextUpdate.bind(this);
     }
     /**
+     * Build a full and validated options object
+     * @param options
+     * @returns validated options with defaults
+     */
+    _buildSettings(options) {
+        const settings = Object.assign({}, pointer_tracker_DEFAULT_OPTIONS, options);
+        if (settings.space != 'normalized' && settings.space != 'adjusted')
+            throw new IllegalArgumentError(`Invalid pointer space: "${settings.space}"`);
+        return settings;
+    }
+    /**
      * The type of the tracker
      */
     get type() {
@@ -26394,12 +26540,24 @@ class PointerTracker {
                     this._newPointers.clear();
                     continue;
             }
-            // determine the current position
+            // determine the current position in normalized space
             const absX = event.pageX - (rect.left + window.scrollX);
             const absY = event.pageY - (rect.top + window.scrollY);
             const relX = 2 * absX / rect.width - 1; // convert to [-1,1]
             const relY = -(2 * absY / rect.height - 1); // flip Y axis
             const position = new Vector2(relX, relY);
+            // scale the normalized space so that it matches the aspect ratio of the viewport
+            if (this._space == 'adjusted') {
+                const a = this._viewport.aspectRatio;
+                if (a >= 1) {
+                    // landscape
+                    position._set(relX, relY / a);
+                }
+                else {
+                    // portrait
+                    position._set(relX * a, relY);
+                }
+            }
             // determine the position delta
             const deltaPosition = !previous ? Vector2.ZERO :
                 position._clone()._subtract(previous.position);
@@ -26410,13 +26568,15 @@ class PointerTracker {
             const velocity = deltaPosition._clone()._scale(inverseDeltaTime);
             // determine the elapsed time since the tracking began
             const elapsedTime = previous ? previous.elapsedTime + deltaTime : 0;
+            // determine how much this pointer has moved since its tracking began
+            const totalDistance = previous ? previous.totalDistance + deltaPosition.length() : 0;
             // determine whether or not this is the primary pointer for this type
             const isPrimary = event.isPrimary;
             // determine the type of the originating device
             const kind = event.pointerType;
             // we create new trackable instances on each frame;
             // these will be exported and consumed by the user
-            this._newPointers.set(id, { id, phase, position, deltaPosition, initialPosition, velocity, elapsedTime, isPrimary, kind });
+            this._newPointers.set(id, { id, phase, position, deltaPosition, initialPosition, velocity, elapsedTime, totalDistance, isPrimary, kind });
         }
         // update trackables
         this._newPointers.forEach((trackable, id) => this._activePointers.set(id, trackable));
@@ -26449,6 +26609,13 @@ class PointerTracker {
         return n + ' pointer' + s;
     }
     /**
+     * The space in which pointers are located.
+     * You may set it when instantiating the tracker.
+     */
+    get space() {
+        return this._space;
+    }
+    /**
      * Generate tracker output
      * @returns a new PointerTrackerOutput object
      */
@@ -26628,9 +26795,10 @@ class TrackerFactory {
     }
     /**
      * Create a Pointer Tracker
+     * @param options
     */
-    static Pointer() {
-        return new PointerTracker();
+    static Pointer(options = {}) {
+        return new PointerTracker(options);
     }
 }
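
The PointerTracker changes above add an options object with a space setting: 'normalized' (the default) or 'adjusted'. In adjusted space the coordinates are rescaled by the viewport's aspect ratio (y is divided by it in landscape, x is multiplied by it in portrait), so a given distance covers the same number of pixels along either axis; exported pointers also gain a totalDistance field that accumulates the length of deltaPosition over the pointer's lifetime. A short sketch, assuming the TrackerFactory above is exposed to users as AR.Tracker, as is the convention in this library:

    // create a pointer tracker that reports positions in adjusted space
    const pointerTracker = AR.Tracker.Pointer({ space: 'adjusted' });
    console.log(pointerTracker.space); // "adjusted"

    // anything other than 'normalized' or 'adjusted' throws an IllegalArgumentError
    // AR.Tracker.Pointer({ space: 'screen' }); // error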
 
@@ -27984,7 +28152,7 @@ class BestFitResizeStrategy extends ImmersiveResizeStrategy {
     resize(viewport) {
         const subContainer = viewport._subContainer;
         const windowAspectRatio = window.innerWidth / window.innerHeight;
-        const viewportAspectRatio = viewport._realSize.width / viewport._realSize.height;
+        const viewportAspectRatio = viewport.aspectRatio;
         let width = 1, height = 1, left = '0px', top = '0px';
         if (viewportAspectRatio <= windowAspectRatio) {
             height = window.innerHeight;
@@ -28093,9 +28261,14 @@ class Viewport extends ViewportEventTarget {
      * on which the virtual scene will be drawn
      */
     get virtualSize() {
+        return Utils.resolution(this._resolution, this.aspectRatio);
+    }
+    /**
+     * Aspect ratio of the viewport
+     */
+    get aspectRatio() {
         const size = this._realSize;
-        const aspectRatio = size.width / size.height;
-        return Utils.resolution(this._resolution, aspectRatio);
+        return size.width / size.height;
     }
     /**
      * Is the viewport currently being displayed in fullscreen mode?
@@ -28151,17 +28324,56 @@ class Viewport extends ViewportEventTarget {
         return this._fullscreen.exit();
     }
     /**
-     * Convert a position given in normalized units to a corresponding pixel
-     * position in canvas space. Normalized units range from -1 to +1. The
-     * center of the canvas is at (0,0). The top right corner is at (1,1).
+     * Convert a position given in space units to a corresponding pixel
+     * position in canvas space. Units in normalized space range from -1 to +1.
+     * The center of the canvas is at (0,0). The top right corner is at (1,1).
      * The bottom left corner is at (-1,-1).
-     * @param position in normalized units
+     * @param position in space units
+     * @param space either "normalized" (default) or "adjusted"; @see PointerSpace
      * @returns an equivalent pixel position in canvas space
      */
-    convertToPixels(position) {
+    convertToPixels(position, space = 'normalized') {
         const canvas = this.canvas;
-        const x = 0.5 * (1 + position.x) * canvas.width;
-        const y = 0.5 * (1 - position.y) * canvas.height;
+        let px = position.x, py = position.y;
+        if (space == 'adjusted') {
+            // convert from adjusted to normalized space
+            const a = canvas.width / canvas.height;
+            if (a >= 1)
+                py *= a;
+            else
+                px /= a;
+        }
+        else if (space != 'normalized')
+            throw new IllegalArgumentError(`Invalid space: "${space}"`);
+        // convert from normalized to canvas space
+        const x = 0.5 * (1 + px) * canvas.width;
+        const y = 0.5 * (1 - py) * canvas.height;
+        // done!
+        return new Vector2(x, y);
+    }
+    /**
+     * Convert a pixel position given in canvas space to a corresponding
+     * position in space units. This is the inverse of convertToPixels().
+     * @param position in canvas space
+     * @space either "normalized" (default) or "adjusted"; see @PointerSpace
+     * @returns an equivalent position in space units
+     */
+    convertFromPixels(position, space = 'normalized') {
+        const canvas = this.canvas;
+        // convert from canvas to normalized space
+        let x = 2 * position.x / canvas.width - 1;
+        let y = -2 * position.y / canvas.height + 1;
+        if (space == 'adjusted') {
+            // convert from normalized to adjusted space
+            const a = canvas.width / canvas.height;
+            if (a >= 1)
+                y /= a;
+            else
+                x *= a;
+        }
+        else if (space != 'normalized')
+            throw new IllegalArgumentError(`Invalid space: "${space}"`);
+        // done!
         return new Vector2(x, y);
     }
     /**
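
With the hunk above, convertToPixels() accepts an optional space argument and gains an inverse, convertFromPixels(). A round-trip sketch using the Vector2 class from this file and a hypothetical viewport whose canvas measures 800x400 pixels (aspect ratio 2):

    const px = viewport.convertToPixels(new Vector2(0.5, 0.25), 'adjusted');
    // adjusted (0.5, 0.25) -> normalized (0.5, 0.5) -> pixel (600, 100)
    const back = viewport.convertFromPixels(px, 'adjusted');
    // pixel (600, 100) -> normalized (0.5, 0.5) -> adjusted (0.5, 0.25)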

dist/encantar.min.js (+2, -2)
Diff not shown because it is too large.

