|
@@ -1,517 +0,0 @@
|
1
|
|
-/*
|
2
|
|
- * encantar.js
|
3
|
|
- * GPU-accelerated Augmented Reality for the web
|
4
|
|
- * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
|
5
|
|
- *
|
6
|
|
- * This program is free software: you can redistribute it and/or modify
|
7
|
|
- * it under the terms of the GNU Lesser General Public License as published
|
8
|
|
- * by the Free Software Foundation, either version 3 of the License, or
|
9
|
|
- * (at your option) any later version.
|
10
|
|
- *
|
11
|
|
- * This program is distributed in the hope that it will be useful,
|
12
|
|
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
13
|
|
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
14
|
|
- * GNU Lesser General Public License for more details.
|
15
|
|
- *
|
16
|
|
- * You should have received a copy of the GNU Lesser General Public License
|
17
|
|
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
|
18
|
|
- *
|
19
|
|
- * pre-tracking.ts
|
20
|
|
- * Pre-tracking state of the Image Tracker
|
21
|
|
- */
|
22
|
|
-
|
23
|
|
-import Speedy from 'speedy-vision';
|
24
|
|
-import { SpeedySize } from 'speedy-vision/types/core/speedy-size';
|
25
|
|
-import { SpeedyPoint2 } from 'speedy-vision/types/core/speedy-point';
|
26
|
|
-import { SpeedyMedia } from 'speedy-vision/types/core/speedy-media';
|
27
|
|
-import { SpeedyMatrix } from 'speedy-vision/types/core/speedy-matrix';
|
28
|
|
-import { SpeedyPromise } from 'speedy-vision/types/core/speedy-promise';
|
29
|
|
-import { SpeedyPipeline, SpeedyPipelineOutput } from 'speedy-vision/types/core/pipeline/pipeline';
|
30
|
|
-import { SpeedyPipelineNodeImageSource } from 'speedy-vision/types/core/pipeline/nodes/images/source';
|
31
|
|
-import { SpeedyPipelineNodeImageMultiplexer } from 'speedy-vision/types/core/pipeline/nodes/images/multiplexer';
|
32
|
|
-import { SpeedyPipelineNodeImagePortalSource, SpeedyPipelineNodeImagePortalSink } from 'speedy-vision/types/core/pipeline/nodes/images/portal';
|
33
|
|
-import { SpeedyPipelineNodeKeypointPortalSource, SpeedyPipelineNodeKeypointPortalSink } from 'speedy-vision/types/core/pipeline/nodes/keypoints/portal';
|
34
|
|
-import { SpeedyPipelineNodeResize } from 'speedy-vision/types/core/pipeline/nodes/transforms/resize';
|
35
|
|
-import { SpeedyPipelineNodePerspectiveWarp } from 'speedy-vision/types/core/pipeline/nodes/transforms/perspective-warp';
|
36
|
|
-import { SpeedyPipelineNodeKeypointBorderClipper } from 'speedy-vision/types/core/pipeline/nodes/keypoints/border-clipper';
|
37
|
|
-import { SpeedyPipelineNodeKeypointTransformer } from 'speedy-vision/types/core/pipeline/nodes/keypoints/transformer';
|
38
|
|
-import { SpeedyPipelineNodeKeypointMultiplexer } from 'speedy-vision/types/core/pipeline/nodes/keypoints/multiplexer';
|
39
|
|
-import { SpeedyPipelineNodeKeypointBuffer } from 'speedy-vision/types/core/pipeline/nodes/keypoints/buffer';
|
40
|
|
-import { SpeedyPipelineNodeStaticLSHTables } from 'speedy-vision/types/core/pipeline/nodes/keypoints/matchers/lsh-static-tables';
|
41
|
|
-import { SpeedyKeypoint, SpeedyMatchedKeypoint } from 'speedy-vision/types/core/speedy-keypoint';
|
42
|
|
-import { ImageTracker, ImageTrackerOutput, ImageTrackerStateName } from '../image-tracker';
|
43
|
|
-import { ImageTrackerTrackingState } from './tracking';
|
44
|
|
-import { ImageTrackerState, ImageTrackerStateOutput } from './state';
|
45
|
|
-import { Nullable, Utils } from '../../../utils/utils';
|
46
|
|
-import { IllegalOperationError, TrackingError } from '../../../utils/errors';
|
47
|
|
-import { ReferenceImage } from '../reference-image';
|
48
|
|
-import {
|
49
|
|
- TRACK_RECTIFIED_BORDER, TRACK_CLIPPING_BORDER, TRACK_REFINEMENT_ITERATIONS,
|
50
|
|
- NIGHTVISION_GAIN, NIGHTVISION_OFFSET, NIGHTVISION_DECAY, TRACK_WITH_NIGHTVISION,
|
51
|
|
- ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_SIGMA,
|
52
|
|
- TRACK_HARRIS_QUALITY, TRACK_DETECTOR_CAPACITY, TRACK_MAX_KEYPOINTS,
|
53
|
|
- SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_SIGMA,
|
54
|
|
- TRACK_RANSAC_REPROJECTIONERROR,
|
55
|
|
- TRAIN_TARGET_NORMALIZED_SIZE,
|
56
|
|
- TRACK_MATCH_RATIO,
|
57
|
|
- NIGHTVISION_QUALITY,
|
58
|
|
- SUBPIXEL_METHOD,
|
59
|
|
-} from '../settings';
|
60
|
|
-
|
61
|
|
-
|
62
|
|
/** The pre-tracking follows a fixed sequence of steps */
type PreTrackingStep = 'read-reference-image' | 'warp-camera-image' | 'train-camera-image';

/** Default target space size (used when training) — a square of side TRAIN_TARGET_NORMALIZED_SIZE */
const DEFAULT_TARGET_SPACE_SIZE = Speedy.Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);

/** Use the camera stream as the input of the pipeline (port 'in1' of the source multiplexer) */
const PORT_CAMERA_IMAGE = 1;

/** Use the reference image as the input of the pipeline (port 'in0' of the source multiplexer) */
const PORT_REFERENCE_IMAGE = 0;
|
73
|
|
-
|
74
|
|
-
|
75
|
|
-
|
76
|
|
/**
 * The pre-tracking state of the Image Tracker is a new training
 * phase for the particular, actual target we'll be tracking.
 *
 * It runs a fixed sequence of steps (see PreTrackingStep):
 *   1. 'read-reference-image'  — detect & store keypoints of the reference image;
 *   2. 'warp-camera-image'     — iteratively refine the homography by matching
 *                                camera keypoints against the stored reference
 *                                keypoints (TRACK_REFINEMENT_ITERATIONS times);
 *   3. 'train-camera-image'    — hand the refined homography and the keypoints
 *                                of the snapshot over to the tracking state.
 */
export class ImageTrackerPreTrackingState extends ImageTrackerState
{
    /** reference image (set on state entry; null until then) */
    private _referenceImage: Nullable<ReferenceImage>;

    /** initial homography mapping the target image space to the AR screen space */
    private _homography: SpeedyMatrix;

    /** current step of the fixed pre-tracking sequence */
    private _step: PreTrackingStep;

    /** stored keypoints of the reference image (filled after 'read-reference-image') */
    private _referenceKeypoints: SpeedyKeypoint[];

    /** current number of iterations for warp refinement */
    private _iterations: number;



    /**
     * Constructor
     * @param imageTracker the tracker that owns this state
     */
    constructor(imageTracker: ImageTracker)
    {
        super('pre-tracking', imageTracker);

        this._homography = Speedy.Matrix.Eye(3);
        this._referenceImage = null;
        this._step = 'read-reference-image';
        this._referenceKeypoints = [];
        this._iterations = 0;
    }

    /**
     * Called as soon as this becomes the active state, just before update() runs for the first time
     * @param settings expected keys: homography (SpeedyMatrix), referenceImage
     *                 (ReferenceImage), snapshot (image portal sink of the camera frame)
     */
    onEnterState(settings: Record<string,any>)
    {
        const imagePortalSource = this._pipeline.node('imagePortalSource') as SpeedyPipelineNodeImagePortalSource;
        const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints') as SpeedyPipelineNodeKeypointMultiplexer;
        const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints') as SpeedyPipelineNodeKeypointMultiplexer;
        const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints') as SpeedyPipelineNodeKeypointBuffer;
        const homography = settings.homography as SpeedyMatrix;
        const referenceImage = settings.referenceImage as Nullable<ReferenceImage>;
        const snapshot = settings.snapshot as SpeedyPipelineNodeImagePortalSink;

        // this shouldn't happen
        if(!referenceImage)
            throw new TrackingError(`Can't track a null reference image`);

        // set attributes (restart the fixed step sequence from scratch)
        this._homography = homography;
        this._referenceImage = referenceImage;
        this._step = 'read-reference-image';
        this._referenceKeypoints = [];
        this._iterations = 0;

        // setup the pipeline: read the camera snapshot via the portal and
        // reset the keypoint multiplexers/buffer to their initial routing
        imagePortalSource.source = snapshot;
        muxOfReferenceKeypoints.port = 0;
        muxOfBufferOfReferenceKeypoints.port = 0;
        bufferOfReferenceKeypoints.frozen = false;
    }

    /**
     * Called just before the GPU processing
     * @returns promise that resolves once the image rectifier is configured
     */
    protected _beforeUpdate(): SpeedyPromise<void>
    {
        const referenceImage = this._referenceImage as ReferenceImage;
        const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
        const sourceMux = this._pipeline.node('sourceMux') as SpeedyPipelineNodeImageMultiplexer;
        const imageRectifier = this._pipeline.node('imageRectifier') as SpeedyPipelineNodePerspectiveWarp;
        // NOTE(review): keypointRectifier is looked up but not used in this method — confirm whether it can be removed
        const keypointRectifier = this._pipeline.node('keypointRectifier') as SpeedyPipelineNodeKeypointTransformer;
        const borderClipper = this._pipeline.node('borderClipper') as SpeedyPipelineNodeKeypointBorderClipper;
        const screenSize = this.screenSize;

        // set the source media to the reference image we're going to track
        const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
        source.media = targetMedia;

        // setup the source multiplexer: feed the reference image on the first
        // step, the camera snapshot (via the image portal) afterwards
        if(this._step == 'read-reference-image')
            sourceMux.port = PORT_REFERENCE_IMAGE;
        else
            sourceMux.port = PORT_CAMERA_IMAGE;

        // clip keypoints from the borders of the target image
        borderClipper.imageSize = screenSize;
        borderClipper.borderSize = Speedy.Vector2(
            screenSize.width * TRACK_CLIPPING_BORDER,
            screenSize.height * TRACK_CLIPPING_BORDER
        );

        // rectify the image: fullscreen warp for the reference image,
        // homography-based warp for the camera image
        const rectify = (this._step == 'read-reference-image') ?
            this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
            this._findRectificationMatrixOfCameraImage(this._homography, DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);

        return rectify.then(rectificationMatrix => {
            imageRectifier.transform = rectificationMatrix;
        });
    }

    /**
     * Post processing that takes place just after the GPU processing.
     * Advances the fixed step sequence and decides the next state.
     * @param result pipeline results
     * @returns state output
     */
    protected _afterUpdate(result: SpeedyPipelineOutput): SpeedyPromise<ImageTrackerStateOutput>
    {
        const referenceImage = this._referenceImage as ReferenceImage;
        const imagePortalSink = this._pipeline.node('imagePortal') as SpeedyPipelineNodeImagePortalSink;
        const keypointPortalSink = this._pipeline.node('keypointPortalSink') as SpeedyPipelineNodeKeypointPortalSink;
        const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints') as SpeedyPipelineNodeKeypointMultiplexer;
        const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints') as SpeedyPipelineNodeKeypointMultiplexer;
        const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints') as SpeedyPipelineNodeKeypointBuffer;
        const keypoints = result.keypoints as SpeedyMatchedKeypoint[];
        const image = result.image as SpeedyMedia | undefined;

        // tracker output
        const trackerOutput: ImageTrackerOutput = {
            keypoints: image !== undefined ? keypoints : undefined, // debug only
            image: image,
            screenSize: this.screenSize,
        };

        // decide what to do next
        // NOTE(review): no default arm; PreTrackingStep is exhaustive today, but
        // this method has no return path for an unknown step (noImplicitReturns
        // would flag this) — consider adding a default that throws
        switch(this._step) {
            case 'read-reference-image': {
                // enable matching: route the freshly-detected reference
                // keypoints into the matcher's database input
                muxOfReferenceKeypoints.port = 1;

                // store reference keypoints
                this._referenceKeypoints = keypoints;

                // next step
                this._step = 'warp-camera-image';
                return Speedy.Promise.resolve({
                    nextState: 'pre-tracking',
                    trackerOutput: trackerOutput,
                });
            }

            case 'warp-camera-image': {
                // freeze reference keypoints so subsequent frames keep
                // matching against the same database
                bufferOfReferenceKeypoints.frozen = true;
                muxOfBufferOfReferenceKeypoints.port = 1;

                // refine warp? repeat this step until the iteration budget is spent
                if(++this._iterations < TRACK_REFINEMENT_ITERATIONS)
                    this._step = 'warp-camera-image';
                else
                    this._step = 'train-camera-image';

                // warp image & go to next step; accumulate the refinement
                // into the homography. On failure, fall back to scanning.
                return this._findWarp(keypoints, this._referenceKeypoints).then(warp =>
                    this._homography.setTo(this._homography.times(warp))
                ).then(_ => ({
                    nextState: 'pre-tracking',
                    trackerOutput: trackerOutput,
                })).catch(err => {
                    Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
                    return {
                        nextState: 'scanning',
                        trackerOutput: trackerOutput,
                    };
                });
            }

            case 'train-camera-image': {
                // log
                Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);

                // change the coordinates: screen space -> screen space homography
                return this._changeSpace(this._homography, this.screenSize).then(homography => {

                    // we're ready to track the target!
                    return Speedy.Promise.resolve({
                        //nextState: 'pre-tracking',
                        nextState: 'tracking',
                        trackerOutput: trackerOutput,
                        nextStateSettings: {
                            homography: homography,
                            referenceImage: referenceImage,
                            templateKeypoints: keypoints,
                            keypointPortalSink: keypointPortalSink,
                            imagePortalSink: imagePortalSink,
                            screenSize: this.screenSize,
                        },
                    });

                });
            }
        }
    }

    /**
     * Find an adjustment warp between the camera image and the reference image
     * using Lowe-style ratio-test matches and a robust (PRANSAC) affine fit
     * @param dstKeypoints destination (camera keypoints with 2-NN matches)
     * @param srcKeypoints source (stored reference keypoints)
     * @returns a promise that resolves to a 3x3 homography (affine; last row [0 0 1]),
     *          or rejects with a TrackingError if there are too few good matches
     */
    private _findWarp(dstKeypoints: SpeedyMatchedKeypoint[], srcKeypoints: SpeedyKeypoint[]): SpeedyPromise<SpeedyMatrix>
    {
        //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
        const srcCoords: number[] = [];
        const dstCoords: number[] = [];

        // find matching coordinates of the keypoints; require both nearest
        // neighbors to be valid so the ratio test below is meaningful
        for(let i = 0; i < dstKeypoints.length; i++) {
            const dstKeypoint = dstKeypoints[i];
            if(dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
                const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;

                // the best match should be "much better" than the second best match,
                // which means that they are "distinct enough"
                if(d1 <= TRACK_MATCH_RATIO * d2) {
                    const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
                    srcCoords.push(srcKeypoint.x);
                    srcCoords.push(srcKeypoint.y);
                    dstCoords.push(dstKeypoint.x);
                    dstCoords.push(dstKeypoint.y);
                }
            }
        }

        // too few points? (an affine/homography fit needs several correspondences)
        const n = srcCoords.length / 2;
        if(n < 4) {
            return Speedy.Promise.reject(
                new TrackingError('Too few points to compute a warp')
            );
        }

        // compute warp; the Speedy matrix routines below appear to write their
        // result into the first (destination) argument, so `model` is filled
        // in place and read afterwards
        const model = Speedy.Matrix.Eye(3);
        return this._findKeypointWarp().then(transform =>

            // rectify keypoints: src points occupy columns [0, n), dst points [n, 2n)
            Speedy.Matrix.applyAffineTransform(
                Speedy.Matrix.Zeros(2, 2*n),
                Speedy.Matrix(2, 2*n, srcCoords.concat(dstCoords)),
                transform.block(0,1,0,2)
            )

        ).then(points =>

            // find warp (robust fit of the upper 2x3 block of `model`)
            Speedy.Matrix.findAffineTransform(
                model.block(0,1,0,2),
                points.block(0,1,0,n-1),
                points.block(0,1,n,2*n-1), {
                method: 'pransac',
                reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
                numberOfHypotheses: 512*4,
                bundleSize: 128,
            })

        ).then(_ => {

            // validate the model (a NaN entry signals a degenerate fit)
            const a00 = model.at(0,0);
            if(Number.isNaN(a00))
                throw new TrackingError(`Can't compute warp: bad keypoints`);

            // done!
            return model;

        });
    }

    /**
     * Find a warp to be applied to the keypoints
     * @returns affine transform as a 3x3 matrix: identity, or a 90-degree
     *          clockwise rotation around the screen center when the warped
     *          image must be rotated to fit the screen
     */
    private _findKeypointWarp(): SpeedyPromise<SpeedyMatrix>
    {
        const referenceImage = this._referenceImage as ReferenceImage;
        const media = this._imageTracker.database._findMedia(referenceImage.name);
        const screenSize = this.screenSize;

        // no rotation is needed
        if(!this._mustRotateWarpedImage(media, screenSize))
            return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));

        // rotate by 90 degrees clockwise around the pivot
        // (matrix is in column-major order — TODO confirm against Speedy's convention)
        const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
        return Speedy.Promise.resolve(Speedy.Matrix(3, 3, [
            0, 1, 0,
           -1, 0, 0,
            py+px, py-px, 1,
        ]));
    }

    /**
     * Change the space of the homography in order to improve tracking quality
     * @param homography mapping coordinates from normalized target space to AR screen space
     * @param screenSize AR screen size
     * @returns homography mapping coordinates from AR screen space to AR screen space
     */
    private _changeSpace(homography: SpeedyMatrix, screenSize: SpeedySize): SpeedyPromise<SpeedyMatrix>
    {
        // corners of the screen: (0,0), (sw,0), (sw,sh), (0,sh)
        const sw = screenSize.width, sh = screenSize.height;
        const screen = Speedy.Matrix(2, 4, [ 0, 0, sw, 0, sw, sh, 0, sh ]);

        // map the screen corners to the projected polyline of the target
        const mat = Speedy.Matrix.Zeros(3);
        return this._findPolylineCoordinates(homography, DEFAULT_TARGET_SPACE_SIZE).then(polyline =>
            Speedy.Matrix.perspective(mat, screen, polyline)
        );
    }

    /**
     * Create & setup the pipeline
     * @returns pipeline
     */
    protected _createPipeline(): SpeedyPipeline
    {
        const pipeline = Speedy.Pipeline();

        const source = Speedy.Image.Source('source');
        const imagePortalSource = Speedy.Image.Portal.Source('imagePortalSource');
        const sourceMux = Speedy.Image.Multiplexer('sourceMux');
        const screen = Speedy.Transform.Resize('screen');
        const greyscale = Speedy.Filter.Greyscale();
        const imageRectifier = Speedy.Transform.PerspectiveWarp('imageRectifier');
        const nightvision = Speedy.Filter.Nightvision();
        const nightvisionMux = Speedy.Image.Multiplexer();
        const detector = Speedy.Keypoint.Detector.Harris();
        const descriptor = Speedy.Keypoint.Descriptor.ORB();
        const blur = Speedy.Filter.GaussianBlur();
        const clipper = Speedy.Keypoint.Clipper();
        const borderClipper = Speedy.Keypoint.BorderClipper('borderClipper');
        const denoiser = Speedy.Filter.GaussianBlur();
        const subpixel = Speedy.Keypoint.SubpixelRefiner();
        const matcher = Speedy.Keypoint.Matcher.BFKNN();
        const keypointRectifier = Speedy.Keypoint.Transformer('keypointRectifier');
        const keypointPortalSink = Speedy.Keypoint.Portal.Sink('keypointPortalSink');
        const keypointPortalSource = Speedy.Keypoint.Portal.Source('keypointPortalSource');
        const muxOfReferenceKeypoints = Speedy.Keypoint.Multiplexer('muxOfReferenceKeypoints');
        const bufferOfReferenceKeypoints = Speedy.Keypoint.Buffer('bufferOfReferenceKeypoints');
        const muxOfBufferOfReferenceKeypoints = Speedy.Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
        const keypointSink = Speedy.Keypoint.SinkOfMatchedKeypoints('keypoints');
        // NOTE(review): imageSink is not connected nor passed to pipeline.init()
        // (both usages are commented out below) — kept for debugging, presumably
        const imageSink = Speedy.Image.Sink('image');

        // default node parameters; several are overwritten per-frame in
        // onEnterState() / _beforeUpdate()
        source.media = null;
        screen.size = Speedy.Size(0,0);
        imagePortalSource.source = null;
        imageRectifier.transform = Speedy.Matrix.Eye(3);
        sourceMux.port = PORT_REFERENCE_IMAGE;
        nightvision.gain = NIGHTVISION_GAIN;
        nightvision.offset = NIGHTVISION_OFFSET;
        nightvision.decay = NIGHTVISION_DECAY;
        nightvision.quality = NIGHTVISION_QUALITY;
        nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
        blur.kernelSize = Speedy.Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
        blur.sigma = Speedy.Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
        denoiser.kernelSize = Speedy.Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
        denoiser.sigma = Speedy.Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
        detector.quality = TRACK_HARRIS_QUALITY;
        detector.capacity = TRACK_DETECTOR_CAPACITY;
        subpixel.method = SUBPIXEL_METHOD;
        clipper.size = TRACK_MAX_KEYPOINTS;
        borderClipper.imageSize = screen.size;
        borderClipper.borderSize = Speedy.Vector2(0,0);
        matcher.k = 2; // 2-NN so _findWarp can apply the ratio test
        keypointRectifier.transform = Speedy.Matrix.Eye(3);
        keypointPortalSource.source = keypointPortalSink;
        muxOfReferenceKeypoints.port = 0;
        muxOfBufferOfReferenceKeypoints.port = 0;
        bufferOfReferenceKeypoints.frozen = false;
        keypointSink.turbo = false;

        // prepare input
        source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
        imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
        sourceMux.output().connectTo(screen.input());
        screen.output().connectTo(greyscale.input());

        // preprocess images
        greyscale.output().connectTo(imageRectifier.input());
        imageRectifier.output().connectTo(nightvisionMux.input('in0'));
        imageRectifier.output().connectTo(nightvision.input());
        nightvision.output().connectTo(nightvisionMux.input('in1'));
        nightvisionMux.output().connectTo(blur.input());

        // keypoint detection & clipping
        nightvisionMux.output().connectTo(detector.input());
        detector.output().connectTo(borderClipper.input());
        borderClipper.output().connectTo(clipper.input());

        // keypoint refinement
        imageRectifier.output().connectTo(denoiser.input());
        denoiser.output().connectTo(subpixel.input('image'));
        clipper.output().connectTo(subpixel.input('keypoints'));

        // keypoint description
        blur.output().connectTo(descriptor.input('image'));
        subpixel.output().connectTo(descriptor.input('keypoints'));

        // keypoint matching: the matcher's database is either the live
        // descriptors (port 0) or the stored/frozen reference keypoints (port 1)
        descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
        muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
        muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
        descriptor.output().connectTo(matcher.input('keypoints'));

        // store reference keypoints
        keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
        bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
        keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());

        // portals
        descriptor.output().connectTo(keypointPortalSink.input());

        // prepare output
        descriptor.output().connectTo(keypointRectifier.input());
        keypointRectifier.output().connectTo(keypointSink.input());
        matcher.output().connectTo(keypointSink.input('matches'));
        //imageRectifier.output().connectTo(imageSink.input());

        // done!
        pipeline.init(
            source, imagePortalSource, sourceMux, screen,
            greyscale, imageRectifier, nightvision, nightvisionMux, blur,
            detector, subpixel, clipper, borderClipper, denoiser, descriptor,
            keypointPortalSource, muxOfReferenceKeypoints, matcher,
            bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints,
            keypointRectifier, keypointSink,
            keypointPortalSink,
            //imageSink
        );

        return pipeline;
    }
}
|