You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

demo.js 8.4KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343
/**
 * Augmented Reality demo using the three.js plugin for encantar.js
 * @author Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart/encantar-js)
 */
  5. (function() {
  6. /**
  7. * Utilities for the Demo scene
  8. */
  9. class DemoUtils
  10. {
  11. async loadGLTF(filepath, yAxisIsUp = true)
  12. {
  13. const loader = new THREE.GLTFLoader();
  14. const gltf = await loader.loadAsync(filepath);
  15. // glTF defines +y as up. We expect +z to be up.
  16. if(yAxisIsUp)
  17. gltf.scene.rotateX(Math.PI / 2);
  18. return gltf;
  19. }
  20. createAnimationAction(gltf, name = null, loop = THREE.LoopRepeat)
  21. {
  22. const mixer = new THREE.AnimationMixer(gltf.scene);
  23. const clips = gltf.animations;
  24. if(clips.length == 0)
  25. throw new Error('No animation clips');
  26. if(name === null) {
  27. const sortedNames = clips.map(clip => clip.name).sort();
  28. name = sortedNames[0];
  29. }
  30. const clip = THREE.AnimationClip.findByName(clips, name);
  31. const action = mixer.clipAction(clip);
  32. action.loop = loop;
  33. return action;
  34. }
  35. createImagePlane(imagepath)
  36. {
  37. const texture = new THREE.TextureLoader().load(imagepath);
  38. const geometry = new THREE.PlaneGeometry(1, 1);
  39. const material = new THREE.MeshBasicMaterial({
  40. map: texture,
  41. side: THREE.DoubleSide,
  42. });
  43. const mesh = new THREE.Mesh(geometry, material);
  44. return mesh;
  45. }
  46. switchToFrontView(ar)
  47. {
  48. // top view is the default
  49. ar.root.rotation.set(-Math.PI / 2, 0, 0);
  50. }
  51. referenceImageName(ar)
  52. {
  53. if(ar.frame === null)
  54. return null;
  55. for(const result of ar.frame.results) {
  56. if(result.tracker.type == 'image-tracker') {
  57. if(result.trackables.length > 0) {
  58. const trackable = result.trackables[0];
  59. return trackable.referenceImage.name;
  60. }
  61. }
  62. }
  63. return null;
  64. }
  65. }
  66. /**
  67. * Demo scene
  68. */
  69. class DemoScene extends ARScene
  70. {
  71. /**
  72. * Constructor
  73. */
  74. constructor()
  75. {
  76. super();
  77. this._utils = new DemoUtils();
  78. this._objects = { };
  79. }
  80. /**
  81. * Start the AR session
  82. * @returns {Promise<Session>}
  83. */
  84. async startSession()
  85. {
  86. if(!AR.isSupported()) {
  87. throw new Error(
  88. 'This device is not compatible with this AR experience.\n\n' +
  89. 'User agent: ' + navigator.userAgent
  90. );
  91. }
  92. const tracker = AR.Tracker.ImageTracker();
  93. await tracker.database.add([
  94. {
  95. name: 'mage',
  96. image: document.getElementById('mage')
  97. },
  98. {
  99. name: 'cat',
  100. image: document.getElementById('cat')
  101. }
  102. ]);
  103. const viewport = AR.Viewport({
  104. container: document.getElementById('ar-viewport'),
  105. hudContainer: document.getElementById('ar-hud')
  106. });
  107. const video = document.getElementById('my-video');
  108. const useWebcam = (video === null);
  109. const source = useWebcam ? AR.Source.Camera() : AR.Source.Video(video);
  110. const session = await AR.startSession({
  111. mode: 'immersive',
  112. viewport: viewport,
  113. trackers: [ tracker ],
  114. sources: [ source ],
  115. stats: true,
  116. gizmos: true,
  117. });
  118. const scan = document.getElementById('scan');
  119. tracker.addEventListener('targetfound', event => {
  120. session.gizmos.visible = false;
  121. if(scan)
  122. scan.hidden = true;
  123. this._onTargetFound(event.referenceImage);
  124. });
  125. tracker.addEventListener('targetlost', event => {
  126. session.gizmos.visible = true;
  127. if(scan)
  128. scan.hidden = false;
  129. this._onTargetLost(event.referenceImage);
  130. });
  131. return session;
  132. }
  133. /**
  134. * Initialize the augmented scene
  135. * @param {ARSystem} ar
  136. * @returns {Promise<void>}
  137. */
  138. async init(ar)
  139. {
  140. // Change the point of view. All virtual objects are descendants of
  141. // ar.root, a node that is automatically aligned to the physical scene.
  142. // Adjusting ar.root will adjust all virtual objects.
  143. this._utils.switchToFrontView(ar);
  144. ar.root.position.set(0, -0.5, 0);
  145. // initialize objects
  146. this._initLight(ar);
  147. this._initText(ar);
  148. this._initMagicCircle(ar);
  149. await Promise.all([
  150. this._initMage(ar),
  151. this._initCat(ar),
  152. ]);
  153. }
  154. /**
  155. * Update / animate the augmented scene
  156. * @param {ARSystem} ar
  157. * @returns {void}
  158. */
  159. update(ar)
  160. {
  161. const delta = ar.session.time.delta; // given in seconds
  162. // animate the objects of the scene
  163. this._animateMagicCircle(delta);
  164. this._animateMage(delta);
  165. this._animateCat(delta);
  166. }
  167. // ------------------------------------------------------------------------
  168. _initLight(ar)
  169. {
  170. const ambientLight = new THREE.AmbientLight(0xffffff);
  171. ambientLight.intensity = 1.5;
  172. ar.scene.add(ambientLight);
  173. }
  174. _initMagicCircle(ar)
  175. {
  176. // load the object
  177. const magicCircle = this._utils.createImagePlane('../assets/magic-circle.png');
  178. magicCircle.material.transparent = true;
  179. magicCircle.material.opacity = 1;
  180. magicCircle.scale.set(4, 4, 1);
  181. // add the object to the scene
  182. ar.root.add(magicCircle);
  183. // save a reference
  184. this._objects.magicCircle = magicCircle;
  185. }
  186. _initText(ar)
  187. {
  188. const text = this._utils.createImagePlane('../assets/it-works.png');
  189. text.material.transparent = true;
  190. text.material.opacity = 1;
  191. text.position.set(0, -0.5, 2);
  192. text.scale.set(3, 1.5, 1);
  193. text.rotateX(Math.PI / 2);
  194. ar.root.add(text);
  195. this._objects.text = text;
  196. }
  197. async _initMage(ar)
  198. {
  199. // load the mage
  200. const gltf = await this._utils.loadGLTF('../assets/mage.glb');
  201. const mage = gltf.scene;
  202. mage.scale.set(0.7, 0.7, 0.7);
  203. // prepare the animation of the mage
  204. const mageAction = this._utils.createAnimationAction(gltf, 'Idle');
  205. mageAction.play();
  206. // add the mage to the scene
  207. ar.root.add(mage);
  208. // save references
  209. this._objects.mage = mage;
  210. this._objects.mageAction = mageAction;
  211. }
  212. async _initCat(ar)
  213. {
  214. const gltf = await this._utils.loadGLTF('../assets/cat.glb');
  215. const cat = gltf.scene;
  216. cat.scale.set(0.7, 0.7, 0.7);
  217. const catAction = this._utils.createAnimationAction(gltf, 'Cheer');
  218. catAction.play();
  219. ar.root.add(cat);
  220. this._objects.cat = cat;
  221. this._objects.catAction = catAction;
  222. }
  223. _animate(action, delta)
  224. {
  225. const mixer = action.getMixer();
  226. mixer.update(delta);
  227. }
  228. _animateMage(delta)
  229. {
  230. this._animate(this._objects.mageAction, delta);
  231. }
  232. _animateCat(delta)
  233. {
  234. this._animate(this._objects.catAction, delta);
  235. }
  236. _animateMagicCircle(delta)
  237. {
  238. const TWO_PI = 2.0 * Math.PI;
  239. const ROTATIONS_PER_SECOND = 1.0 / 8.0;
  240. this._objects.magicCircle.rotateZ(-TWO_PI * ROTATIONS_PER_SECOND * delta);
  241. }
  242. _onTargetFound(referenceImage)
  243. {
  244. // change the scene based on the scanned image
  245. switch(referenceImage.name) {
  246. case 'mage':
  247. this._objects.mage.visible = true;
  248. this._objects.cat.visible = false;
  249. this._objects.text.visible = false;
  250. this._objects.magicCircle.material.color.set(0xbeefff);
  251. break;
  252. case 'cat':
  253. this._objects.mage.visible = false;
  254. this._objects.cat.visible = true;
  255. this._objects.text.visible = true;
  256. this._objects.magicCircle.material.color.set(0xffffaa);
  257. break;
  258. }
  259. }
  260. _onTargetLost(referenceImage)
  261. {
  262. }
  263. }
  264. /**
  265. * Enchant the scene
  266. * @returns {void}
  267. */
  268. function main()
  269. {
  270. const scene = new DemoScene();
  271. if(typeof encantar === 'undefined')
  272. throw new Error(`Can't find the three.js plugin for encantar.js`);
  273. encantar(scene).catch(error => {
  274. alert(error.message);
  275. });
  276. }
  277. document.addEventListener('DOMContentLoaded', main);
  278. })();