WebXR: Beyond
 WebGL

3c557c6103a4addc52f7b76ffd0a0f27?s=47 yomotsu
November 25, 2018
860

WebXR: Beyond
 WebGL

3c557c6103a4addc52f7b76ffd0a0f27?s=128

yomotsu

November 25, 2018
Tweet

Transcript

  1. 6.

    • The API for both AR and VR
 (WebVR API

    will be replaced by WebXR Device API) • Close relationship
 with Khronos’ OpenXR WebXR Device API
  2. 7.

    • Currently, only available in
 Chrome Canary with flags •

    Only works on either
 https or localhost As of Nov. 2018
  3. 10.
  4. 11.
  5. 19.

    XRSession const glCanvas = document.createElement( 'canvas' ); const gl =

    glCanvas.getContext( 'webgl', { xrCompatible: true } ); xrSession.baseLayer = new XRWebGLLayer( xrSession, gl ); WebGL context to be used
 as a source for XR imagery
  6. 22.
  7. 26.
  8. 28.

    const width = window.innerWidth; const height = window.innerHeight; navigator.xr.requestDevice().then( (

    device ) => { const outputCanvas = document.getElementById( 'xrCanvas' ); outputCanvas.width = width; outputCanvas.height = height; const xrContext = outputCanvas.getContext( 'xrpresent' ); // session request must be done in user action such as click window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height );
  9. 29.

    const width = window.innerWidth; const height = window.innerHeight; navigator.xr.requestDevice().then( (

    device ) => { const outputCanvas = document.getElementById( 'xrCanvas' ); outputCanvas.width = width; outputCanvas.height = height; const xrContext = outputCanvas.getContext( 'xrpresent' ); // session request must be done in user action such as click window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height );
  10. 30.

    const width = window.innerWidth; const height = window.innerHeight; navigator.xr.requestDevice().then( (

    device ) => { const outputCanvas = document.getElementById( 'xrCanvas' ); outputCanvas.width = width; outputCanvas.height = height; const xrContext = outputCanvas.getContext( 'xrpresent' ); // session request must be done in user action such as click window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height );
  11. 31.

    window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession

    = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height ); // bind gl context to XR session const gl = renderer.getContext(); gl.setCompatibleXRDevice( xrSession.device ); xrSession.baseLayer = new XRWebGLLayer( xrSession, gl ); const scene = new THREE.Scene(); const camera = new THREE.PerspectiveCamera(); camera.matrixAutoUpdate = false; const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial()
  12. 32.

    window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession

    = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height ); // bind gl context to XR session const gl = renderer.getContext(); gl.setCompatibleXRDevice( xrSession.device ); xrSession.baseLayer = new XRWebGLLayer( xrSession, gl ); const scene = new THREE.Scene(); const camera = new THREE.PerspectiveCamera(); camera.matrixAutoUpdate = false; const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial()
  13. 33.

    window.addEventListener( 'click', onEnterAR ); async function onEnterAR() { const xrSession

    = await device.requestSession( { outputContext: xrContext, environmentIntegration: true, } ); const renderer = new THREE.WebGLRenderer(); renderer.autoClear = false; renderer.setSize( width, height ); // bind gl context to XR session const gl = renderer.getContext(); gl.setCompatibleXRDevice( xrSession.device ); xrSession.baseLayer = new XRWebGLLayer( xrSession, gl ); const scene = new THREE.Scene(); const camera = new THREE.PerspectiveCamera(); camera.matrixAutoUpdate = false; const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial()
  14. 34.

    xrSession.baseLayer = new XRWebGLLayer( xrSession, gl ); const scene =

    new THREE.Scene(); const camera = new THREE.PerspectiveCamera(); camera.matrixAutoUpdate = false; const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial() ); scene.add( box ); const frameOfRef = await xrSession.requestFrameOfReference( 'eye-level' ); xrSession.requestAnimationFrame( onDrawFrame ); function onDrawFrame( timestamp, xrFrame ) { const session = xrFrame.session; // xrSession === xrFrame.session const pose = xrFrame.getDevicePose( frameOfRef ); session.requestAnimationFrame( onDrawFrame ); gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer );
  15. 35.

    scene.add( box ); const frameOfRef = await xrSession.requestFrameOfReference( 'eye-level' );

    xrSession.requestAnimationFrame( onDrawFrame ); function onDrawFrame( timestamp, xrFrame ) { const session = xrFrame.session; // xrSession === xrFrame.session const pose = xrFrame.getDevicePose( frameOfRef ); session.requestAnimationFrame( onDrawFrame ); gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer ); if ( ! pose ) return; // if the session is for both right and left eyes, length of views would be 2. // if not, length is 1, xrFrame.views.forEach( ( view ) => { const viewport = session.baseLayer.getViewport( view ); renderer.setSize( viewport.width, viewport.height );
  16. 36.

    scene.add( box ); const frameOfRef = await xrSession.requestFrameOfReference( 'eye-level' );

    xrSession.requestAnimationFrame( onDrawFrame ); function onDrawFrame( timestamp, xrFrame ) { const session = xrFrame.session; // xrSession === xrFrame.session const pose = xrFrame.getDevicePose( frameOfRef ); session.requestAnimationFrame( onDrawFrame ); gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer ); if ( ! pose ) return; // if the session is for both right and left eyes, length of views would be 2. // if not, length is 1, xrFrame.views.forEach( ( view ) => { const viewport = session.baseLayer.getViewport( view ); renderer.setSize( viewport.width, viewport.height );
  17. 37.

    function onDrawFrame( timestamp, xrFrame ) { const session = xrFrame.session;

    // xrSession === xrFrame.session const pose = xrFrame.getDevicePose( frameOfRef ); session.requestAnimationFrame( onDrawFrame ); gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer ); if ( ! pose ) return; // if the session is for both right and left eyes, length of views would be 2. // if not, length is 1, xrFrame.views.forEach( ( view ) => { const viewport = session.baseLayer.getViewport( view ); renderer.setSize( viewport.width, viewport.height ); camera.projectionMatrix.fromArray( view.projectionMatrix ); const viewMatrix = new THREE.Matrix4().fromArray( pose.getViewMatrix( view ) ); camera.matrix.getInverse( viewMatrix ); camera.updateMatrixWorld( true ); renderer.clearDepth(); renderer.render( scene, camera );
  18. 38.

    function onDrawFrame( timestamp, xrFrame ) { const session = xrFrame.session;

    // xrSession === xrFrame.session const pose = xrFrame.getDevicePose( frameOfRef ); session.requestAnimationFrame( onDrawFrame ); gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer ); if ( ! pose ) return; // if the session is for both right and left eyes, length of views would be 2. // if not, length is 1, xrFrame.views.forEach( ( view ) => { const viewport = session.baseLayer.getViewport( view ); renderer.setSize( viewport.width, viewport.height ); camera.projectionMatrix.fromArray( view.projectionMatrix ); const viewMatrix = new THREE.Matrix4().fromArray( pose.getViewMatrix( view ) ); camera.matrix.getInverse( viewMatrix ); camera.updateMatrixWorld( true ); renderer.clearDepth(); renderer.render( scene, camera );
  19. 39.

    if ( ! pose ) return; // if the session

    is for both right and left eyes, length of views would be 2. // if not, length is 1, xrFrame.views.forEach( ( view ) => { const viewport = session.baseLayer.getViewport( view ); renderer.setSize( viewport.width, viewport.height ); camera.projectionMatrix.fromArray( view.projectionMatrix ); const viewMatrix = new THREE.Matrix4().fromArray( pose.getViewMatrix( view ) ); camera.matrix.getInverse( viewMatrix ); camera.updateMatrixWorld( true ); renderer.clearDepth(); renderer.render( scene, camera ); } ); } } } );
  20. 40.
  21. 42.
  22. 43.
  23. 44.

    const width = window.innerWidth; const height = window.innerHeight; const startButton

    = document.getElementById( 'startButton' ); navigator.xr.requestDevice().then( ( device ) => { const outputCanvas = document.getElementById( 'xrCanvas' ); outputCanvas.width = width; outputCanvas.height = height; const xrContext = outputCanvas.getContext( 'xrpresent' ); // session request must be done in user action such as click startButton.addEventListener( 'click', onEnterAR ); async function onEnterAR() { startButton.style.display = 'none'; const xrSession = await device.requestSession( { The same as previous one…
  24. 45.

    renderer.render( scene, camera ); } ); } window.addEventListener( 'click', onClick

    ); // use Raycaster to make ray origin and direction const raycaster = new THREE.Raycaster(); // onClick must be async, since hitTest will be done with await async function onClick() { const x = 0; const y = 0; raycaster.setFromCamera( { x, y }, camera ); const origin = new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { Add a click action
  25. 46.

    renderer.render( scene, camera ); } ); } window.addEventListener( 'click', onClick

    ); // use Raycaster to make ray origin and direction const raycaster = new THREE.Raycaster(); // onClick must be async, since hitTest will be done with await async function onClick() { const x = 0; const y = 0; raycaster.setFromCamera( { x, y }, camera ); const origin = new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) {
  26. 47.

    window.addEventListener( 'click', onClick ); // use Raycaster to make ray

    origin and direction const raycaster = new THREE.Raycaster(); // onClick must be async, since hitTest will be done with await async function onClick() { const x = 0; const y = 0; raycaster.setFromCamera( { x, y }, camera ); const origin = new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { const hit = hits[ 0 ]; const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix ); const box = new THREE.Mesh(
  27. 48.

    const raycaster = new THREE.Raycaster(); // onClick must be async,

    since hitTest will be done with await async function onClick() { const x = 0; const y = 0; raycaster.setFromCamera( { x, y }, camera ); const origin = new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { const hit = hits[ 0 ]; const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix ); const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial() );
  28. 49.

    const raycaster = new THREE.Raycaster(); // onClick must be async,

    since hitTest will be done with await async function onClick() { const x = 0; const y = 0; raycaster.setFromCamera( { x, y }, camera ); const origin = new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { const hit = hits[ 0 ]; const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix ); const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial() );
  29. 50.

    raycaster.setFromCamera( { x, y }, camera ); const origin =

    new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { const hit = hits[ 0 ]; const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix ); const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial() ); box.position.setFromMatrixPosition( hitMatrix ); scene.add( box ); } } }
  30. 51.

    raycaster.setFromCamera( { x, y }, camera ); const origin =

    new Float32Array( raycaster.ray.origin.toArray() ); const direction = new Float32Array( raycaster.ray.direction.toArray() ); const hits = await xrSession.requestHitTest( origin, direction, frameOfRef ); if ( hits.length ) { const hit = hits[ 0 ]; const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix ); const box = new THREE.Mesh( new THREE.BoxBufferGeometry( .2, .2, .2 ), new THREE.MeshNormalMaterial() ); box.position.setFromMatrixPosition( hitMatrix ); scene.add( box ); } } }
  31. 52.
  32. 55.

    • Upon the release of iOS 12 • Only works

    exclusively on Safari
 (Doesn’t work even in iOS Chrome) • Special HTML syntax • Apple’s proprietary AR Quick Look
  33. 57.
  34. 58.
  35. 59.
  36. 60.

    • Stands for “Universal Scene Description” archived with Zip •

    3D model format for AR Quick Look • Created by Apple and Pixar USDZ
  37. 61.

    • With “usdz_converter”,
 an Xcode command-line tool
 (only

    for macOS) • Vectary (Web service)
 — cannot configure details such as size How to prepare USDZ
  38. 62.

    • Up to 10M Polygons • Up to 10 seconds

    for the animation • Up to 2048×2048 texture size Limitations
  39. 63.

    $ xcrun usdz_converter ./my-model.obj my-model.usdz -color_map albedo.jpg -metallic_map metallic.jpg
 -roughness_map

    roughness.jpg -normal_map normal.jpg -ao_map ao.jpg -emissive_map emissive.jpg USDZ Converter Terminal.app
  40. 64.

    $ xcrun usdz_converter ./my-model.obj my-model.usdz -color_map albedo.jpg -metallic_map metallic.jpg
 -roughness_map

    roughness.jpg -normal_map normal.jpg -ao_map ao.jpg -emissive_map emissive.jpg Terminal.app
  41. 65.

    $ xcrun usdz_converter ./my-model.obj my-model.usdz -color_map albedo.jpg -metallic_map metallic.jpg
 -roughness_map

    roughness.jpg -normal_map normal.jpg -ao_map ao.jpg -emissive_map emissive.jpg Terminal.app File input Output name Input option name Option value
  42. 69.

    • Apple's proprietary format
 (Hopefully it’s a temporary spec until the WebXR

    Device API) • Some limitations of USDZ • Just pop and show in AR
 (Cannot be utilized as a game or other interactive content)
  43. 71.
  44. 72.

    • Stands for GL Transmission Format • Open standard 3D

    model format • JPEG of 3D • Maintained by Khronos What is glTF
  45. 73.

    • JSON format as the container with binary payloads
 or

    packed single binary file called glb • Animation supported • Extensible just like WebGL Spec
 (Like Adobe Fireworks PNG) What is glTF
  46. 74.
  47. 75.

    • Supported by many 3D modeling tools • Several WebGL

    libraries support glTF loading
 (Such as three.js, BabylonJS, Cesium) • Microsoft Paint3D, Office and others use glb as 3D model format • Adobe Animate has glTF exporter • Facebook's 3D posts use glb • VRM: glTF extended format for humanoid avatar
 (For Virtual-YouTuber, VRChat and others) glTF of the present
  48. 79.

    const width = window.innerWidth; const height = window.innerHeight; const scene

    = new THREE.Scene(); const camera = new THREE.PerspectiveCamera( 45, width / height, 0.001, 100 ); camera.position.set( 0, 0, 0.5 ); const renderer = new THREE.WebGLRenderer(); renderer.setSize( width, height ); renderer.gammaInput = true; renderer.gammaOutput = true; document.body.appendChild( renderer.domElement ); scene.add( new THREE.HemisphereLight( 0xffffff , 0x332222 ) );
  49. 80.

    document.body.appendChild( renderer.domElement ); scene.add( new THREE.HemisphereLight( 0xffffff , 0x332222 )

    ); const loader = new THREE.GLTFLoader(); loader.load( './models/barger/barger.gltf', function ( gltf ) { scene.add( gltf.scene ); } ); ( function anim () { requestAnimationFrame( anim );
  50. 81.

    scene.add( gltf.scene ); } ); ( function anim () {

    requestAnimationFrame( anim ); renderer.render( scene, camera ); } )();
  51. 82.
  52. 83.
  53. 85.
  54. 88.

    • Gaze tracking
 Detect virtual keyboard typing from gaze direction

    in a VR environment • Trusted environment
 Motion sickness from low FPS and pose tracking errors • Fingerprinting
 Identify users’ room shape or even face shape with depth
  55. 91.

    • A Web API (in development) • For both VR

    and AR • Just works in Web browsers —
 no add-ons or installation required WebXR Device API
  56. 92.
  57. 93.

    • The standard • 3D model format in JSON or

    Binary • Can be seen many places • Loaders are available in JavaScript glTF
  58. 94.

    The Web will be connected
 to the real world 94 Source:

    https://www.netflix.com/jp/title/80182418 © 士郎正宗・Production I.G/講談社・攻殻機動隊製作委員会