WebGL context to be used as a source for XR imagery

const gl = glCanvas.getContext( 'webgl', { xrCompatible: true } );
xrSession.baseLayer = new XRWebGLLayer( xrSession, gl );
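For context, a minimal sketch of the session setup around this snippet, assuming the 2018 WebXR Device API draft these slides target ( requestDevice() and requestFrameOfReference() come from that draft and differ from the final spec; the exact session options varied across draft revisions ):

// sketch only: API names follow the 2018 WebXR draft used on these slides
const device = await navigator.xr.requestDevice();
const xrSession = await device.requestSession( { immersive: true } );

// frameOfRef is what getDevicePose() and requestHitTest() receive later
const frameOfRef = await xrSession.requestFrameOfReference( 'eye-level' );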
xrSession.requestAnimationFrame( onDrawFrame );

function onDrawFrame( timestamp, xrFrame ) {

	const session = xrFrame.session; // xrSession === xrFrame.session
	const pose = xrFrame.getDevicePose( frameOfRef );

	session.requestAnimationFrame( onDrawFrame );

	gl.bindFramebuffer( gl.FRAMEBUFFER, session.baseLayer.framebuffer );

	if ( ! pose ) return;

	// if the session renders both right and left eyes, views.length is 2;
	// if not, it is 1
	xrFrame.views.forEach( ( view ) => {

		const viewport = session.baseLayer.getViewport( view );
		renderer.setSize( viewport.width, viewport.height );

		camera.projectionMatrix.fromArray( view.projectionMatrix );

		const viewMatrix = new THREE.Matrix4().fromArray( pose.getViewMatrix( view ) );
		camera.matrix.getInverse( viewMatrix );
		camera.updateMatrixWorld( true );

		renderer.clearDepth();
		renderer.render( scene, camera );

	} );

}
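One three.js detail the loop above depends on ( an assumption of this sketch, not shown on the slide ): because camera.matrix is written by hand and two views may be drawn per frame, the camera's automatic matrix updates and the renderer's automatic clearing need to be disabled:

// assumed flags: keep three.js from rebuilding camera.matrix from
// position / quaternion, and from clearing between the per-eye passes
camera.matrixAutoUpdate = false;
renderer.autoClear = false;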
Add a click action

// use Raycaster to make the ray origin and direction
const raycaster = new THREE.Raycaster();

// onClick must be async, since hitTest will be done with await
async function onClick() {

	const x = 0;
	const y = 0;
	raycaster.setFromCamera( { x, y }, camera );

	const origin = new Float32Array( raycaster.ray.origin.toArray() );
	const direction = new Float32Array( raycaster.ray.direction.toArray() );

	const hits = await xrSession.requestHitTest( origin, direction, frameOfRef );

	if ( hits.length ) {

		const hit = hits[ 0 ];
		const hitMatrix = new THREE.Matrix4().fromArray( hit.hitMatrix );

		const box = new THREE.Mesh(
			new THREE.BoxBufferGeometry( .2, .2, .2 ),
			new THREE.MeshNormalMaterial()
		);

		// place the box at the hit position and add it to the scene
		box.applyMatrix( hitMatrix );
		scene.add( box );

	}

}
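Note that ( x, y ) = ( 0, 0 ) is the center of the screen in normalized device coordinates, so the ray is cast from the middle of the camera view. Hooking the handler up is then just ( a sketch; the event to listen for depends on the input setup ):

// assumed wiring: cast the hit-test ray whenever the screen is tapped / clicked
window.addEventListener( 'click', onClick );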
glTF of the present

• Many libraries support glTF loading (such as three.js, Babylon.js, Cesium)
• Microsoft Paint 3D, Office and others use glb as a 3D model format
• Adobe Animate has a glTF exporter
• Facebook's 3D posts use glb
• VRM: a glTF-based extended format for humanoid avatars (for Virtual YouTubers, VRChat and others)
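As a concrete example of the first bullet, a minimal three.js loading sketch ( GLTFLoader ships with the three.js examples, examples/js/loaders/GLTFLoader.js; 'model.glb' is a placeholder path ):

// load a glb / glTF file and add its scene graph to the three.js scene
const loader = new THREE.GLTFLoader();
loader.load( 'model.glb', ( gltf ) => {

	scene.add( gltf.scene );

} );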
Security and privacy in a VR environment

• Trusted environment: low FPS and pose tracking errors cause motion sickness
• Fingerprinting: a user's room shape, or even face shape, can be identified from depth data