Upgrade to Pro — share decks privately, control downloads, hide ads and more …

Add Reality to your App with ARCore

Add Reality to your App with ARCore

AR is a buzzword we hear everywhere. But is it really possible to build enhanced AR experiences quickly? There were quite a few attempts to bring AR to Android, but they were not easy to implement or required special hardware. ARCore solves both of these problems. It is a fast and performant SDK providing a great API to build AR-focused apps using just a phone’s camera. The slides cover the main AR concepts, geometry detection, object rendering and potential problems. Lessons learned from building an e-commerce AR experience at Jet.com are also shared.

Avatar for Yuliya Kaleda

Yuliya Kaleda

May 06, 2018
Tweet

More Decks by Yuliya Kaleda

Other Decks in Programming

Transcript

  1. Tango several cameras sensors high cost $ few devices (Lenovo

    Phab 2 Pro, Asus Zenfone) DEPRECATED
  2. <manifest xmlns:android="http://schemas.android.com/apk/res/android" android:targetSandboxVersion="2" package="com.jet.baselib"> <uses-feature android:name="android.hardware.camera.ar" android:required="true"/> <application android:name="JetApp"> .

    . . . <activity android:name=".activities.SplashScreenActivity" /> <meta-data android:name="com.google.ar.core" android:value="required" /> </application> </manifest> AR Required
  3. <manifest xmlns:android="http://schemas.android.com/apk/res/android" android:targetSandboxVersion="2" package="com.jet.baselib"> <application android:name="JetApp"> . . . <activity

    android:name=".activities.SplashScreenActivity" /> <meta-data android:name="com.google.ar.core" android:value="optional" /> </application> </manifest> AR Optional
  4. private void showArBanner() { ArCoreApk.Availability availability = ArCoreApk.getInstance() .checkAvailability(arCard.getContext()); if

    (availability.isTransient()) { new Handler().postDelayed(() -> showArBanner(), 200); } HelperViews.setVisibleOrGone(arCard, availability.isSupported()); } AR Optional
  5. private void showArBanner() { ArCoreApk.Availability availability = ArCoreApk.getInstance() .checkAvailability(arCard.getContext().getApplicationContext()); if

    (availability.isTransient()) { new Handler().postDelayed(() -> showArBanner(), 200); } HelperViews.setVisibleOrGone(arCard, availability.isSupported()); } AR Optional
  6. private boolean requestedARCoreInstall = true; public void onResume() { try

    { switch (ArCoreApk.getInstance() .requestInstall(activity, requestedARCoreInstall)) { case INSTALLED: //Success break; case INSTALL_REQUESTED: //Request installation requestedARCoreInstall = false; return; } } catch (UnavailableUserDeclinedInstallationException e) { message = "Please install or update ARCore"; } catch (Exception e) { message = "This device does not support AR"; } if (message != null) { showSnackbarMessage(message, true); return; } } ARCore installation
  7. private boolean requestedARCoreInstall = true; public void onResume() { try

    { switch (ArCoreApk.getInstance() .requestInstall(activity, requestedARCoreInstall)) { case INSTALLED: //Success break; case INSTALL_REQUESTED: //Request installation requestedARCoreInstall = false; return; } } catch (UnavailableUserDeclinedInstallationException e) { message = "Please install or update ARCore"; } catch (Exception e) { message = "This device does not support AR"; } if (message != null) { showSnackbarMessage(message, true); return; } } ARCore installation
  8. public class ArCoreRenderer implements GLSurfaceView.Renderer { @Override public void onSurfaceCreated(GL10

    gl, EGLConfig config) { } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { } @Override public void onDrawFrame(GL10 gl) { } } UI Rendering GL thread
  9. public class ArActivity extends JetActivity { @Override protected void onCreate(Bundle

    savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_ar); GLSurfaceView surfaceView = findViewById(R.id.surfaceview); ArCoreRenderer coreRenderer = new ArCoreRenderer(this); surfaceView.setPreserveEGLContextOnPause(true); surfaceView.setEGLContextClientVersion(2); surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); surfaceView.setRenderer(coreRenderer); surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); } } UI Rendering
  10. public void onResume() { if (session == null) { try

    { switch (ArCoreApk.getInstance() .requestInstall(activity, requestedARCoreInstall)) { case INSTALLED: session = new Session(activity); break; case INSTALL_REQUESTED: requestedARCoreInstall = false; return; } } catch (UnavailableUserDeclinedInstallationException e) { ... } Config config = new Config(session); session.configure(config); } session.resume(); } Session
  11. public void onPause() { if (session != null) { session.pause();

    } } Session * no guarantees that objects are trackable
  12. Session Config light estimation mode (AMBIENT_INTENSITY, DISABLED) plane finding mode

    (DISABLED, HORIZONTAL) update mode (BLOCKING, LATEST_CAMERA_IMAGE)
  13. public void onResume() { if (session == null) { try

    { switch (ArCoreApk.getInstance() .requestInstall(activity, requestedARCoreInstall)) { case INSTALLED: session = new Session(activity); break; case INSTALL_REQUESTED: requestedARCoreInstall = false; return; } } catch (UnavailableUserDeclinedInstallationException e) { ... } // Create default config Config config = new Config(session); session.configure(config); } session.resume(); } Session Config
  14. @Override public void onDrawFrame(GL10 gl) { if (session == null)

    { return; } Frame frame = session.update(); } Frame
  15. @Override public void onDrawFrame(GL10 gl) { if (session == null)

    { return; } Frame frame = session.update(); Camera camera = frame.getCamera(); } Frame
  16. @Override public void onDrawFrame(GL10 gl) { if (session == null)

    { return; } Frame frame = session.update(); Camera camera = frame.getCamera(); float lightIntensity = frame.getLightEstimate().getPixelIntensity(); } Frame
  17. @Override public void onDrawFrame(GL10 gl) { if (session == null)

    { return; } Frame frame = session.update(); Camera camera = frame.getCamera(); float lightIntensity = frame.getLightEstimate().getPixelIntensity(); List<HitResult> hitResults = frame.hitTest(position.x, position.y); //List<HitResult> hitResults = frame.hitTest(motionEvent); } Frame
  18. @Override public void onDrawFrame(GL10 gl) { if (session == null)

    { return; } Frame frame = session.update(); Camera camera = frame.getCamera(); float lightIntensity = frame.getLightEstimate().getPixelIntensity(); //List<HitResult> hitResults = frame.hitTest(position.x, position.y); List<HitResult> hitResults = frame.hitTest(motionEvent); } Frame
  19. @Override public void onDrawFrame(GL10 gl) { . . . List<HitResult>

    hitResults = frame.hitTest(position.x, position.y); if (hitResults.size() > 0) { HitResult hit = getClosestHit(hitResults); } } private HitResult getClosestHit(List<HitResult> hitResults) { for (HitResult hitResult : hitResults) { if (hitResult.getTrackable() instanceof Plane) { return hitResult; } } return hitResults.get(0); } HitResult
  20. @Override public void onDrawFrame(GL10 gl) { . . . List<HitResult>

    hitResults = frame.hitTest(position.x, position.y); if (hitResults.size() > 0) { HitResult hit = getClosestHit(hitResults); Anchor anchor = hit.createAnchor(); } } Anchor
  21. Object rendering geometry texture o GoogleHome_GEO v 9.16292e-006 0.00498037 -0.0290344

    v -0.00769833 0.00176132 -0.0292111 v -0.0148647 0.00176132 -0.0261927 v -0.0210186 0.00176132 -0.0213912 v -0.0257407 0.00176132 -0.0151337 v -0.0287091 0.00176132 -0.00784677 v -0.0297216 0.00176132 -2.6893e-005 v -0.0287091 0.00176132 0.00779299 v -0.0257407 0.00176132 0.01508 v -0.0210186 0.00176132 0.0213374 v -0.0148647 0.00176132 0.0261389 v -0.00769833 0.00176132 0.0291573 v -7.85162e-006 0.00176132 0.0301868 v 0.00768262 0.00176132 0.0291573 v 0.014849 0.00176132 0.0261389 v 0.0210029 0.00176132 0.0213374 v 0.025725 0.00176132 0.01508 v 0.0286934 0.00176132 0.00779299 … .obj .webP
  22. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { // Read texture. Bitmap textureBitmap = BitmapFactory.decodeStream(context.getAssets() .open(diffuseTextureAssetName)); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glGenTextures(textures.length, textures, 0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0); GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); textureBitmap.recycle(); } Texture
  23. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { // Read texture. Bitmap textureBitmap = BitmapFactory.decodeStream(context.getAssets() .open(diffuseTextureAssetName)); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glGenTextures(textures.length, textures, 0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0); GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); textureBitmap.recycle(); } Texture
  24. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { // Read texture. Bitmap textureBitmap = BitmapFactory.decodeStream(context.getAssets() .open(diffuseTextureAssetName)); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glGenTextures(textures.length, textures, 0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0); GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); textureBitmap.recycle(); } Texture
  25. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Read the obj file. InputStream objInputStream = context.getAssets().open(objAssetName); Obj obj = ObjReader.read(objInputStream); obj = ObjUtils.convertToRenderable(obj); IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3); FloatBuffer vertices = ObjData.getVertices(obj); FloatBuffer texCoords = ObjData.getTexCoords(obj, 2); FloatBuffer normals = ObjData.getNormals(obj); . . . // Load vertex buffer verticesBaseAddress = 0; texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit(); normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit(); final int totalBytes = normalsBaseAddress + 4 * normals.limit(); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId); GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); . . . } Read .obj
  26. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Read the obj file. InputStream objInputStream = context.getAssets().open(objAssetName); Obj obj = ObjReader.read(objInputStream); obj = ObjUtils.convertToRenderable(obj); IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3); FloatBuffer vertices = ObjData.getVertices(obj); FloatBuffer texCoords = ObjData.getTexCoords(obj, 2); FloatBuffer normals = ObjData.getNormals(obj); . . . // Load vertex buffer verticesBaseAddress = 0; texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit(); normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit(); final int totalBytes = normalsBaseAddress + 4 * normals.limit(); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId); GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); . . . } Read .obj
  27. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Read the obj file. InputStream objInputStream = context.getAssets().open(objAssetName); Obj obj = ObjReader.read(objInputStream); obj = ObjUtils.convertToRenderable(obj); IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3); FloatBuffer vertices = ObjData.getVertices(obj); FloatBuffer texCoords = ObjData.getTexCoords(obj, 2); FloatBuffer normals = ObjData.getNormals(obj); . . . // Load vertex buffer verticesBaseAddress = 0; texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit(); normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit(); final int totalBytes = normalsBaseAddress + 4 * normals.limit(); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId); GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); . . . } Read .obj
  28. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Read the obj file. InputStream objInputStream = context.getAssets().open(objAssetName); Obj obj = ObjReader.read(objInputStream); obj = ObjUtils.convertToRenderable(obj); IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3); FloatBuffer vertices = ObjData.getVertices(obj); FloatBuffer texCoords = ObjData.getTexCoords(obj, 2); FloatBuffer normals = ObjData.getNormals(obj); . . . // Load vertex buffer verticesBaseAddress = 0; texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit(); normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit(); final int totalBytes = normalsBaseAddress + 4 * normals.limit(); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId); GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); . . . } Read .obj
  29. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Load shaders final int vertexShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.object_vertex); final int fragmentShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.object_fragment); program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); GLES20.glUseProgram(program); GLES20.glDetachShader(program, vertexShader); GLES20.glDetachShader(program, fragmentShader); GLES20.glDeleteShader(vertexShader); GLES20.glDeleteShader(fragmentShader); textureUniform = GLES20.glGetUniformLocation(program, "u_Texture"); lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters"); materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters"); . . . Matrix.setIdentityM(modelMatrix, 0); } Load shaders
  30. public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException

    { . . . // Load shaders final int vertexShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.object_vertex); final int fragmentShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.object_fragment); program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); GLES20.glUseProgram(program); GLES20.glDetachShader(program, vertexShader); GLES20.glDetachShader(program, fragmentShader); GLES20.glDeleteShader(vertexShader); GLES20.glDeleteShader(fragmentShader); textureUniform = GLES20.glGetUniformLocation(program, "u_Texture"); lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters"); materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters"); . . . Matrix.setIdentityM(modelMatrix, 0); } Load shaders
  31. public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity, float deltaTime)

    { // Build the ModelView and ModelViewProjection matrices to calculate object position Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0); Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0); GLES20.glUseProgram(program); // Configure light Matrix.multiplyMV(viewLightDirection, 0, cameraView, 0, LIGHT_DIRECTION, 0); normalize(viewLightDirection); GLES20.glUniform4f(lightingParametersUniform,viewLightDirection[0], viewLightDirection[1], viewLightDirection[2], lightIntensity); // Set object material properties GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower); // Attach the object texture. GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glUniform1i(textureUniform, 0); . . . } Finally draw
  32. public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity, float deltaTime)

    { // Build the ModelView and ModelViewProjection matrices to calculate object position Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0); Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0); GLES20.glUseProgram(program); // Configure light Matrix.multiplyMV(viewLightDirection, 0, cameraView, 0, LIGHT_DIRECTION, 0); normalize(viewLightDirection); GLES20.glUniform4f(lightingParametersUniform,viewLightDirection[0], viewLightDirection[1], viewLightDirection[2], lightIntensity); // Set object material properties GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower); // Attach the object texture. GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glUniform1i(textureUniform, 0); . . . } Finally draw
  33. public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity, float deltaTime)

    { // Build the ModelView and ModelViewProjection matrices to calculate object position Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0); Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0); GLES20.glUseProgram(program); // Configure light Matrix.multiplyMV(viewLightDirection, 0, cameraView, 0, LIGHT_DIRECTION, 0); normalize(viewLightDirection); GLES20.glUniform4f(lightingParametersUniform, viewLightDirection[0], viewLightDirection[1], viewLightDirection[2], lightIntensity); // Set object material properties. GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower); // Attach the object texture. GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glUniform1i(textureUniform, 0); . . . } Finally draw
  34. public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity, float deltaTime)

    { // Build the ModelView and ModelViewProjection matrices for calculating object position Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0); Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0); GLES20.glUseProgram(program); // Configure light Matrix.multiplyMV(viewLightDirection, 0, cameraView, 0, LIGHT_DIRECTION, 0); normalize(viewLightDirection); GLES20.glUniform4f(lightingParametersUniform, viewLightDirection[0], viewLightDirection[1], viewLightDirection[2], lightIntensity); // Set object material properties GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower); // Attach the object texture. GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glUniform1i(textureUniform, 0); . . . } Finally draw
  35. Performance tips: don’t rely on world coordinates — use anchors; don’t use more than 12

    anchors; don’t use vibration; detect a plane if available, else use the closest feature point; keep 30 fps or higher; lock the screen orientation
  36. private void moveActiveObject(Session session, Frame frame, MotionEvent event) { List<HitResult>

    hitResults = frame.hitTest(event); Anchor newAnchor = null; if (hitResults.size() > 0) { HitResult hit = hitResults.get(0); newAnchor = hit.createAnchor(); } if (newAnchor != null) { Pose cameraPose = frame.getCamera().getPose(); Pose anchorPose = newAnchor.getPose(); float distance = getDistanceBetweenPoses(cameraPose, anchorPose); if (distance > OBJ_MAX_DISTANCE) return; activeArObject.getAnchor().detach(); activeArObject.setAnchor(newAnchor); } } Distance
  37. if (anchor.getTrackingState() != TrackingState.TRACKING) { //Do not draw objects }

    if (camera.getTrackingState() == TrackingState.PAUSED) { return; } Tracking State TRACKING PAUSED STOPPED