360 AnDev 2016 - Developing Apps for Project Tango

Starting an app with a virtual or augmented reality component is a bit daunting for most. When starting out, frameworks like Unity3D or Unreal Engine simplify a great many things.

But from an Android developer's perspective, you'll need to forget all about the nice APIs and libraries you're used to, and shoehorn your business logic into frameworks designed to build games first, not apps. Not a great proposition.

Let's explore the path less traveled together! In this session, we'll see how you can build VR and AR applications in Java and Android Studio, thanks to Project Tango's Java SDKs, plus a few helpful 3D graphics libraries.

Etienne Caron

August 05, 2016

Transcript

  1. DEVELOPING APPS FOR PROJECT TANGO
  2. Use case ideas
     • Real estate
     • Interior decoration
     • In-store analytics
     • VR content production
     • Augmented museum exhibits
  3. protected void onResume() {
       super.onResume();

       if (!isConnected) {
         tango = new Tango(ControlRoomActivity.this, () -> {
           try {
             TangoSupport.initialize();
             connectTango();
             isConnected = true;
           } catch (TangoOutOfDateException e) {
             Log.e(TAG, getString(R.string.exception_out_of_date), e);
           }
         });
       }
     }
  4. protected void onPause() {
       super.onPause();
       synchronized (this) {
         if (isConnected) {
           tango.disconnectCamera(TANGO_CAMERA_COLOR);
           tango.disconnect();
           isConnected = false;
         }
       }
     }
  5. tango.connectListener(framePairs, new OnTangoUpdateListener() {
       @Override
       public void onPoseAvailable(TangoPoseData pose) {
         // We could process pose data here, but we are not
         // directly using onPoseAvailable() for this app.
         logPose(pose);
       } ...
  6. tango.connectListener(framePairs, new OnTangoUpdateListener() {
       @Override
       public void onPoseAvailable(TangoPoseData pose) {
         // We could process pose data here, but we are not
         // directly using onPoseAvailable() for this app.
         logPose(pose);
       }

       @Override
       public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
         // Save the cloud and point data for later use.
         tangoPointCloudManager.updateXyzIj(xyzIj);
       }
       ...
  7. // Use default configuration for Tango Service, plus low latency IMU integration.
     TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
     config.putBoolean(TangoConfig.KEY_BOOLEAN_DEPTH, true);
     config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true);

     // NOTE: Low latency integration is necessary to achieve a precise alignment of
     // NOTE: virtual objects with the RGB image and produce a good AR effect.
     config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);

     // NOTE: These are extra motion tracking flags.
     config.putBoolean(TangoConfig.KEY_BOOLEAN_MOTIONTRACKING, true);
     config.putBoolean(TangoConfig.KEY_BOOLEAN_AUTORECOVERY, true);

     tango.connect(config);

  8. tango.connectListener(framePairs, new OnTangoUpdateListener() {

       public void onPoseAvailable(TangoPoseData pose) {
         // We could process pose data here, but we are not
         // directly using onPoseAvailable() for this app.
         logPose(pose);
       }

       public void onFrameAvailable(int cameraId) {
         // Check if the available frame is for the camera we want,
         // and update its frame on the view.
         if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
           // Mark a camera frame as available for rendering in the OpenGL thread.
           isFrameAvailableTangoThread.set(true);
           surfaceView.requestRender();
         }
       }

       public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
         // Save the cloud and point data for later use.
         tangoPointCloudManager.updateXyzIj(xyzIj);
       }

       public void onTangoEvent(TangoEvent event) {
         // Information about events that occur in the Tango system.
         // Allows you to monitor the health of services at runtime.
       }
     });
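
     The framePairs list passed to connectListener() isn't shown on the slides. A
     minimal sketch of what it might contain, assumed from typical Tango usage rather
     than taken from the deck:

       // Ask for device poses relative to the start-of-service frame.
       ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<>();
       framePairs.add(new TangoCoordinateFramePair(
           TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
           TangoPoseData.COORDINATE_FRAME_DEVICE));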
  9. public void onFrameAvailable(int cameraId) {
       // Check if the frame is for the right camera.
       if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
         // Mark a camera frame as available for rendering.
         isFrameAvailableTangoThread.set(true);
         surfaceView.requestRender();
       }
     }

  10. public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
        // Save the cloud and point data for later use.
        tangoPointCloudManager.updateXyzIj(xyzIj);
      }

  11. public boolean onTouch(View view, MotionEvent motionEvent) {
        if (motionEvent.getAction() == MotionEvent.ACTION_UP) {

          // Calculate click location in u,v (0;1) coordinates.
          float u = motionEvent.getX() / view.getWidth();
          float v = motionEvent.getY() / view.getHeight();

          try {
            float[] planeFitTransform;
            synchronized (this) {
              planeFitTransform = doFitPlane(u, v, rgbTimestampGlThread);
            }

            if (planeFitTransform != null) {
              // Update the position of the rendered cube
              // to the pose of the detected plane.
              renderer.updateObjectPose(planeFitTransform);
            }

          } catch (TangoException t) { ...
  12. /**
       * Use the TangoSupport library with point cloud data to calculate the plane
       * of the world feature pointed at by the given screen location.
       * It returns the transform of the fitted plane in a float array.
       */
      private float[] doFitPlane(float u, float v, double rgbTimestamp) {
        TangoXyzIjData xyzIj = tangoPointCloudManager.getLatestXyzIj();

        if (xyzIj == null) {
          return null;
        }
        ...

  13. // We need to calculate the transform between the color camera at the
      // time the user clicked, and the depth camera at the time the depth
      // cloud was acquired.
      TangoPoseData colorTdepthPose = TangoSupport.calculateRelativePose(
          rgbTimestamp, TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
          xyzIj.timestamp, TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH);

  14. // Perform plane fitting with the latest available point cloud data.
      IntersectionPointPlaneModelPair intersectionPointPlaneModelPair =
          TangoSupport.fitPlaneModelNearClick(
              xyzIj, tangoCameraIntrinsics, colorTdepthPose, u, v);

  15. // Get the transform from depth camera to OpenGL world at
      // the timestamp of the cloud.
      TangoMatrixTransformData transform =
          TangoSupport.getMatrixTransformAtTime(
              xyzIj.timestamp,
              TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
              TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH,
              TANGO_SUPPORT_ENGINE_OPENGL,
              TANGO_SUPPORT_ENGINE_TANGO);

      if (transform.statusCode == TangoPoseData.POSE_VALID) {
        float[] openGlTPlane = calculatePlaneTransform(
            intersectionPointPlaneModelPair.intersectionPoint,
            intersectionPointPlaneModelPair.planeModel, transform.matrix);

        return openGlTPlane;
      } else { ...
  16. public boolean onTouch(View view, MotionEvent motionEvent) {
        if (motionEvent.getAction() == MotionEvent.ACTION_UP) {

          // Calculate click location in u,v (0;1) coordinates.
          float u = motionEvent.getX() / view.getWidth();
          float v = motionEvent.getY() / view.getHeight();

          try {
            float[] planeFitTransform;
            synchronized (this) {
              planeFitTransform = doFitPlane(u, v, rgbTimestampGlThread);
            }

            if (planeFitTransform != null) {
              // Update the position of the rendered cube
              // to the pose of the detected plane.
              renderer.updateObjectPose(planeFitTransform);
            }

          } catch (TangoException t) { ...
  17. protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setContentView(R.layout.activity_main);
        logTextView = (TextView) findViewById(R.id.log_text);
        surfaceView = new RajawaliSurfaceView(this);

        renderer = new ControlRoomRenderer(this);
        surfaceView.setSurfaceRenderer(renderer);
        surfaceView.setOnTouchListener(this);
        ((LinearLayout) findViewById(R.id.parent)).addView(surfaceView);

        tangoPointCloudManager = new TangoPointCloudManager();
      }
  18. protected void initScene() {
        // A quad covering the whole background, where the
        // Tango color camera contents will be rendered.
        ScreenQuad backgroundQuad = new ScreenQuad();
        Material tangoCameraMaterial = new Material();
        tangoCameraMaterial.setColorInfluence(0);

        // We need to use Rajawali's {@code StreamingTexture} to set up
        // GL_TEXTURE_EXTERNAL_OES rendering.
        tangoCameraTexture =
            new StreamingTexture("camera", (StreamingTexture.ISurfaceListener) null);
        try {
          tangoCameraMaterial.addTexture(tangoCameraTexture);
          backgroundQuad.setMaterial(tangoCameraMaterial);
        } catch (ATexture.TextureException e) {
          Log.e(TAG, "Exception creating texture for RGB camera contents", e);
        }
        getCurrentScene().addChildAt(backgroundQuad, 0);
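
      The slides don't show how this StreamingTexture gets bound to the Tango color
      camera. A rough sketch of the usual pattern, run on the GL thread; the
      getTextureId() accessor and the connectedTextureId field are assumptions, not
      part of the deck:

        // Bind the renderer's OpenGL texture to the Tango color camera once,
        // then update it whenever the callback flagged a new frame.
        if (connectedTextureId != renderer.getTextureId()) {
          tango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
              renderer.getTextureId());
          connectedTextureId = renderer.getTextureId();
        }
        if (isFrameAvailableTangoThread.compareAndSet(true, false)) {
          rgbTimestampGlThread = tango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
        }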

  19. // Add a directional light in an arbitrary direction.
      DirectionalLight light = new DirectionalLight(1, -0.5, -1);
      light.setColor(1, 1, 1);
      light.setPower(1.2f);
      light.setPosition(0, 10, 0);
      getCurrentScene().addLight(light);

      An example of diffuse, ambient lighting with shadows
  20. private Material buildMaterial(int color) {
        Material material = new Material();
        material.setColor(color);
        material.enableLighting(true);
        material.setDiffuseMethod(new DiffuseMethod.Lambert());
        material.setSpecularMethod(new SpecularMethod.Phong());
        return material;
      }

  21. // Build a sphere.
      sphere = new Sphere(0.25f, 20, 20);
      sphere.setMaterial(sphereMaterial);
      sphere.setPosition(0, 0, 0);
      sphere.setVisible(false);
      getCurrentScene().addChild(sphere);

  22. /**
       * Save the updated plane fit pose to update the AR object on the next render pass.
       * This is synchronized against concurrent access in the render loop above.
       */
      public synchronized void updateObjectPose(float[] planeFitTransform) {
        objectTransform = new Matrix4(planeFitTransform);
        objectPoseUpdated = true;
      }

  23. protected void onRender(long elapsedRealTime, double deltaTime) {
        // Update the AR object if necessary.
        // Synchronize against concurrent access with the setter below.
        synchronized (this) {
          if (objectPoseUpdated) {
            sphere.setPosition(objectTransform.getTranslation());
            sphere.setOrientation(new Quaternion().fromMatrix(objectTransform).conjugate());
            sphere.moveForward(0.25f);
            sphere.setVisible(true);
            objectPoseUpdated = false;
          }
        }
        super.onRender(elapsedRealTime, deltaTime);
      }
  24. public void updateRenderCameraPose(TangoPoseData cameraPose) {
        float[] translation = cameraPose.getTranslationAsFloats();
        float[] rotation = cameraPose.getRotationAsFloats();

        getCurrentCamera().setPosition(translation[0], translation[1], translation[2]);
        Quaternion quaternion = new Quaternion(rotation[3], rotation[0], rotation[1], rotation[2]);
        getCurrentCamera().setRotation(quaternion.conjugate());
      }
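
      The slides don't show where cameraPose comes from. One plausible way to drive this
      method from the GL thread, sketched as an assumption rather than the speaker's code
      (depending on your scene's coordinate conventions, an extra Tango-to-OpenGL
      transform may be needed):

        TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(
            TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
            TangoPoseData.COORDINATE_FRAME_DEVICE);
        TangoPoseData devicePose = tango.getPoseAtTime(rgbTimestampGlThread, framePair);
        if (devicePose.statusCode == TangoPoseData.POSE_VALID) {
          renderer.updateRenderCameraPose(devicePose);
        }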

  25. /**
       * Sets the projection matrix for the scene camera to match the
       * parameters of the color camera, provided by the {@code TangoCameraIntrinsics}.
       */
      public void setProjectionMatrix(TangoCameraIntrinsics intrinsics) {
        Matrix4 projectionMatrix = ScenePoseCalculator.calculateProjectionMatrix(
            intrinsics.width, intrinsics.height,
            intrinsics.fx, intrinsics.fy, intrinsics.cx, intrinsics.cy);
        getCurrentCamera().setProjectionMatrix(projectionMatrix);
      }
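
      The intrinsics themselves can be queried from the Tango service once it is
      connected; a brief usage sketch (assumed, not from the deck):

        TangoCameraIntrinsics intrinsics =
            tango.getCameraIntrinsics(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
        renderer.setProjectionMatrix(intrinsics);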

  26. "Wire frame" by —Wapcaplet [1]. Licensed under CC BY-SA 3.0

    via Commons - https://commons.wikimedia.org/wiki/File:Wire_frame.svg#/media/File:Wire_frame.svg
  27. private Object3D buildOBJ() {
        Object3D o;
        LoaderOBJ objParser = new LoaderOBJ(
            mContext.getResources(), mTextureManager, R.raw.simple_tree_obj);
        try {
          objParser.parse();
        } catch (ParsingException e) {
          e.printStackTrace();
        }

        o = objParser.getParsedObject();
        o.setPosition(0, -8, -1);

        getCurrentScene().addChild(o);

  28. How to Build a Cardboard App™ (the 3 minute edition)
  29. public class RajaVrDemoActivity extends GvrActivity {
        /** Sets the view to our GvrView and initializes GvrView. */
        public void onCreate(Bundle savedInstanceState) {
          super.onCreate(savedInstanceState);

          // Initialize Gvr.
          setContentView(R.layout.common_ui);

          RajaVrView gvrView = (RajaVrView) findViewById(R.id.gvr_view);

          gvrView.setEGLConfigChooser(8, 8, 8, 8, 16, 8);
          gvrView.setRenderer(new DemoVRRenderer(this));
          gvrView.setTransitionViewEnabled(true);
          setGvrView(gvrView);
        }
  30. // Add a directional light in an arbitrary direction.
      DirectionalLight light = new DirectionalLight(1, -0.5, -1);
      light.setColor(1, 1, 1);
      light.setPower(1.2f);
      light.setPosition(0, 10, 0);
      getCurrentScene().addLight(light);

      // Set up a material.
      Material sphereMaterial = buildMaterial(Color.GREEN);

      // Build a sphere.
      sphere = new Sphere(0.25f, 20, 20);
      sphere.setMaterial(sphereMaterial);
      sphere.setPosition(0, 0, 0);
      sphere.setVisible(false);
      getCurrentScene().addChild(sphere);