Upgrade to Pro — share decks privately, control downloads, hide ads and more …

360 AnDev 2016 - Developing Apps for Project Tango

360 AnDev 2016 - Developing Apps for Project Tango

Starting an App with a Virtual or Augmented reality component is a bit daunting for most. When starting out, frameworks like Unity3D or Unreal Engine simplify a great many things.

But, from an Android developer perspective, you'll need to forget all about the nice APIs and libraries you're used to. You'll need to shoehorn your business logic into frameworks that are designed to build games first, not apps — not a great proposition.

Let’s explore the path less traveled together! In this session, we’ll see how you can build VR and AR applications, in Java and Android Studio, thanks to Project Tango’s Java SDKs, plus a few helpful 3D graphics libraries.

Etienne Caron

August 05, 2016
Tweet

More Decks by Etienne Caron

Other Decks in Programming

Transcript

  1. Title Text
    Body Level One
    Body Level Two
    Body Level Three
    Body Level Four
    Body Level Five
    DEVELOPING APPS FOR
    PROJECT TANGO

    View Slide

  2. 2

    ETIENNE CARON

    View Slide

  3. View Slide

  4. View Slide

  5. View Slide

  6. View Slide

  7. Use case ideas
    • Real estate
    • Interior decoration
    • In-store analytics
    • VR content production
    • Augmented museum exhibits

    View Slide

  8. View Slide

  9. Motion Tracking

    View Slide

  10. protected void onResume() {

    super.onResume();

    if (!isConnected) {

    tango = new Tango(ControlRoomActivity.this, () -> {

    // Runs on a Tango service thread once the service is bound.
    // Lock on the activity so connection setup cannot interleave with
    // the synchronized disconnect path in onPause() — without this,
    // isConnected/tango are raced between the two threads.
    synchronized (ControlRoomActivity.this) {

    try {

    TangoSupport.initialize();

    connectTango();

    isConnected = true;

    } catch (TangoOutOfDateException e) {

    // The device's Tango core is older than the SDK linked against.
    Log.e(TAG, getString(R.string.exception_out_of_date), e);

    }

    }

    });

    }

    }

    View Slide

  11. protected void onPause() {

    super.onPause();

    // Mirror of onResume(): tear down under the activity lock so the
    // async Tango ready-callback cannot connect mid-disconnect.
    synchronized (this) {

    if (isConnected) {

    // Release the color camera first, then the service connection.
    tango.disconnectCamera(TANGO_CAMERA_COLOR);

    tango.disconnect();

    isConnected = false;

    }

    }

    }

    View Slide

  12. tango.connectListener(framePairs, new OnTangoUpdateListener() {

    @Override

    public void onPoseAvailable(TangoPoseData pose) {

    // We could process pose data here, but we are not

    // directly using onPoseAvailable() for this app.

    logPose(pose);

    }
    ...

    View Slide

  13. tango.connectListener(framePairs, new OnTangoUpdateListener() {

    @Override

    public void onPoseAvailable(TangoPoseData pose) {

    // We could process pose data here, but we are not

    // directly using onPoseAvailable() for this app.

    logPose(pose);

    }
    @Override

    public void onXyzIjAvailable(TangoXyzIjData xyzIj) {

    // Save the cloud and point data for later use.

    tangoPointCloudManager.updateXyzIj(xyzIj);

    }

    ...

    View Slide

  14. Minimum Effective Dose

    View Slide

  15. Euclidean Space
    "Coord system CA 0" by Jorge Stolfii - Own work.

    View Slide

  16. Vertex
    •(x,y,z)

    View Slide

  17. View Slide

  18. Depth Perception

    View Slide

  19. View Slide

  20. View Slide

  21. View Slide

  22. // Use default configuration for Tango Service, plus low latency IMU integration.

    TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);

    // Enable depth sensing and the color camera feed on top of the defaults.
    config.putBoolean(TangoConfig.KEY_BOOLEAN_DEPTH, true);

    config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true);


    // NOTE: Low latency integration is necessary to achieve a precise alignment of

    // NOTE: virtual objects with the RGB image and produce a good AR effect.

    config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);


    // NOTE: These are extra motion tracking flags.

    config.putBoolean(TangoConfig.KEY_BOOLEAN_MOTIONTRACKING, true);

    // Auto-recovery lets motion tracking re-initialize after it is lost.
    config.putBoolean(TangoConfig.KEY_BOOLEAN_AUTORECOVERY, true);


    tango.connect(config);

  23. tango.connectListener(framePairs, new OnTangoUpdateListener() {

    // @Override restored on each callback, matching the listener shown on
    // slides 12-13; it lets the compiler catch signature drift against
    // the OnTangoUpdateListener interface.
    @Override
    public void onPoseAvailable(TangoPoseData pose) {

    // We could process pose data here, but we are not

    // directly using onPoseAvailable() for this app.

    logPose(pose);

    }


    @Override
    public void onFrameAvailable(int cameraId) {

    // Check if the frame available is for the camera we want and update its frame on the view.

    if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {

    // Mark a camera frame is available for rendering in the OpenGL thread

    isFrameAvailableTangoThread.set(true);

    surfaceView.requestRender();

    }

    }


    @Override
    public void onXyzIjAvailable(TangoXyzIjData xyzIj) {

    // Save the cloud and point data for later use.

    tangoPointCloudManager.updateXyzIj(xyzIj);

    }


    @Override
    public void onTangoEvent(TangoEvent event) {

    // Information about events that occur in the Tango system.

    // Allows you to monitor the health of services at runtime.

    }

    });


  24. public void onFrameAvailable(int cameraId) {

    // Callback from the Tango service: a new camera frame is ready.
    // Check if frame is for the right camera

    if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {

    // Mark a camera frame is available for rendering

    isFrameAvailableTangoThread.set(true);

    // Wake the GL thread; the render pass consumes the flag and
    // refreshes the camera background (render-on-demand mode).
    surfaceView.requestRender();

    }

    }

  25. public void onXyzIjAvailable(TangoXyzIjData xyzIj) {

    // Depth callback: runs on a Tango service thread, so hand the point
    // cloud to the manager for later use on other threads (the support
    // library's manager is presumably thread-safe — see TangoSupport docs).
    // Save the cloud and point data for later use.

    tangoPointCloudManager.updateXyzIj(xyzIj);

    }

  26. View Slide

  27. public boolean onTouch(View view, MotionEvent motionEvent) {

    if (motionEvent.getAction() == MotionEvent.ACTION_UP) {


    // Calculate click location in u,v (0;1) coordinates.

    float u = motionEvent.getX() / view.getWidth();

    float v = motionEvent.getY() / view.getHeight();


    try {

    float[] planeFitTransform;

    synchronized (this) {

    planeFitTransform = doFitPlane(u, v, rgbTimestampGlThread);

    }


    if (planeFitTransform != null) {

    // Update the position of the rendered cube
    // to the pose of the detected plane

    renderer.updateObjectPose(planeFitTransform);

    }


    } catch (TangoException t) {
    ...

    View Slide

  28. /**

     * Uses the TangoSupport library with point cloud data to calculate
     * the plane

     * of the world feature pointed at the location the camera is looking.

     * Returns the transform of the fitted plane as a float array, or
     * {@code null} when no point cloud has been received yet.

     */

     private float[] doFitPlane(float u, float v, double rgbTimestamp) {

    TangoXyzIjData xyzIj = tangoPointCloudManager.getLatestXyzIj();


    // No depth data yet — nothing to fit against.
    if (xyzIj == null) {

    return null;

    }

    ...


    View Slide

  29. // We need to calculate the transform between the color camera at the

    // time the user clicked, and the depth camera at the time the depth

    // cloud was acquired.

    TangoPoseData colorTdepthPose =

    TangoSupport.calculateRelativePose(

    rgbTimestamp, TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,

    xyzIj.timestamp, TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH);


    View Slide

  30. // Perform plane fitting with the latest available point cloud data.

    IntersectionPointPlaneModelPair intersectionPointPlaneModelPair =

    TangoSupport.fitPlaneModelNearClick(
    xyzIj, tangoCameraIntrinsics, colorTdepthPose, u, v);


    View Slide

  31. // Get the transform from depth camera to OpenGL world at

    // the timestamp of the cloud.

    TangoMatrixTransformData transform =

    TangoSupport.getMatrixTransformAtTime(

    xyzIj.timestamp,

    TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,

    TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH,

    TANGO_SUPPORT_ENGINE_OPENGL,

    TANGO_SUPPORT_ENGINE_TANGO);


    if (transform.statusCode == TangoPoseData.POSE_VALID) {

    float[] openGlTPlane = calculatePlaneTransform(

    intersectionPointPlaneModelPair.intersectionPoint,

    intersectionPointPlaneModelPair.planeModel, transform.matrix);


    return openGlTPlane;

    } else {
    ...

    View Slide

  32. View Slide

  33. View Slide

  34. public boolean onTouch(View view, MotionEvent motionEvent) {

    if (motionEvent.getAction() == MotionEvent.ACTION_UP) {


    // Calculate click location in u,v (0;1) coordinates.

    float u = motionEvent.getX() / view.getWidth();

    float v = motionEvent.getY() / view.getHeight();


    try {

    float[] planeFitTransform;

    synchronized (this) {

    planeFitTransform = doFitPlane(u, v, rgbTimestampGlThread);

    }


    if (planeFitTransform != null) {

    // Update the position of the rendered cube
    // to the pose of the detected plane

    renderer.updateObjectPose(planeFitTransform);

    }


    } catch (TangoException t) {
    ...

    View Slide

  35. View Slide

  36. View Slide

  37. protected void onCreate(Bundle savedInstanceState) {

    super.onCreate(savedInstanceState);


    // Inflate the layout, then attach a Rajawali GL surface at runtime.
    setContentView(R.layout.activity_main);

    logTextView = (TextView) findViewById(R.id.log_text);

    surfaceView = new RajawaliSurfaceView(this);


    // Wire the custom renderer and route touch events to this activity.
    renderer = new ControlRoomRenderer(this);

    surfaceView.setSurfaceRenderer(renderer);

    surfaceView.setOnTouchListener(this);

    ((LinearLayout)findViewById(R.id.parent)).addView(surfaceView);


    // Buffers point clouds delivered on Tango callback threads.
    tangoPointCloudManager = new TangoPointCloudManager();

    }

    View Slide

  38. protected void initScene() {

    // A quad covering the whole background, where the

    // Tango color camera contents will be rendered.

    ScreenQuad backgroundQuad = new ScreenQuad();

    Material tangoCameraMaterial = new Material();

    tangoCameraMaterial.setColorInfluence(0);


    // We need to use Rajawali's {@code StreamingTexture} to set up

    // GL_TEXTURE_EXTERNAL_OES rendering

    tangoCameraTexture =

    new StreamingTexture("camera",
    (StreamingTexture.ISurfaceListener) null);

    try {

    tangoCameraMaterial.addTexture(tangoCameraTexture);

    backgroundQuad.setMaterial(tangoCameraMaterial);

    } catch (ATexture.TextureException e) {

    Log.e(TAG, "Exception creating texture for RGB camera contents", e);

    }

    getCurrentScene().addChildAt(backgroundQuad, 0);


    View Slide

  39. // Add a directional light in an arbitrary direction.

    DirectionalLight light = new DirectionalLight(1, -0.5, -1);

    light.setColor(1, 1, 1);

    light.setPower(1.2f);

    light.setPosition(0, 10, 0);

    getCurrentScene().addLight(light);
    An example of diffuse, ambient lighting with shadows

    View Slide

  40. private Material buildMaterial(int color) {

    Material material = new Material();

    material.setColor(color);

    material.enableLighting(true);

    material.setDiffuseMethod(new DiffuseMethod.Lambert());

    material.setSpecularMethod(new SpecularMethod.Phong());

    return material;

    }


    View Slide

  41. // Build a Sphere

    sphere = new Sphere(0.25f,20,20);

    sphere.setMaterial(sphereMaterial);

    sphere.setPosition(0, 0, 0);

    sphere.setVisible(false);

    getCurrentScene().addChild(sphere);


    View Slide

  42. /**

    * Save the updated plane fit pose to update the AR object on the next render pass.

    * This is synchronized against concurrent access in the render loop above.

    */

    public synchronized void updateObjectPose(float[] planeFitTransform) {

    // Copy into a Matrix4 and raise a flag; onRender() consumes the flag
    // on the GL thread under the same lock (synchronized on this).
    objectTransform = new Matrix4(planeFitTransform);

    objectPoseUpdated = true;

    }


    View Slide

  43. protected void onRender(long elapsedRealTime, double deltaTime) {

    // Update the AR object if necessary

    // Synchronize against concurrent access with the setter below.

    synchronized (this) {

    if (objectPoseUpdated) {

    // Apply the pending plane-fit pose published by updateObjectPose().
    sphere.setPosition(objectTransform.getTranslation());

    // NOTE(review): the conjugate presumably converts between the
    // transform's and the camera's rotation conventions — confirm
    // against the Rajawali Quaternion docs.
    sphere.setOrientation(
    new Quaternion().fromMatrix(objectTransform).conjugate());

    // Nudge along the sphere's forward axis — presumably so it rests
    // on the fitted plane instead of intersecting it; verify visually.
    sphere.moveForward(0.25f);

    sphere.setVisible(true);

    objectPoseUpdated = false;

    }

    }

    super.onRender(elapsedRealTime, deltaTime);

    }

    View Slide

  44. View Slide

  45. View Slide

  46. View Slide

  47. public void updateRenderCameraPose(TangoPoseData cameraPose) {

    // Sync the scene camera to the device pose so virtual content stays
    // registered with the real world.
    float[] translation = cameraPose.getTranslationAsFloats();

    float[] rotation = cameraPose.getRotationAsFloats();


    getCurrentCamera().setPosition(translation[0], translation[1], translation[2]);

    // Component reorder: the pose array is indexed (x, y, z, w) while this
    // constructor takes w first. The conjugate flips the rotation —
    // presumably Tango and the renderer use opposite conventions; confirm.
    Quaternion quaternion = new Quaternion(
    rotation[3], rotation[0], rotation[1], rotation[2]);

    getCurrentCamera().setRotation(quaternion.conjugate()); 

    }


    View Slide

  48. /**

    * Sets the projection matrix for the scene camera to match the
    * parameters of the color camera,

    * provided by the {@code TangoCameraIntrinsics}.

    */

    public void setProjectionMatrix(TangoCameraIntrinsics intrinsics) {

    // Using the physical camera's focal lengths (fx, fy) and principal
    // point (cx, cy) keeps rendered objects aligned with the video feed.
    Matrix4 projectionMatrix = ScenePoseCalculator.calculateProjectionMatrix(

    intrinsics.width, intrinsics.height,

    intrinsics.fx, intrinsics.fy, intrinsics.cx, intrinsics.cy);

    getCurrentCamera().setProjectionMatrix(projectionMatrix);

    }


    View Slide

  49. "Wire frame" by —Wapcaplet
    [1]. Licensed under CC BY-SA 3.0 via Commons - https://commons.wikimedia.org/wiki/File:Wire_frame.svg#/media/File:Wire_frame.svg

    View Slide

  50. View Slide

  51. View Slide

  52. View Slide

  53. The Demo

    View Slide

  54. private Object3D buildOBJ() {

    Object3D o ;

    LoaderOBJ objParser = new LoaderOBJ(
    mContext.getResources(), mTextureManager,
    R.raw.simple_tree_obj);

    try {

    objParser.parse();

    } catch (ParsingException e) {

    e.printStackTrace();

    }


    o = objParser.getParsedObject();

    o.setPosition(0,-8,-1);


    getCurrentScene().addChild(o);


    View Slide

  55. Animation3D anim = new RotateOnAxisAnimation(Vector3.Axis.Y, 360);

    anim.setDurationMilliseconds(16000);

    anim.setRepeatMode(Animation.RepeatMode.INFINITE);

    anim.setTransformable3D(o);

    getCurrentScene().registerAnimation(anim);

    anim.play();

    View Slide

  56. View Slide

  57. DEMO TIME!

    View Slide

  58. View Slide

  59. View Slide

  60. How to Build a Cardboard App™
    The 3 minute edition
    Body Level One
    Body Level Two
    Body Level Three
    Body Level Four
    Body Level Five

    View Slide

  61. public class RajaVrDemoActivity extends GvrActivity {

    /** Sets the view to our GvrView and initializes GvrView */

    public void onCreate(Bundle savedInstanceState) {

    super.onCreate(savedInstanceState);


    // Initialize Gvr

    setContentView(R.layout.common_ui);


    RajaVrView gvrView = (RajaVrView) findViewById(R.id.gvr_view);


    // EGL config: RGBA8888 color, 16-bit depth, 8-bit stencil.
    gvrView.setEGLConfigChooser(8, 8, 8, 8, 16, 8);

    gvrView.setRenderer(new DemoVRRenderer(this));

    // Enables GVR's transition view (the "insert phone" screen) — see
    // the Google VR GvrView documentation.
    gvrView.setTransitionViewEnabled(true);

    setGvrView(gvrView);

    }

    View Slide

  62. View Slide

  63. // Add a directional light in an arbitrary direction.

    DirectionalLight light = new DirectionalLight(1, -0.5, -1);

    light.setColor(1, 1, 1);

    light.setPower(1.2f);

    light.setPosition(0, 10, 0);

    getCurrentScene().addLight(light);


    // Set-up a material

    Material sphereMaterial = buildMaterial(Color.GREEN);


    // Build a Sphere

    sphere = new Sphere(0.25f,20,20);

    sphere.setMaterial(sphereMaterial);

    sphere.setPosition(0, 0, 0);

    sphere.setVisible(false);

    getCurrentScene().addChild(sphere);


    View Slide

  64. < 200 Lines

    View Slide

  65. View Slide

  66. View Slide

  67. CODE: GITHUB.COM/KANAWISH
    TWITTER: @KANAWISH
    ETIENNE CARON

    View Slide