diff --git a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityActivity.java b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityActivity.java index 3c539063..c5884163 100644 --- a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityActivity.java +++ b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityActivity.java @@ -19,290 +19,318 @@ import android.app.Activity; import android.content.Intent; import android.content.pm.PackageInfo; +import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.graphics.Point; import android.opengl.GLSurfaceView; import android.os.Bundle; +import android.util.Log; import android.view.Display; import android.view.MotionEvent; import android.view.View; import android.view.WindowManager; +import android.widget.Button; import android.widget.TextView; import android.widget.Toast; -import java.util.Timer; -import java.util.TimerTask; -/** - * Main activity shows augmented reality scene. - */ -public class AugmentedRealityActivity extends Activity implements View.OnClickListener{ - - public static final String EXTRA_KEY_PERMISSIONTYPE = "PERMISSIONTYPE"; - public static final String EXTRA_VALUE_VIO = "MOTION_TRACKING_PERMISSION"; - public static final String EXTRA_VALUE_VIOADF = "ADF_LOAD_SAVE_PERMISSION"; - - private GLSurfaceView arView; - private TextView tangoPoseStatusText; - - private float[] touchStartPos = new float[2]; - private float[] touchCurPos = new float[2]; - private float touchStartDist = 0.0f; - private float touchCurDist = 0.0f; - private Point screenSize = new Point(); - private float screenDiagnal = 0.0f; - private String appVersionString; - private Timer timer; - private TimerTask refresher; - private int arElement = 0; - private int interactionType = 0; - - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - - getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, - WindowManager.LayoutParams.FLAG_FULLSCREEN); - - Intent intent1 = new Intent(); - intent1.setAction("android.intent.action.REQUEST_TANGO_PERMISSION"); - intent1.putExtra(EXTRA_KEY_PERMISSIONTYPE, EXTRA_VALUE_VIO); - startActivityForResult(intent1, 0); - - Intent intent2 = new Intent(); - intent2.setAction("android.intent.action.REQUEST_TANGO_PERMISSION"); - intent2.putExtra(EXTRA_KEY_PERMISSIONTYPE, EXTRA_VALUE_VIOADF); - startActivityForResult(intent2, 0); - - Display display = getWindowManager().getDefaultDisplay(); - display.getSize(screenSize); - screenDiagnal = (float) Math.sqrt(screenSize.x * screenSize.x - + screenSize.y * screenSize.y); - - setTitle(R.string.app_name); - setContentView(R.layout.activity_augmented_reality); - - arView = (GLSurfaceView) findViewById(R.id.surfaceview); - - AugmentedRealityView arViewRenderer = new AugmentedRealityView(); - arViewRenderer.activity = AugmentedRealityActivity.this; - arViewRenderer.isAutoRecovery = true; - arView.setRenderer(arViewRenderer); - arView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); - tangoPoseStatusText = (TextView) findViewById(R.id.debug_info); - - PackageInfo pInfo; - try { - pInfo = this.getPackageManager().getPackageInfo(this.getPackageName(), 0); - appVersionString = pInfo.versionName; - } catch (NameNotFoundException e) { 
- e.printStackTrace(); - appVersionString = " "; - } - // Create a timer to request a refresh at 30 Hz. - timer = new Timer(); - refresher = new TimerTask() { - public void run() { - arView.requestRender(); - }; - }; - // Wait 2 seconds, then refresh at a 33 ms period. - timer.scheduleAtFixedRate(refresher, 2000, 33); - - findViewById(R.id.reset).setOnClickListener(this); - findViewById(R.id.third).setOnClickListener(this); - findViewById(R.id.first).setOnClickListener(this); - findViewById(R.id.top).setOnClickListener(this); - findViewById(R.id.place).setOnClickListener(this); -/* - new Thread(new Runnable() { - @Override - public void run() { - while (true) { - try { - Thread.sleep(10); - - runOnUiThread(new Runnable() { - public void run() { - boolean isLocalized = TangoJNINative.getIsLocalized(); - if(isLocalized) { - findViewById(R.id.reset).setVisibility(View.GONE); - } else { - findViewById(R.id.reset).setVisibility(View.VISIBLE); - } - tangoPoseStatusText.setText( - "Service Version:" + TangoJNINative.getVersionNumber() + - "\nApp Version:" + appVersionString + - "\n" + TangoJNINative.getPoseString()); - } - }); - - } catch (Exception e) { - e.printStackTrace(); - } - } - } - }).start(); -*/ - } - @Override - public void onClick(View v) { - switch (v.getId()) { - case R.id.reset: - TangoJNINative.resetMotionTracking(); - break; - case R.id.place: - TangoJNINative.placeObject(); - break; - case R.id.first: - TangoJNINative.setCamera(0); - break; - case R.id.third: - TangoJNINative.setCamera(1); - break; - case R.id.top: - TangoJNINative.setCamera(2); - break; - } +// The main activity of the application which shows debug information and a +// glSurfaceView that renders graphic content. +public class AugmentedRealityActivity extends Activity implements + View.OnClickListener { + // The user has not given permission to use Motion Tracking functionality. + private static final int TANGO_NO_MOTION_TRACKING_PERMISSION = -3; + // The input argument is invalid. + private static final int TANGO_INVALID = -2; + // This error code denotes some sort of hard error occurred. + private static final int TANGO_ERROR = -1; + // This code indicates success. + private static final int TANGO_SUCCESS = 0; + + // Tag for debug logging. + private static final String TAG = + AugmentedRealityActivity.class.getSimpleName(); + + // Motion Tracking permission request action. + private static final String MOTION_TRACKING_PERMISSION_ACTION = + "android.intent.action.REQUEST_TANGO_PERMISSION"; + + // Key string for requesting and checking Motion Tracking permission. + private static final String MOTION_TRACKING_PERMISSION = + "MOTION_TRACKING_PERMISSION"; + + // The interval at which we'll update our UI debug text in milliseconds. + // This is the rate at which we query our native wrapper around the tango + // service for pose and event information. + private static final int kUpdateIntervalMs = 100; + + // Debug information text. + // Current frame's pose information. + private TextView mPoseData; + // Tango Core version. + private TextView mVersion; + // Application version. + private TextView mAppVersion; + // Latest Tango Event received. + private TextView mEvent; + + // Button for manually resetting motion tracking. Resetting motion tracking + // will restart the tracking pipeline, which also means the user will have to + // wait for re-initialization of the motion tracking system. 
+ private Button mMotionReset; + + // GLSurfaceView and its renderer, all of the graphic content is rendered + // through OpenGL ES 2.0 in the native code. + private AugmentedRealityRenderer mRenderer; + private GLSurfaceView mGLView; + + // A flag to check if the Tango Service is connected. This flag avoids the + // program attempting to disconnect from the service while it is not + // connected.This is especially important in the onPause() callback for the + // activity class. + private boolean mIsConnectedService = false; + + // Screen size for normalizing the touch input for orbiting the render camera. + private Point mScreenSize = new Point(); + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setTitle(R.string.app_name); + + getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + + // Querying screen size, used for computing the normalized touch point. + Display display = getWindowManager().getDefaultDisplay(); + display.getSize(mScreenSize); + + // Setting content view of this activity and getting the mIsAutoRecovery + // flag from StartActivity. + setContentView(R.layout.activity_augmented_reality); + + // Text views for displaying translation and rotation data + mPoseData = (TextView) findViewById(R.id.pose_data_textview); + + // Text views for displaying most recent Tango Event + mEvent = (TextView) findViewById(R.id.tango_event_textview); + + // Text views for Tango library versions + mVersion = (TextView) findViewById(R.id.version_textview); + + // Text views for application versions. + mAppVersion = (TextView) findViewById(R.id.appversion); + PackageInfo pInfo; + try { + pInfo = this.getPackageManager().getPackageInfo(this.getPackageName(), 0); + mAppVersion.setText(pInfo.versionName); + } catch (NameNotFoundException e) { + e.printStackTrace(); } - @Override - protected void onResume() { - super.onResume(); - arView.onResume(); - getWindow().getDecorView().setSystemUiVisibility( - View.SYSTEM_UI_FLAG_LAYOUT_STABLE - ); + // Buttons for selecting camera view and Set up button click listeners + findViewById(R.id.first_person_button).setOnClickListener(this); + findViewById(R.id.third_person_button).setOnClickListener(this); + findViewById(R.id.top_down_button).setOnClickListener(this); + + // Button to reset motion tracking + mMotionReset = (Button) findViewById(R.id.resetmotion); + + // OpenGL view where all of the graphics are drawn + mGLView = (GLSurfaceView) findViewById(R.id.gl_surface_view); + + // Set up button click listeners + mMotionReset.setOnClickListener(this); + + // Configure OpenGL renderer + mRenderer = new AugmentedRealityRenderer(); + mGLView.setRenderer(mRenderer); + mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + + // Initialize Tango Service, this function starts the communication + // between the application and Tango Service. + // The activity object is used for checking if the API version is outdated. + TangoJNINative.initialize(this); + + // UI thread handles the task of updating all debug text. + startUIThread(); + } + + @Override + protected void onResume() { + super.onResume(); + mGLView.onResume(); + + // In the onResume function, we first check if the MOTION_TRACKING_PERMISSION is + // granted to this application, if not, we send a permission intent to + // the Tango Service to launch the permission activity. + // Note that the onPause() callback will be called once the permission + // activity is foregrounded. 
+ if (!Util.hasPermission(getApplicationContext(), + MOTION_TRACKING_PERMISSION)) { + getMotionTrackingPermission(); + } else { + // If motion tracking permission is granted to the application, we can + // connect to the Tango Service. For this example, we'll be calling + // through the JNI to the C++ code that actually interfaces with the + // service. + + // Setup the configuration for the TangoService. + TangoJNINative.setupConfig(); + + // Connect the onPoseAvailable callback. + TangoJNINative.connectCallbacks(); + + // Connect to Tango Service. + // This function will start the Tango Service pipeline, in this case, + // it will start Motion Tracking. + TangoJNINative.connect(); + + // Take the TangoCore version number from Tango Service. + mVersion.setText(TangoJNINative.getVersionNumber()); + + // Set the connected service flag to true. + mIsConnectedService = true; } + } + + @Override + protected void onPause() { + super.onPause(); + mGLView.onPause(); + TangoJNINative.freeGLContent(); - @Override - protected void onPause() { - super.onPause(); - arView.onPause(); - TangoJNINative.disconnectService(); + // If the service is connected, we disconnect it here. + if (mIsConnectedService) { + mIsConnectedService = false; + // Disconnect from Tango Service, release all the resources that the app is + // holding from Tango Service. + TangoJNINative.disconnect(); } + } - protected void onDestroy() { - super.onDestroy(); - TangoJNINative.onDestroy(); + @Override + protected void onDestroy() { + super.onDestroy(); + if (mIsConnectedService) { + mIsConnectedService = false; + TangoJNINative.disconnect(); } + } - public void onRadioButtonClicked(View view) { - switch (view.getId()) { - case R.id.radio_world: - arElement = 1; - break; - case R.id.radio_cube: - arElement = 2; - break; - case R.id.radio_grid: - arElement = 3; - break; - case R.id.radio_fx: - arElement = 4; - break; - } + @Override + public void onClick(View v) { + // Handle button clicks. + switch (v.getId()) { + case R.id.first_person_button: + TangoJNINative.setCamera(0); + break; + case R.id.top_down_button: + TangoJNINative.setCamera(2); + break; + case R.id.third_person_button: + TangoJNINative.setCamera(1); + break; + case R.id.resetmotion: + TangoJNINative.resetMotionTracking(); + break; + default: + Log.w(TAG, "Unknown button click"); + return; } + } - public void onDirectionButtonClicked(View view) { - switch (view.getId()) { - case R.id.radio_left: - interactionType = 1; - break; - case R.id.radio_right: - interactionType = 2; - break; - case R.id.radio_down: - interactionType = 3; - break; - case R.id.radio_up: - interactionType = 4; - break; - case R.id.radio_far: - interactionType = 5; - break; - case R.id.radio_near: - interactionType = 6; - break; - } - if (arElement != 0) { - TangoJNINative.updateARElement(arElement, interactionType); - } + @Override + public boolean onTouchEvent(MotionEvent event) { + // Pass the touch event to the native layer for camera control. + // Single touch to rotate the camera around the device. + // Two fingers to zoom in and out. + int pointCount = event.getPointerCount(); + if (pointCount == 1) { + float normalizedX = event.getX(0) / mScreenSize.x; + float normalizedY = event.getY(0) / mScreenSize.y; + TangoJNINative.onTouchEvent(1, + event.getActionMasked(), normalizedX, normalizedY, 0.0f, 0.0f); } + if (pointCount == 2) { + if (event.getActionMasked() == MotionEvent.ACTION_POINTER_UP) { + int index = event.getActionIndex() == 0 ? 
1 : 0; + float normalizedX = event.getX(index) / mScreenSize.x; + float normalizedY = event.getY(index) / mScreenSize.y; + TangoJNINative.onTouchEvent(1, + MotionEvent.ACTION_DOWN, normalizedX, normalizedY, 0.0f, 0.0f); + } else { + float normalizedX0 = event.getX(0) / mScreenSize.x; + float normalizedY0 = event.getY(0) / mScreenSize.y; + float normalizedX1 = event.getX(1) / mScreenSize.x; + float normalizedY1 = event.getY(1) / mScreenSize.y; + TangoJNINative.onTouchEvent(2, event.getActionMasked(), + normalizedX0, normalizedY0, normalizedX1, normalizedY1); + } + } + return true; + } - @Override - public boolean onTouchEvent(MotionEvent event) { - int pointCount = event.getPointerCount(); - if (pointCount == 1) { - switch (event.getActionMasked()) { - case MotionEvent.ACTION_DOWN: { - TangoJNINative.startSetCameraOffset(); - touchCurDist = 0.0f; - touchStartPos[0] = event.getX(0); - touchStartPos[1] = event.getY(0); - break; - } - case MotionEvent.ACTION_MOVE: { - touchCurPos[0] = event.getX(0); - touchCurPos[1] = event.getY(0); - - // Normalize to screen width. - float normalizedRotX = (touchCurPos[0] - touchStartPos[0]) - / screenSize.x; - float normalizedRotY = (touchCurPos[1] - touchStartPos[1]) - / screenSize.y; - - TangoJNINative.setCameraOffset(normalizedRotX, normalizedRotY, - touchCurDist / screenDiagnal); - break; - } - } - } - if (pointCount == 2) { - switch (event.getActionMasked()) { - case MotionEvent.ACTION_POINTER_DOWN: { - TangoJNINative.startSetCameraOffset(); - float absX = event.getX(0) - event.getX(1); - float absY = event.getY(0) - event.getY(1); - touchStartDist = (float) Math.sqrt(absX * absX + absY * absY); - break; - } - case MotionEvent.ACTION_MOVE: { - float absX = event.getX(0) - event.getX(1); - float absY = event.getY(0) - event.getY(1); - - touchCurDist = touchStartDist - - (float) Math.sqrt(absX * absX + absY * absY); - - TangoJNINative.setCameraOffset(0.0f, 0.0f, touchCurDist - / screenDiagnal); - break; - } - case MotionEvent.ACTION_POINTER_UP: { - int index = event.getActionIndex() == 0 ? 1 : 0; - touchStartPos[0] = event.getX(index); - touchStartPos[1] = event.getY(index); - break; - } - } - } - return true; + // Call the permission intent for the Tango Service to ask for motion tracking + // permissions. All permission types can be found here: + // https://developers.google.com/project-tango/apis/c/c-user-permissions + private void getMotionTrackingPermission() { + Intent intent = new Intent(); + intent.setAction(MOTION_TRACKING_PERMISSION_ACTION); + intent.putExtra("PERMISSIONTYPE", MOTION_TRACKING_PERMISSION); + + // After the permission activity is dismissed, we will receive a callback + // function onActivityResult() with user's result. + startActivityForResult(intent, 0); + } + + @Override + protected void onActivityResult (int requestCode, int resultCode, Intent data) { + // The result of the permission activity. + // + // Note that when the permission activity is dismissed, the + // MotionTrackingActivity's onResume() callback is called. As the + // TangoService is connected in the onResume() function, we do not call + // connect here. 
+ if (requestCode == 0) { + if (resultCode == RESULT_CANCELED) { + mIsConnectedService = false; + finish(); + } } - @Override - protected void onActivityResult(int requestCode, int resultCode, Intent data) { - if (requestCode == 0) { - if (resultCode == RESULT_CANCELED) { - Toast.makeText(this, - "Motion Tracking Permission Needed!", Toast.LENGTH_SHORT).show(); - finish(); - } - } - if (requestCode == 1) { - if (resultCode == RESULT_CANCELED) { - Toast.makeText(this, - "ADF Permission Needed!", Toast.LENGTH_SHORT).show(); - finish(); - } + } + + // Request render on the glSurfaceView. This function is called from the + // native code, and it is triggered from the onTextureAvailable callback from + // the Tango Service. + public void requestRender() { + mGLView.requestRender(); + } + + // UI thread for handling debug text changes. + private void startUIThread() { + new Thread(new Runnable() { + @Override + public void run() { + while (true) { + try { + Thread.sleep(kUpdateIntervalMs); + runOnUiThread(new Runnable() { + @Override + public void run() { + try { + mEvent.setText(TangoJNINative.getEventString()); + mPoseData.setText(TangoJNINative.getPoseString()); + } catch (Exception e) { + e.printStackTrace(); + } + } + }); + + } catch (Exception e) { + e.printStackTrace(); + } } - } + } + }).start(); + } } - diff --git a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityView.java b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityRenderer.java similarity index 54% rename from augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityView.java rename to augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityRenderer.java index 448c524a..ef8b2128 100644 --- a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityView.java +++ b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/AugmentedRealityRenderer.java @@ -20,27 +20,21 @@ import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; -/** - * AugmentedRealityView renders graphic content. - */ -public class AugmentedRealityView implements GLSurfaceView.Renderer { - - public boolean isAutoRecovery; - public AugmentedRealityActivity activity; - - public void onDrawFrame(GL10 gl) { - TangoJNINative.render(); - } +// AugmentedRealityRenderer renders graphic content. This includes the ground grid, +// camera frustum, camera axis, and trajectory based on the Tango device's pose. +public class AugmentedRealityRenderer implements GLSurfaceView.Renderer { + // Render loop of the Gl context. + public void onDrawFrame(GL10 gl) { + TangoJNINative.render(); + } - public void onSurfaceChanged(GL10 gl, int width, int height) { - TangoJNINative.connectTexture(); - TangoJNINative.connectService(); - TangoJNINative.setupViewport(width, height); - } + // Called when the surface size changes. + public void onSurfaceChanged(GL10 gl, int width, int height) { + TangoJNINative.setupGraphic(width, height); + } - public void onSurfaceCreated(GL10 gl, EGLConfig config) { - TangoJNINative.setupGraphic(); - TangoJNINative.initialize(activity); - TangoJNINative.setupConfig(isAutoRecovery); - } + // Called when the surface is created or recreated. 
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) { + TangoJNINative.initGlContent(); + } } diff --git a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/TangoJNINative.java b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/TangoJNINative.java index 95b352c3..252b34e5 100644 --- a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/TangoJNINative.java +++ b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/TangoJNINative.java @@ -13,52 +13,66 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.projecttango.experiments.nativeaugmentedreality; -/** - * Interfaces between C and Java. - */ +// Interfaces between native C++ code and Java code. public class TangoJNINative { - static { - System.loadLibrary("augmented_reality_jni_example"); - } - - public static native int initialize(AugmentedRealityActivity activity); + static { + System.loadLibrary("augmented_reality_jni_example"); + } - public static native void setupConfig(boolean isAutoRecovery); + // Initialize the Tango Service, this function starts the communication + // between the application and Tango Service. + // The activity object is used for checking if the API version is outdated. + public static native int initialize(AugmentedRealityActivity activity); - public static native void connectTexture(); + // Setup the configuration file of the Tango Service. We are also setting up + // the auto-recovery option from here. + public static native int setupConfig(); - public static native int connectService(); + // Connect the onPoseAvailable callback. + public static native int connectCallbacks(); - public static native void disconnectService(); + // Connect to the Tango Service. + // This function will start the Tango Service pipeline, in this case, it will + // start Motion Tracking. + public static native int connect(); - public static native void onDestroy(); + // Disconnect from the Tango Service, release all the resources that the app is + // holding from the Tango Service. + public static native void disconnect(); - public static native void setupGraphic(); + // Release all OpenGL resources that are allocated from the program. + public static native void freeGLContent(); - public static native void setupViewport(int width, int height); + // Allocate OpenGL resources for rendering. + public static native void initGlContent(); - public static native void render(); + // Setup the view port width and height. + public static native void setupGraphic(int width, int height); - public static native void setCamera(int cameraIndex); + // Main render loop. + public static native void render(); - public static native void resetMotionTracking(); + // Set the render camera's viewing angle: + // first person, third person, or top down. + public static native void setCamera(int cameraIndex); - public static native byte updateStatus(); + // Explicitly reset motion tracking and restart the pipeline. + // Note that this will cause motion tracking to re-initialize. + public static native void resetMotionTracking(); - public static native String getPoseString(); + // Get the latest pose string from our application for display in our debug UI. 
+ public static native String getPoseString(); - public static native String getVersionNumber(); - - public static native boolean getIsLocalized(); + // Get the latest event string from our application for display in our debug UI. + public static native String getEventString(); - public static native void updateARElement(int arElement, int interactionType); + // Get the TangoCore version from our application for display in our debug UI. + public static native String getVersionNumber(); - public static native float startSetCameraOffset(); - - public static native float setCameraOffset(float rotX, float rotY, float zDistance); - - public static native void placeObject(); + // Pass touch events to the native layer. + public static native void onTouchEvent(int touchCount, int event0, + float x0, float y0, float x1, float y1); } - diff --git a/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/Util.java b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/Util.java new file mode 100644 index 00000000..78751710 --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/java/com/projecttango/experiments/nativeaugmentedreality/Util.java @@ -0,0 +1,46 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.projecttango.experiments.nativeaugmentedreality; + +import android.util.Log; +import android.content.Context; +import android.database.Cursor; +import android.net.Uri; + +// Util class provides handy utility functions. +public class Util { + + // Checks if the calling app has the specified permission. + // It is recommended that an app check if it has a permission before trying + // to request it; this will save time by avoiding re-requesting permissions + // that have already been granted. + + // @param context The context of the calling app. + // @param permissionType The type of permission to request; either + // PERMISSIONTYPE_MOTION_TRACKING or PERMISSIONTYPE_ADF_LOAD_SAVE. + // @return boolean Whether or not the permission was already granted. 
+ public static boolean hasPermission(Context context, String permissionType){ + Uri uri = Uri.parse("content://com.google.atap.tango.PermissionStatusProvider/" + + permissionType); + Cursor cursor = context.getContentResolver().query(uri, null, null, null, null); + if (cursor == null) { + return false; + } else { + return true; + } + } +} \ No newline at end of file diff --git a/augmented-reality-jni-example/app/src/main/jni/Android.mk b/augmented-reality-jni-example/app/src/main/jni/Android.mk index 20299f87..68a2a88c 100644 --- a/augmented-reality-jni-example/app/src/main/jni/Android.mk +++ b/augmented-reality-jni-example/app/src/main/jni/Android.mk @@ -22,13 +22,17 @@ LOCAL_MODULE := libaugmented_reality_jni_example LOCAL_SHARED_LIBRARIES := tango_client_api LOCAL_CFLAGS := -std=c++11 -LOCAL_SRC_FILES := tango_augmented_reality.cpp \ - tango_data.cpp \ +LOCAL_SRC_FILES := augmented_reality_app.cc \ + jni_interface.cc \ + pose_data.cc \ + scene.cc \ + tango_event_data.cc \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/axis.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/camera.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/conversions.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/drawable_object.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/frustum.cpp \ + $(PROJECT_ROOT_FROM_JNI)/tango-gl/gesture_camera.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/grid.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/goal_marker.cpp \ $(PROJECT_ROOT_FROM_JNI)/tango-gl/line.cpp \ diff --git a/augmented-reality-jni-example/app/src/main/jni/augmented_reality_app.cc b/augmented-reality-jni-example/app/src/main/jni/augmented_reality_app.cc new file mode 100644 index 00000000..91b8a125 --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/augmented_reality_app.cc @@ -0,0 +1,387 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "tango-augmented-reality/augmented_reality_app.h" + +namespace { +const int kVersionStringLength = 27; + +// Scale frustum size for closer near clipping plane. +const float kFovScaler = 0.1f; + +// Far clipping plane of the AR camera. +const float kArCameraFarClippingPlane = 100.0f; + +// This function routes onTangoEvent callbacks to the application object for +// handling. +// +// @param context, context will be a pointer to a AugmentedRealityApp +// instance on which to call callbacks. +// @param event, TangoEvent to route to onTangoEventAvailable function. +void onTangoEventAvailableRouter(void* context, const TangoEvent* event) { + using namespace tango_augmented_reality; + AugmentedRealityApp* app = static_cast(context); + app->onTangoEventAvailable(event); +} + +// This function routes texture callbacks to the application object for +// handling. +// +// @param context, context will be a pointer to a AugmentedRealityApp +// instance on which to call callbacks. +// @param id, id of the updated camera.. 
+void onTextureAvailableRouter(void* context, TangoCameraId id) {
+  using namespace tango_augmented_reality;
+  AugmentedRealityApp* app = static_cast<AugmentedRealityApp*>(context);
+  app->onTextureAvailable(id);
+}
+}  // namespace
+
+namespace tango_augmented_reality {
+void AugmentedRealityApp::onTangoEventAvailable(const TangoEvent* event) {
+  std::lock_guard<std::mutex> lock(tango_event_mutex_);
+  tango_event_data_.UpdateTangoEvent(event);
+}
+
+void AugmentedRealityApp::onTextureAvailable(TangoCameraId id) {
+  if (id == TANGO_CAMERA_COLOR) {
+    RequestRender();
+  }
+}
+
+AugmentedRealityApp::AugmentedRealityApp() {}
+
+AugmentedRealityApp::~AugmentedRealityApp() {
+  TangoConfig_free(tango_config_);
+  JNIEnv* env;
+  java_vm_->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6);
+  env->DeleteGlobalRef(calling_activity_obj_);
+}
+
+int AugmentedRealityApp::TangoInitialize(JNIEnv* env, jobject caller_activity) {
+  // The first thing we need to do for any Tango enabled application is to
+  // initialize the service. We'll do that here, passing on the JNI environment
+  // and jobject corresponding to the Android activity that is calling us.
+  int ret = TangoService_initialize(env, caller_activity);
+  jclass cls = env->GetObjectClass(caller_activity);
+  on_demand_render_ = env->GetMethodID(cls, "requestRender", "()V");
+
+  calling_activity_obj_ =
+      reinterpret_cast<jobject>(env->NewGlobalRef(caller_activity));
+
+  return ret;
+}
+
+int AugmentedRealityApp::TangoSetupConfig() {
+  // Here, we'll configure the service to run in the way we'd want. For this
+  // application, we'll start from the default configuration
+  // (TANGO_CONFIG_DEFAULT). This enables basic motion tracking capabilities.
+  tango_config_ = TangoService_getConfig(TANGO_CONFIG_DEFAULT);
+  if (tango_config_ == nullptr) {
+    LOGE("AugmentedRealityApp: Failed to get the default config.");
+    return TANGO_ERROR;
+  }
+
+  // Set auto-recovery for motion tracking as requested by the user.
+  int ret = TangoConfig_setBool(tango_config_, "config_enable_auto_recovery",
+                                true);
+  if (ret != TANGO_SUCCESS) {
+    LOGE("AugmentedRealityApp: config_enable_auto_recovery() failed with error "
+         "code: %d", ret);
+    return ret;
+  }
+
+  // Enable color camera from config.
+  ret = TangoConfig_setBool(tango_config_, "config_enable_color_camera", true);
+  if (ret != TANGO_SUCCESS) {
+    LOGE(
+        "AugmentedRealityApp: config_enable_color_camera() failed with error "
+        "code: %d",
+        ret);
+    return ret;
+  }
+
+  // Low latency IMU integration enables aggressive integration of the latest
+  // inertial measurements to provide lower latency pose estimates. This will
+  // improve the AR experience.
+  ret = TangoConfig_setBool(tango_config_,
+                            "config_enable_low_latency_imu_integration", true);
+  if (ret != TANGO_SUCCESS) {
+    LOGE(
+        "AugmentedRealityApp: config_enable_low_latency_imu_integration() "
+        "failed with error code: %d",
+        ret);
+    return ret;
+  }
+
+  // Get the TangoCore version string from the service.
+  ret = TangoConfig_getString(
+      tango_config_, "tango_service_library_version",
+      const_cast<char*>(tango_core_version_string_.c_str()),
+      kVersionStringLength);
+  if (ret != TANGO_SUCCESS) {
+    LOGE(
+        "AugmentedRealityApp: get tango core version failed with error "
+        "code: %d",
+        ret);
+    return ret;
+  }
+
+  return ret;
+}
+
+int AugmentedRealityApp::TangoConnectCallbacks() {
+  // Attach onEventAvailable callback.
+  // The callback will be called after the service is connected.
+ int ret = TangoService_connectOnTangoEvent(onTangoEventAvailableRouter); + if (ret != TANGO_SUCCESS) { + LOGE("AugmentedRealityApp: Failed to connect to event callback with error" + "code: %d", ret); + return ret; + } + + return ret; +} + +// Connect to Tango Service, service will start running, and +// pose can be queried. +int AugmentedRealityApp::TangoConnect() { + TangoErrorType ret = TangoService_connect(this, tango_config_); + if (ret != TANGO_SUCCESS) { + LOGE("AugmentedRealityApp: Failed to connect to the Tango service with" + "error code: %d", ret); + return ret; + } + + ret = UpdateExtrinsics(); + if (ret != TANGO_SUCCESS) { + LOGE( + "AugmentedRealityApp: Failed to query sensor extrinsic with error " + "code: %d", + ret); + return ret; + } + return ret; +} + +void AugmentedRealityApp::TangoDisconnect() { + // When disconnecting from the Tango Service, it is important to make sure to + // free your configuration object. Note that disconnecting from the service, + // resets all configuration, and disconnects all callbacks. If an application + // resumes after disconnecting, it must re-register configuration and + // callbacks with the service. + TangoConfig_free(tango_config_); + tango_config_ = nullptr; + TangoService_disconnect(); +} + +void AugmentedRealityApp::TangoResetMotionTracking() { + main_scene_.ResetTrajectory(); + TangoService_resetMotionTracking(); +} + +void AugmentedRealityApp::InitializeGLContent() { + main_scene_.InitGLContent(); + + LOGI("jasonps: texture id = %d", main_scene_.GetVideoOverlayTextureId()); + + // Connect color camera texture. TangoService_connectTextureId expects a valid + // texture id from the caller, so we will need to wait until the GL content is + // properly allocated. + TangoErrorType ret = TangoService_connectTextureId( + TANGO_CAMERA_COLOR, main_scene_.GetVideoOverlayTextureId(), this, + onTextureAvailableRouter); + if (ret != TANGO_SUCCESS) { + LOGE( + "AugmentedRealityApp: Failed to connect the texture id with error" + "code: %d", + ret); + } +} + +void AugmentedRealityApp::SetViewPort(int width, int height) { + // Query intrinsics for the color camera from the Tango Service, because we + // want to match the virtual render camera's intrinsics to the physical + // camera, we will compute the actually projection matrix and the view port + // ratio for the render. 
+  TangoErrorType ret = TangoService_getCameraIntrinsics(
+      TANGO_CAMERA_COLOR, &color_camera_intrinsics_);
+  if (ret != TANGO_SUCCESS) {
+    LOGE(
+        "AugmentedRealityApp: Failed to get camera intrinsics with error "
+        "code: %d",
+        ret);
+  }
+
+  float image_width = static_cast<float>(color_camera_intrinsics_.width);
+  float image_height = static_cast<float>(color_camera_intrinsics_.height);
+  float focus_length = static_cast<float>(color_camera_intrinsics_.fx);
+
+  float image_plane_ratio = image_height / image_width;
+  float image_plane_distance = 2.0f * focus_length / image_width;
+
+  glm::mat4 projection_mat_ar = glm::frustum(
+      -1.0f * kFovScaler, 1.0f * kFovScaler, -image_plane_ratio * kFovScaler,
+      image_plane_ratio * kFovScaler, image_plane_distance * kFovScaler,
+      kArCameraFarClippingPlane);
+
+  main_scene_.SetFrustumScale(
+      glm::vec3(1.0f, image_plane_ratio, image_plane_distance));
+  main_scene_.SetCameraImagePlaneRatio(image_plane_ratio);
+  main_scene_.SetImagePlaneDistance(image_plane_distance);
+  main_scene_.SetARCameraProjectionMatrix(projection_mat_ar);
+
+  float screen_ratio = static_cast<float>(height) / static_cast<float>(width);
+  // In the following code, we place the view port at (0, 0) from the bottom
+  // left corner of the screen. By placing it at (0, 0), the view port may not
+  // be exactly centered on the screen. However, this won't affect AR
+  // visualization as the correct registration of AR objects relies on the
+  // aspect ratio of the screen and video overlay, but not the position of the
+  // view port.
+  //
+  // To place the view port in the center of the screen, please use the
+  // following code:
+  //
+  // if (image_plane_ratio < screen_ratio) {
+  //   glViewport(-(h / image_plane_ratio - w) / 2, 0,
+  //              h / image_plane_ratio, h);
+  // } else {
+  //   glViewport(0, -(w * image_plane_ratio - h) / 2, w,
+  //              w * image_plane_ratio);
+  // }
+
+  if (image_plane_ratio < screen_ratio) {
+    glViewport(0, 0, height / image_plane_ratio, height);
+  } else {
+    glViewport(0, 0, width, width * image_plane_ratio);
+  }
+  main_scene_.SetCameraType(tango_gl::GestureCamera::CameraType::kFirstPerson);
+}
+
+void AugmentedRealityApp::Render() {
+  double video_overlay_timestamp;
+  TangoErrorType status =
+      TangoService_updateTexture(TANGO_CAMERA_COLOR, &video_overlay_timestamp);
+
+  glm::mat4 color_camera_pose =
+      GetPoseMatrixAtTimestamp(video_overlay_timestamp);
+  color_camera_pose =
+      pose_data_.GetExtrinsicsAppliedOpenGLWorldFrame(color_camera_pose);
+  if (status != TANGO_SUCCESS) {
+    LOGE(
+        "AugmentedRealityApp: Failed to update video overlay texture with "
+        "error code: %d",
+        status);
+  }
+  main_scene_.Render(color_camera_pose);
+}
+
+void AugmentedRealityApp::FreeGLContent() { main_scene_.FreeGLContent(); }
+
+std::string AugmentedRealityApp::GetPoseString() {
+  std::lock_guard<std::mutex> lock(pose_mutex_);
+  return pose_data_.GetPoseDebugString();
+}
+
+std::string AugmentedRealityApp::GetEventString() {
+  std::lock_guard<std::mutex> lock(tango_event_mutex_);
+  return tango_event_data_.GetTangoEventString().c_str();
+}
+
+std::string AugmentedRealityApp::GetVersionString() {
+  return tango_core_version_string_.c_str();
+}
+
+void AugmentedRealityApp::SetCameraType(
+    tango_gl::GestureCamera::CameraType camera_type) {
+  main_scene_.SetCameraType(camera_type);
+}
+
+void AugmentedRealityApp::OnTouchEvent(int touch_count,
+                                       tango_gl::GestureCamera::TouchEvent event,
+                                       float x0, float y0, float x1, float y1) {
+  main_scene_.OnTouchEvent(touch_count, event, x0, y0, x1, y1);
+}
+
+glm::mat4
AugmentedRealityApp::GetPoseMatrixAtTimestamp(double timstamp) { + TangoPoseData pose_start_service_T_device; + TangoCoordinateFramePair frame_pair; + frame_pair.base = TANGO_COORDINATE_FRAME_START_OF_SERVICE; + frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE; + TangoErrorType status = TangoService_getPoseAtTime( + timstamp, frame_pair, &pose_start_service_T_device); + if (status != TANGO_SUCCESS) { + LOGE( + "AugmentedRealityApp: Failed to get transform between the Start of " + "service and device frames at timstamp %lf", + timstamp); + } + + { + std::lock_guard lock(pose_mutex_); + pose_data_.UpdatePose(&pose_start_service_T_device); + } + + if (pose_start_service_T_device.status_code != TANGO_POSE_VALID) { + return glm::mat4(1.0f); + } + return pose_data_.GetMatrixFromPose(pose_start_service_T_device); +} + +TangoErrorType AugmentedRealityApp::UpdateExtrinsics() { + TangoErrorType ret; + TangoPoseData pose_data; + TangoCoordinateFramePair frame_pair; + + // TangoService_getPoseAtTime function is used for query device extrinsics + // as well. We use timestamp 0.0 and the target frame pair to get the + // extrinsics from the sensors. + // + // Get device with respect to imu transformation matrix. + frame_pair.base = TANGO_COORDINATE_FRAME_IMU; + frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE; + ret = TangoService_getPoseAtTime(0.0, frame_pair, &pose_data); + if (ret != TANGO_SUCCESS) { + LOGE( + "PointCloudApp: Failed to get transform between the IMU frame and " + "device frames"); + return ret; + } + pose_data_.SetImuTDevice(pose_data_.GetMatrixFromPose(pose_data)); + + // Get color camera with respect to imu transformation matrix. + frame_pair.base = TANGO_COORDINATE_FRAME_IMU; + frame_pair.target = TANGO_COORDINATE_FRAME_CAMERA_COLOR; + ret = TangoService_getPoseAtTime(0.0, frame_pair, &pose_data); + if (ret != TANGO_SUCCESS) { + LOGE( + "PointCloudApp: Failed to get transform between the color camera frame " + "and device frames"); + return ret; + } + pose_data_.SetImuTColorCamera(pose_data_.GetMatrixFromPose(pose_data)); + return ret; +} + +void AugmentedRealityApp::RequestRender() { + JNIEnv* env; + java_vm_->GetEnv(reinterpret_cast(&env), JNI_VERSION_1_6); + env->CallVoidMethod(calling_activity_obj_, on_demand_render_); +} + +} // namespace tango_augmented_reality diff --git a/augmented-reality-jni-example/app/src/main/jni/jni_interface.cc b/augmented-reality-jni-example/app/src/main/jni/jni_interface.cc new file mode 100644 index 00000000..cb29cbad --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/jni_interface.cc @@ -0,0 +1,131 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define GLM_FORCE_RADIANS + +#include +#include + +static tango_augmented_reality::AugmentedRealityApp app; + +#ifdef __cplusplus +extern "C" { +#endif +jint JNI_OnLoad(JavaVM* vm, void*) { + app.SetJavaVM(vm); + return JNI_VERSION_1_6; +} + +JNIEXPORT jint JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_initialize( + JNIEnv* env, jobject, jobject activity) { + return app.TangoInitialize(env, activity); +} + +JNIEXPORT jint JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setupConfig( + JNIEnv*, jobject) { + return app.TangoSetupConfig(); +} + +JNIEXPORT jint JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_connect( + JNIEnv*, jobject) { + return app.TangoConnect(); +} + +JNIEXPORT jint JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_connectCallbacks( + JNIEnv*, jobject) { + int ret = app.TangoConnectCallbacks(); + return ret; +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_disconnect( + JNIEnv*, jobject) { + app.TangoDisconnect(); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_resetMotionTracking( + JNIEnv*, jobject) { + app.TangoResetMotionTracking(); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_initGlContent( + JNIEnv*, jobject) { + app.InitializeGLContent(); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setupGraphic( + JNIEnv*, jobject, jint width, jint height) { + app.SetViewPort(width, height); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_render( + JNIEnv*, jobject) { + app.Render(); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_freeGLContent( + JNIEnv*, jobject) { + app.FreeGLContent(); +} + +JNIEXPORT jstring JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getPoseString( + JNIEnv* env, jobject) { + return (env)->NewStringUTF(app.GetPoseString().c_str()); +} + +JNIEXPORT jstring JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getEventString( + JNIEnv* env, jobject) { + return (env)->NewStringUTF(app.GetEventString().c_str()); +} + +JNIEXPORT jstring JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getVersionNumber( + JNIEnv* env, jobject) { + return (env)->NewStringUTF(app.GetVersionString().c_str()); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setCamera( + JNIEnv*, jobject, int camera_index) { + using namespace tango_gl; + GestureCamera::CameraType cam_type = + static_cast(camera_index); + app.SetCameraType(cam_type); +} + +JNIEXPORT void JNICALL +Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_onTouchEvent( + JNIEnv*, jobject, int touch_count, int event, float x0, float y0, float x1, + float y1) { + using namespace tango_gl; + GestureCamera::TouchEvent touch_event = + static_cast(event); + app.OnTouchEvent(touch_count, touch_event, x0, y0, x1, y1); +} +#ifdef __cplusplus +} +#endif diff --git a/augmented-reality-jni-example/app/src/main/jni/pose_data.cc b/augmented-reality-jni-example/app/src/main/jni/pose_data.cc new file mode 100644 index 00000000..7c809bd7 --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/pose_data.cc @@ 
-0,0 +1,129 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "tango-augmented-reality/pose_data.h" + +namespace { +const float kMeterToMillimeter = 1000.0f; +} // namespace + +namespace tango_augmented_reality { + +PoseData::PoseData() {} + +PoseData::~PoseData() {} + +void PoseData::UpdatePose(const TangoPoseData* pose_data) { + cur_pose_ = *pose_data; + + if (prev_pose_.status_code != cur_pose_.status_code) { + // Reset pose counter when the status changed. + pose_counter_ = 0; + } + + // Increase pose counter. + ++pose_counter_; + FormatPoseString(); + prev_pose_ = cur_pose_; +} + +std::string PoseData::GetPoseDebugString() { return pose_string_; } + +glm::mat4 PoseData::GetLatestPoseMatrix() { + return GetMatrixFromPose(cur_pose_); +} + +glm::mat4 PoseData::GetExtrinsicsAppliedOpenGLWorldFrame( + const glm::mat4& pose_matrix) { + // This full multiplication is equal to: + // opengl_world_T_opengl_camera = + // opengl_world_T_start_service * + // start_service_T_device * + // device_T_imu * + // imu_T_color_camera * + // color_camera_T_opengl_camera; + // Note that color camera and depth camera are the same hardware, they share + // the same frame. + // + // More information about frame transformation can be found here: + // Frame of reference: + // https://developers.google.com/project-tango/overview/frames-of-reference + // Coordinate System Conventions: + // https://developers.google.com/project-tango/overview/coordinate-systems + return tango_gl::conversions::opengl_world_T_tango_world() * pose_matrix * + glm::inverse(GetImuTDevice()) * GetImuTColorCamera() * + tango_gl::conversions::color_camera_T_opengl_camera(); +} + +glm::mat4 PoseData::GetMatrixFromPose(const TangoPoseData& pose) { + // Convert pose data to vec3 for position and quaternion for orientation. 
+ // + // More information about frame transformation can be found here: + // Frame of reference: + // https://developers.google.com/project-tango/overview/frames-of-reference + // Coordinate System Conventions: + // https://developers.google.com/project-tango/overview/coordinate-systems + glm::vec3 translation = + glm::vec3(pose.translation[0], pose.translation[1], pose.translation[2]); + glm::quat rotation = glm::quat(pose.orientation[3], pose.orientation[0], + pose.orientation[1], pose.orientation[2]); + glm::mat4 matrix = + glm::translate(glm::mat4(1.0f), translation) * glm::mat4_cast(rotation); + return matrix; +} + +std::string PoseData::GetStringFromStatusCode(TangoPoseStatusType status) { + std::string ret_string; + switch (status) { + case TANGO_POSE_INITIALIZING: + ret_string = "initializing"; + break; + case TANGO_POSE_VALID: + ret_string = "valid"; + break; + case TANGO_POSE_INVALID: + ret_string = "invalid"; + break; + case TANGO_POSE_UNKNOWN: + ret_string = "unknown"; + break; + default: + ret_string = "status_code_invalid"; + break; + } + return ret_string; +} + +void PoseData::FormatPoseString() { + std::stringstream string_stream; + string_stream.setf(std::ios_base::fixed, std::ios_base::floatfield); + string_stream.precision(3); + string_stream << "status: " << GetStringFromStatusCode(cur_pose_.status_code) + << ", count: " << pose_counter_ << ", delta time (ms): " + << (cur_pose_.timestamp - prev_pose_.timestamp) * + kMeterToMillimeter << ", position (m): [" + << cur_pose_.translation[0] << ", " << cur_pose_.translation[1] + << ", " << cur_pose_.translation[2] << "]" + << ", orientation: [" << cur_pose_.orientation[0] << ", " + << cur_pose_.orientation[1] << ", " << cur_pose_.orientation[2] + << ", " << cur_pose_.orientation[3] << "]"; + pose_string_ = string_stream.str(); + string_stream.flush(); +} + +} //namespace tango_augmented_reality \ No newline at end of file diff --git a/augmented-reality-jni-example/app/src/main/jni/scene.cc b/augmented-reality-jni-example/app/src/main/jni/scene.cc new file mode 100644 index 00000000..9a3cc99a --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/scene.cc @@ -0,0 +1,169 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "tango-augmented-reality/scene.h" + +namespace { +// We want to represent the device properly with respect to the ground so we'll +// add an offset in z to our origin. We'll set this offset to 1.3 meters based +// on the average height of a human standing with a Tango device. This allows us +// to place a grid roughly on the ground for most users. +const glm::vec3 kHeightOffset = glm::vec3(0.0f, 0.0f, 0.0f); + +// Color of the motion tracking trajectory. +const tango_gl::Color kTraceColor(0.22f, 0.28f, 0.67f); + +// Color of the ground grid. +const tango_gl::Color kGridColor(0.85f, 0.85f, 0.85f); + +// Frustum scale. 
+const glm::vec3 kFrustumScale = glm::vec3(0.4f, 0.3f, 0.5f); + +// Some property for the AR marker. +const glm::quat kMarkerRotation = glm::quat(0.0f, 0.0f, 1.0f, 0.0f); +// The reason we put mark at 0.85f at Y is because the center of the marker +// object is not at the tip of the mark. +const glm::vec3 kMarkerPosition = glm::vec3(0.0f, 0.85f, -3.0f); +const glm::vec3 kMarkerScale = glm::vec3(0.05f, 0.05f, 0.05f); +const tango_gl::Color kMarkerColor(1.0f, 0.f, 0.f); +} // namespace + +namespace tango_augmented_reality { + +Scene::Scene() {} + +Scene::~Scene() {} + +void Scene::InitGLContent() { + // Allocating render camera and drawable object. + // All of these objects are for visualization purposes. + video_overlay_ = new tango_gl::VideoOverlay(); + gesture_camera_ = new tango_gl::GestureCamera(); + axis_ = new tango_gl::Axis(); + frustum_ = new tango_gl::Frustum(); + trace_ = new tango_gl::Trace(); + grid_ = new tango_gl::Grid(); + marker_ = new tango_gl::GoalMarker(); + + // Set the frustum scale to 4:3, this doesn't necessarily match the physical + // camera's aspect ratio, this is just for visualization purposes. + frustum_->SetScale(kFrustumScale); + + trace_->SetColor(kTraceColor); + grid_->SetColor(kGridColor); + grid_->SetPosition(-kHeightOffset); + + marker_->SetPosition(kMarkerPosition); + marker_->SetScale(kMarkerScale); + marker_->SetRotation(kMarkerRotation); + marker_->SetColor(kMarkerColor); + + gesture_camera_->SetCameraType( + tango_gl::GestureCamera::CameraType::kThirdPerson); +} + +void Scene::FreeGLContent() { + delete video_overlay_; + delete gesture_camera_; + delete axis_; + delete frustum_; + delete trace_; + delete grid_; + delete marker_; +} + +void Scene::SetupViewPort(int x, int y, int w, int h) { + if (h == 0) { + LOGE("Setup graphic height not valid"); + } + gesture_camera_->SetAspectRatio(static_cast(w) / + static_cast(h)); + glViewport(x, y, w, h); +} + +void Scene::Render(const glm::mat4& cur_pose_transformation) { + glEnable(GL_DEPTH_TEST); + glEnable(GL_CULL_FACE); + + glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + + glm::vec3 position = + glm::vec3(cur_pose_transformation[3][0], cur_pose_transformation[3][1], + cur_pose_transformation[3][2]); + + trace_->UpdateVertexArray(position); + + if (gesture_camera_->GetCameraType() == + tango_gl::GestureCamera::CameraType::kFirstPerson) { + // In first person mode, we directly control camera's motion. + gesture_camera_->SetTransformationMatrix(cur_pose_transformation); + + // If it's first person view, we will render the video overlay in full + // screen, so we passed identity matrix as view and projection matrix. + glDisable(GL_DEPTH_TEST); + video_overlay_->Render(glm::mat4(1.0f), glm::mat4(1.0f)); + } else { + // In third person or top down more, we follow the camera movement. + gesture_camera_->SetAnchorPosition(position); + + frustum_->SetTransformationMatrix(cur_pose_transformation); + // Set the frustum scale to 4:3, this doesn't necessarily match the physical + // camera's aspect ratio, this is just for visualization purposes. 
+ frustum_->SetScale( + glm::vec3(1.0f, camera_image_plane_ratio_, image_plane_distance_)); + frustum_->Render(ar_camera_projection_matrix_, + gesture_camera_->GetViewMatrix()); + + axis_->SetTransformationMatrix(cur_pose_transformation); + axis_->Render(ar_camera_projection_matrix_, + gesture_camera_->GetViewMatrix()); + + trace_->Render(ar_camera_projection_matrix_, + gesture_camera_->GetViewMatrix()); + video_overlay_->Render(ar_camera_projection_matrix_, + gesture_camera_->GetViewMatrix()); + } + glEnable(GL_DEPTH_TEST); + grid_->Render(ar_camera_projection_matrix_, gesture_camera_->GetViewMatrix()); + marker_->Render(ar_camera_projection_matrix_, + gesture_camera_->GetViewMatrix()); +} + +void Scene::SetCameraType(tango_gl::GestureCamera::CameraType camera_type) { + gesture_camera_->SetCameraType(camera_type); + if (camera_type == tango_gl::GestureCamera::CameraType::kFirstPerson) { + video_overlay_->SetParent(nullptr); + video_overlay_->SetScale(glm::vec3(1.0f, 1.0f, 1.0f)); + video_overlay_->SetPosition(glm::vec3(0.0f, 0.0f, 0.0f)); + video_overlay_->SetRotation(glm::quat(1.0f, 0.0f, 0.0f, 0.0f)); + } else { + video_overlay_->SetScale(glm::vec3(1.0f, camera_image_plane_ratio_, 1.0f)); + video_overlay_->SetRotation(glm::quat(1.0f, 0.0f, 0.0f, 0.0f)); + video_overlay_->SetPosition(glm::vec3(0.0f, 0.0f, -image_plane_distance_)); + video_overlay_->SetParent(axis_); + } +} + +void Scene::OnTouchEvent(int touch_count, + tango_gl::GestureCamera::TouchEvent event, float x0, + float y0, float x1, float y1) { + gesture_camera_->OnTouchEvent(touch_count, event, x0, y0, x1, y1); +} + +} // namespace tango_augmented_reality diff --git a/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/augmented_reality_app.h b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/augmented_reality_app.h new file mode 100644 index 00000000..05b6191c --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/augmented_reality_app.h @@ -0,0 +1,183 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TANGO_AUGMENTED_REALITY_AUGMENTED_REALITY_APP_H_ +#define TANGO_AUGMENTED_REALITY_AUGMENTED_REALITY_APP_H_ + +#include +#include + +#include // NOLINT +#include + +#include +#include +#include + + +namespace tango_augmented_reality { + +// AugmentedRealityApp handles the application lifecycle and resources. +class AugmentedRealityApp { + public: + // Constructor and deconstructor. + AugmentedRealityApp(); + ~AugmentedRealityApp(); + + // Initialize Tango Service, this function starts the communication + // between the application and Tango Service. + // The activity object is used for checking if the API version is outdated. + int TangoInitialize(JNIEnv* env, jobject caller_activity); + + // Setup the configuration file for the Tango Service. We'll also se whether + // we'd like auto-recover enabled. + int TangoSetupConfig(); + + // Connect the onPoseAvailable callback. 
+ int TangoConnectCallbacks(); + + // Connect to Tango Service. + // This function will start the Tango Service pipeline, in this case, it will + // start Motion Tracking. + int TangoConnect(); + + // Disconnect from Tango Service, release all the resources that the app is + // holding from Tango Service. + void TangoDisconnect(); + + // Explicitly reset motion tracking and restart the pipeline. + // Note that this will cause motion tracking to re-initialize. + void TangoResetMotionTracking(); + + // Tango service event callback function for pose data. Called when new events + // are available from the Tango Service. + // + // @param event: Tango event, caller allocated. + void onTangoEventAvailable(const TangoEvent* event); + + // Tango service texture callback. Called when the texture is updated. + // + // @param id: camera Id of the updated camera. + void onTextureAvailable(TangoCameraId id); + + // Allocate OpenGL resources for rendering, mainly initializing the Scene. + void InitializeGLContent(); + + // Setup the view port width and height. + void SetViewPort(int width, int height); + + // Main render loop. + void Render(); + + // Release all OpenGL resources that allocate from the program. + void FreeGLContent(); + + // Retrun pose debug string. + std::string GetPoseString(); + + // Retrun Tango event debug string. + std::string GetEventString(); + + // Retrun Tango Service version string. + std::string GetVersionString(); + + // Set render camera's viewing angle, first person, third person or top down. + // + // @param: camera_type, camera type includes first person, third person and + // top down + void SetCameraType(tango_gl::GestureCamera::CameraType camera_type); + + // Touch event passed from android activity. This function only supports two + // touches. + // + // @param: touch_count, total count for touches. + // @param: event, touch event of current touch. + // @param: x0, normalized touch location for touch 0 on x axis. + // @param: y0, normalized touch location for touch 0 on y axis. + // @param: x1, normalized touch location for touch 1 on x axis. + // @param: y1, normalized touch location for touch 1 on y axis. + void OnTouchEvent(int touch_count, tango_gl::GestureCamera::TouchEvent event, + float x0, float y0, float x1, float y1); + + // Cache the Java VM + // + // @JavaVM java_vm: the Java VM is using from the Java layer. + void SetJavaVM(JavaVM* java_vm) { java_vm_ = java_vm; } + + private: + // Get a pose in matrix format with extrinsics in OpenGl space. + // + // @param: timstamp, timestamp of the target pose. + // + // @return: pose in matrix format. + glm::mat4 GetPoseMatrixAtTimestamp(double timstamp); + + // Query sensor/camera extrinsic from the Tango Service, the extrinsic is only + // available after the service is connected. + // + // @return: error code. + TangoErrorType UpdateExtrinsics(); + + // Request the render function from Java layer. + void RequestRender(); + + // pose_data_ handles all pose onPoseAvailable callbacks, onPoseAvailable() + // in this object will be routed to pose_data_ to handle. + PoseData pose_data_; + + // Mutex for protecting the pose data. The pose data is shared between render + // thread and TangoService callback thread. + std::mutex pose_mutex_; + + // tango_event_data_ handles all Tango event callbacks, + // onTangoEventAvailable() in this object will be routed to tango_event_data_ + // to handle. 
+ TangoEventData tango_event_data_; + + // tango_event_data_ is share between the UI thread we start for updating + // debug + // texts and the TangoService event callback thread. We keep event_mutex_ to + // protect tango_event_data_. + std::mutex tango_event_mutex_; + + // main_scene_ includes all drawable object for visualizing Tango device's + // movement. + Scene main_scene_; + + // Tango configration file, this object is for configuring Tango Service setup + // before connect to service. For example, we set the flag + // config_enable_auto_recovery based user's input and then start Tango. + TangoConfig tango_config_; + + // Device color camera intrinsics, these intrinsics value is used for + // calculate the camera frustum and image aspect ratio. In the AR view, we + // want to match the virtual camera's intrinsics to the actual physical camera + // as close as possible. + TangoCameraIntrinsics color_camera_intrinsics_; + + // Tango service version string. + std::string tango_core_version_string_; + + // Cached Java VM, caller activity object and the request render method. These + // variables are used for on demand render request from the onTextureAvailable + // callback. + JavaVM* java_vm_; + jobject calling_activity_obj_; + jmethodID on_demand_render_; +}; +} // namespace tango_augmented_reality + +#endif // TANGO_AUGMENTED_REALITY_AUGMENTED_REALITY_APP_H_ diff --git a/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/pose_data.h b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/pose_data.h new file mode 100644 index 00000000..459bf11f --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/pose_data.h @@ -0,0 +1,113 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TANGO_AUGMENTED_REALITY_POSE_DATA_H_ +#define TANGO_AUGMENTED_REALITY_POSE_DATA_H_ + +#include +#include + +#include // NOLINT +#include +#include + +namespace tango_augmented_reality { + +// PoseData holds all pose related data. E.g. pose position, rotation and time- +// stamp. It also produce the debug information strings. +class PoseData { + public: + PoseData(); + ~PoseData(); + + // Update current pose and previous pose. + // + // @param pose: pose data of current frame. + void UpdatePose(const TangoPoseData* pose_data); + + // Compose the pose debug string. + // + // @return: pose debug strings for dispaly on Java activity. + std::string GetPoseDebugString(); + + // Get latest pose in matrix format with extrinsics in OpenGl space. + // + // @return: latest pose in matrix format. + glm::mat4 GetLatestPoseMatrix(); + + // @return: device frame with respect to IMU frame matrix. + glm::mat4 GetImuTDevice() { return imu_T_device_; } + + // Set device frame with respect to IMU frame matrix. + // @param: imu_T_device, imu_T_device_ matrix. 
+ void SetImuTDevice(const glm::mat4& imu_T_device) { + imu_T_device_ = imu_T_device; + } + + // @return: color camera frame with respect to IMU frame. + glm::mat4 GetImuTColorCamera() { return imu_T_color_camera_; } + + // Set color camera frame with respect to IMU frame matrix. + // @param: imu_T_color_camera, imu_T_color_camera_ matrix. + void SetImuTColorCamera(const glm::mat4& imu_T_color_camera) { + imu_T_color_camera_ = imu_T_color_camera; + } + + // Get pose transformation in OpenGL coordinate system. This function also + // applies sensor extrinsics transformation to the current pose. + // + // @param: pose, pose to be converted to matrix. + // + // @return: corresponding matrix of the pose data. + glm::mat4 GetMatrixFromPose(const TangoPoseData& pose); + + // Apply extrinsics and coordinate frame transformations to the matrix. + // This funciton will transform the passed in matrix into opengl world frame. + glm::mat4 GetExtrinsicsAppliedOpenGLWorldFrame(const glm::mat4& pose_matrix); + + private: + // Convert TangoPoseStatusType to string. + // + // @param: status, status code needs to be converted. + // + // @return: corresponding string based on status passed in. + std::string GetStringFromStatusCode(TangoPoseStatusType status); + + // Format the pose debug string based on current pose and previous pose data. + void FormatPoseString(); + + // Device frame with respect to IMU frame. + glm::mat4 imu_T_device_; + + // Color camera frame with respect to IMU frame. + glm::mat4 imu_T_color_camera_; + + // Pose data of current frame. + TangoPoseData cur_pose_; + + // prev_pose_, pose_counter_ and pose_debug_string_ are used for composing the + // debug string to display the useful information on screen. + TangoPoseData prev_pose_; + + // Debug pose string. + std::string pose_string_; + + // Pose counter for debug purpose. + size_t pose_counter_; +}; +} // namespace tango_augmented_reality + +#endif // TANGO_AUGMENTED_REALITY_POSE_DATA_H_ \ No newline at end of file diff --git a/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/scene.h b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/scene.h new file mode 100644 index 00000000..c696b660 --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/scene.h @@ -0,0 +1,158 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TANGO_AUGMENTED_REALITY_SCENE_H_ +#define TANGO_AUGMENTED_REALITY_SCENE_H_ + +#include +#include + +#include // NOLINT +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +namespace tango_augmented_reality { + +// Scene provides OpenGL drawable objects and renders them for visualization. +class Scene { + public: + // Constructor and destructor. + // + // Scene will need a reference to pose_data_ instance to get the device motion + // to render the camera frustum. 
+ Scene(); + ~Scene(); + + // Allocate OpenGL resources for rendering. + void InitGLContent(); + + // Release OpenGL resources allocated. + void FreeGLContent(); + + // Setup GL view port. + // @param: x, left of the screen. + // @param: y, bottom of the screen. + // @param: w, width of the screen. + // @param: h, height of the screen. + void SetupViewPort(int x, int y, int w, int h); + + // Render loop. + void Render(const glm::mat4& cur_pose_transformation); + + // Set render camera's viewing angle, first person, third person or top down. + // + // @param: camera_type, camera type includes first person, third person and + // top down + void SetCameraType(tango_gl::GestureCamera::CameraType camera_type); + + // Get video overlay texture id. + // @return: texture id of video overlay's texture. + GLuint GetVideoOverlayTextureId() { return video_overlay_->GetTextureId(); } + + // @return: AR render camera's image plane ratio. + float GetCameraImagePlaneRatio() { return camera_image_plane_ratio_; } + + // Set AR render camera's image plane ratio. + // @param: image plane ratio. + void SetCameraImagePlaneRatio(float ratio) { + camera_image_plane_ratio_ = ratio; + } + + // @return: AR render camera's image plane distance from the view point. + float GetImagePlaneDistance() { return image_plane_distance_; } + + // Set AR render camera's image plane distance from the view point. + // @param: distance, AR render camera's image plane distance from the view + // point. + void SetImagePlaneDistance(float distance) { + image_plane_distance_ = distance; + } + + // Set projection matrix of the AR view (first person view) + // @param: projection_matrix, the projection matrix. + void SetARCameraProjectionMatrix(const glm::mat4& projection_matrix) { + ar_camera_projection_matrix_ = projection_matrix; + } + + // Set the frustum render drawable object's scale. For the best visialization + // result, we set the camera frustum object's scale to the physical camera's + // aspect ratio. + // @param: scale, frustum's scale. + void SetFrustumScale(const glm::vec3& scale) { frustum_->SetScale(scale); } + + // Clear the Motion Tracking trajactory. + void ResetTrajectory() { trace_->ClearVertexArray(); } + + // Touch event passed from android activity. This function only support two + // touches. + // + // @param: touch_count, total count for touches. + // @param: event, touch event of current touch. + // @param: x0, normalized touch location for touch 0 on x axis. + // @param: y0, normalized touch location for touch 0 on y axis. + // @param: x1, normalized touch location for touch 1 on x axis. + // @param: y1, normalized touch location for touch 1 on y axis. + void OnTouchEvent(int touch_count, tango_gl::GestureCamera::TouchEvent event, + float x0, float y0, float x1, float y1); + + private: + // Video overlay drawable object to display the camera image. + tango_gl::VideoOverlay* video_overlay_; + + // Camera object that allows user to use touch input to interact with. + tango_gl::GestureCamera* gesture_camera_; + + // Device axis (in device frame of reference). + tango_gl::Axis* axis_; + + // Device frustum. + tango_gl::Frustum* frustum_; + + // Ground grid. + tango_gl::Grid* grid_; + + // Trace of pose data. + tango_gl::Trace* trace_; + + // A marker placed at (0.0f, 0.0f, -3.0f) location. + tango_gl::GoalMarker* marker_; + + // We use both camera_image_plane_ratio_ and image_plane_distance_ to compute + // the first person AR camera's frustum, these value is derived from actual + // physical camera instrinsics. 
+ // Aspect ratio of the color camera. + float camera_image_plane_ratio_; + + // Image plane distance from camera's origin view point. + float image_plane_distance_; + + // The projection matrix for the first person AR camera. + glm::mat4 ar_camera_projection_matrix_; +}; +} // namespace tango_augmented_reality + +#endif // TANGO_AUGMENTED_REALITY_SCENE_H_ diff --git a/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/tango_event_data.h b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/tango_event_data.h new file mode 100644 index 00000000..61ab4aeb --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/tango-augmented-reality/tango_event_data.h @@ -0,0 +1,56 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TANGO_AUGMENTED_REALITY_TANGO_EVENT_DATA_H_ +#define TANGO_AUGMENTED_REALITY_TANGO_EVENT_DATA_H_ + +#include +#include + +#include // NOLINT + +namespace tango_augmented_reality { + +// TangoEvent is handling the tango event callbacks (e.g, TooFewFeaturesTracked) +// Currently, we are just exposing the event through the debug text displayed on +// screen. But developers could take advantages of these events to handle the +// exception in a more user friendly method. +class TangoEventData { + public: + // Constructor and deconstructor. + TangoEventData(); + ~TangoEventData(); + + // Update current event string to the event passed in. + // In this application, we are just using these event for debug purpose, but + // in other application, developers could catch this event for exception + // handling. + // + // @param: event, TangoEvent in current frame. + void UpdateTangoEvent(const TangoEvent* event); + + // Clear event string. Set event_string_ to empty. + void ClearEventString(); + + // Get formated event string for debug dispaly purpose. + std::string GetTangoEventString(); + private: + // Current event string. + std::string event_string_; +}; +} // namespace tango_augmented_reality + +#endif // TANGO_AUGMENTED_REALITY_TANGO_EVENT_DATA_H_ \ No newline at end of file diff --git a/augmented-reality-jni-example/app/src/main/jni/tango_augmented_reality.cpp b/augmented-reality-jni-example/app/src/main/jni/tango_augmented_reality.cpp deleted file mode 100644 index fa1bd2ee..00000000 --- a/augmented-reality-jni-example/app/src/main/jni/tango_augmented_reality.cpp +++ /dev/null @@ -1,569 +0,0 @@ -/* - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define GLM_FORCE_RADIANS - -#include -#include - -#include "tango-gl/axis.h" -#include "tango-gl/camera.h" -#include "tango-gl/color.h" -#include "tango-gl/conversions.h" -#include "tango-gl/cube.h" -#include "tango-gl/frustum.h" -#include "tango-gl/grid.h" -#include "tango-gl/goal_marker.h" -#include "tango-gl/trace.h" -#include "tango-gl/util.h" -#include "tango-gl/video_overlay.h" - -#include "tango_data.h" - -// Render camera's parent transformation. -// This object is a pivot transformtion for render camera to rotate around. -tango_gl::Transform* cam_parent_transform; - -// Render camera. -tango_gl::Camera* cam; - -// Coordinate axis for display. -tango_gl::Axis* axis; - -// Color Camera frustum. -tango_gl::Frustum* frustum; - -// Ground grid. -tango_gl::Grid* ground; - -// Trace of pose data. -tango_gl::Trace* trace; - -tango_gl::GoalMarker* marker; - -// Color camera preview. -tango_gl::VideoOverlay* video_overlay; - -// Single finger touch positional values. -// First element in the array is x-axis touching position. -// Second element in the array is y-axis touching position. -float cam_start_angle[2]; -float cam_cur_angle[2]; - -// Double finger touch distance value. -float cam_start_dist; -float cam_cur_dist; - -enum CameraType { - FIRST_PERSON = 0, - THIRD_PERSON = 1, - TOP_DOWN = 2 -}; -CameraType camera_type; - -// Render camera observation distance in third person camera mode. -const float kThirdPersonCameraDist = 7.0f; - -// Render camera observation distance in top down camera mode. -const float kTopDownCameraDist = 5.0f; - -// Zoom in speed. -const float kZoomSpeed = 10.0f; - -// Min/max clamp value of camera observation distance. -const float kCamViewMinDist = 1.0f; -const float kCamViewMaxDist = 100.f; - -// FOV set up values. -// Third and top down camera's FOV is 65 degrees. -// First person is color camera's FOV. -const float kFov = 65.0f; - -// Scale frustum size for closer near clipping plane. -const float kFovScaler = 0.1f; - -// Increment value each time move AR elements. -const float kArElementIncrement = 0.05f; - -const float kZero = 0.0f; -const glm::vec3 kZeroVec3 = glm::vec3(0.0f, 0.0f, 0.0f); -const glm::quat kZeroQuat = glm::quat(1.0f, 0.0f, 0.0f, 0.0f); - -// AR grid rotation, 90 degrees around x axis. -const glm::quat kArGridRotation = glm::quat(0.70711f, -0.70711f, 0.0f, 0.0f); -const glm::quat kMarkerRotation = glm::quat(0.f, 0.f, 1.0f, 0.f); -const glm::vec3 kMarkerPosition = glm::vec3(0.0f, 0.85f, -3.0f); -const glm::vec3 kMarkerOffset = glm::vec3(0.0f, 0.85f, 0.0f); - -// Color of the ground grid. -const tango_gl::Color kGridColor(0.85f, 0.85f, 0.85f); -// Color of the goal marker. -const tango_gl::Color kMarkerColor(1.0f, 0.f, 0.f); - -// AR cube position in world coordinate. -const glm::vec3 kCubePosition = glm::vec3(-1.0f, 0.265f, -2.0f); - -// AR grid position, can be modified based on the real world scene. -const glm::vec3 kGridPosition = glm::vec3(0.0f, 1.26f, -2.0f); - -// AR cube dimension, based on real world scene. -const glm::vec3 kCubeScale = glm::vec3(0.38f, 0.53f, 0.57f); - -// Marker scale. -const glm::vec3 kMarkerScale = glm::vec3(0.05f, 0.05f, 0.05f); - -// Height offset is used for offset height of motion tracking -// pose data. Motion tracking start position is (0,0,0). Adding -// a height offset will give a more reasonable pose while a common -// human is holding the device. The units is in meters. 
-const glm::vec3 kFloorOffset = glm::vec3(0.0f, -1.4f, 0.0f); -glm::vec3 world_position = glm::vec3(0.0f, -1.4f, 0.0f); - -// Position and rotation of the opengl camera with respect to the opengl world. -// (This is the opengl representation of the physical color camera's location.) -glm::vec3 ow_p_oc; -glm::quat ow_q_oc; - -// Projection matrix from render camera. -glm::mat4 projection_mat; - -// First person projection matrix from color camera intrinsics. -glm::mat4 projection_mat_ar; - -// First person view matrix from color camera extrinsics. -glm::mat4 view_mat; - -// Tango start-of-service with respect to Opengl World matrix. -glm::mat4 ow_T_ss; - -// Device with respect to IMU matrix. -glm::mat4 imu_T_device; - -// Color Camera with respect to IMU matrix. -glm::mat4 imu_T_cc; - -// Opengl Camera with respect to Color Camera matrix. -glm::mat4 cc_T_oc; - -// Opengl Camera with respect to Opengl World matrix. -glm::mat4 ow_T_oc; - -// Color Camera image plane ratio. -float image_plane_ratio; -float image_width; -float image_height; - -// Color Camera image plane distance to view point. -float image_plane_dis; -float image_plane_dis_original; - -void SetupExtrinsics() { - TangoData& instance = TangoData::GetInstance(); - imu_T_device = glm::translate(glm::mat4(1.0f), instance.imu_p_device) * - glm::mat4_cast(instance.imu_q_device); - - imu_T_cc = glm::translate(glm::mat4(1.0f), instance.imu_p_cc) * - glm::mat4_cast(instance.imu_q_cc); -} - -// Setup projection matrix in first person view from color camera intrinsics. -void SetupIntrinsics() { - image_width = static_cast(TangoData::GetInstance().cc_width); - image_height = static_cast(TangoData::GetInstance().cc_height); - // Image plane focal length for x axis. - float img_fl = static_cast(TangoData::GetInstance().cc_fx); - image_plane_ratio = image_height / image_width; - image_plane_dis_original = 2.0f * img_fl / image_width; - image_plane_dis = image_plane_dis_original; - projection_mat_ar = glm::frustum( - -1.0f * kFovScaler, 1.0f * kFovScaler, -image_plane_ratio * kFovScaler, - image_plane_ratio * kFovScaler, image_plane_dis * kFovScaler, - kCamViewMaxDist); - frustum->SetScale(glm::vec3(1.0f, image_plane_ratio, image_plane_dis)); -} - -void SetCamera(CameraType camera_index) { - camera_type = camera_index; - cam_cur_angle[0] = cam_cur_angle[1] = cam_cur_dist = kZero; - switch (camera_index) { - case CameraType::FIRST_PERSON: - cam_parent_transform->SetPosition(kZeroVec3); - cam_parent_transform->SetRotation(kZeroQuat); - video_overlay->SetScale(glm::vec3(1.0f, 1.0f, 1.0f)); - video_overlay->SetPosition(kZeroVec3); - video_overlay->SetRotation(kZeroQuat); - video_overlay->SetParent(NULL); - break; - case CameraType::THIRD_PERSON: - video_overlay->SetParent(axis); - video_overlay->SetScale(glm::vec3(1.0f, image_plane_ratio, 1.0f)); - video_overlay->SetRotation(kZeroQuat); - video_overlay->SetPosition(glm::vec3(kZero, kZero, -image_plane_dis)); - - cam->SetPosition(kZeroVec3); - cam->SetRotation(kZeroQuat); - cam_cur_dist = kThirdPersonCameraDist; - cam_cur_angle[0] = -M_PI / 4.0f; - cam_cur_angle[1] = M_PI / 4.0f; - break; - case CameraType::TOP_DOWN: - video_overlay->SetScale(glm::vec3(1.0f, image_plane_ratio, 1.0f)); - video_overlay->SetRotation(kZeroQuat); - video_overlay->SetPosition(glm::vec3(kZero, kZero, -image_plane_dis)); - video_overlay->SetParent(axis); - - cam->SetPosition(kZeroVec3); - cam->SetRotation(kZeroQuat); - cam_cur_dist = kTopDownCameraDist; - cam_cur_angle[1] = M_PI / 2.0f; - break; - default: - break; - } -} 
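The deleted SetupIntrinsics() above builds the first-person projection matrix directly from the color camera intrinsics; in the refactored code the same values are fed into the new Scene class through its setters declared in scene.h. Below is a minimal sketch of that wiring. It is not part of the patch: the helper name SetupARProjection and the constants kFovScaler and kCamViewMaxDist are assumptions that mirror the values in the deleted file.

// Sketch only: derive AR projection parameters from the color camera
// intrinsics and hand them to the new Scene (names/constants are assumed).
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <tango_client_api.h>

void SetupARProjection(const TangoCameraIntrinsics& intrinsics,
                       tango_augmented_reality::Scene* scene) {
  float image_width = static_cast<float>(intrinsics.width);
  float image_height = static_cast<float>(intrinsics.height);
  // Aspect ratio of the physical image plane.
  float image_plane_ratio = image_height / image_width;
  // Image plane distance, normalized by the image width (as in the deleted
  // SetupIntrinsics()).
  float image_plane_distance =
      2.0f * static_cast<float>(intrinsics.fx) / image_width;

  const float kFovScaler = 0.1f;        // assumed, same as the deleted code
  const float kCamViewMaxDist = 100.f;  // assumed, same as the deleted code
  glm::mat4 projection = glm::frustum(
      -1.0f * kFovScaler, 1.0f * kFovScaler, -image_plane_ratio * kFovScaler,
      image_plane_ratio * kFovScaler, image_plane_distance * kFovScaler,
      kCamViewMaxDist);

  scene->SetCameraImagePlaneRatio(image_plane_ratio);
  scene->SetImagePlaneDistance(image_plane_distance);
  scene->SetARCameraProjectionMatrix(projection);
  scene->SetFrustumScale(
      glm::vec3(1.0f, image_plane_ratio, image_plane_distance));
}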
- -bool SetupGraphics() { - cam_parent_transform = new tango_gl::Transform(); - cam = new tango_gl::Camera(); - axis = new tango_gl::Axis(); - frustum = new tango_gl::Frustum(); - trace = new tango_gl::Trace(); - ground = new tango_gl::Grid(1.0f, 10, 10); - marker = new tango_gl::GoalMarker(); - video_overlay = new tango_gl::VideoOverlay(); - - cam->SetParent(cam_parent_transform); - cam->SetFieldOfView(kFov); - SetCamera(CameraType::FIRST_PERSON); - - ow_T_ss = tango_gl::conversions::opengl_world_T_tango_world(); - cc_T_oc = tango_gl::conversions::color_camera_T_opengl_camera(); - - ground->SetPosition(world_position); - ground->SetColor(kGridColor); - - marker->SetPosition(kMarkerPosition + world_position); - marker->SetScale(kMarkerScale); - marker->SetRotation(kMarkerRotation); - marker->SetColor(kMarkerColor); - return true; -} - -// Update viewport according to surface dimensions, always use image plane's -// ratio and make full use of the screen. -void SetupViewport(int w, int h) { - float screen_ratio = static_cast(h) / static_cast(w); - if (image_plane_ratio < screen_ratio) { - glViewport(0, 0, w, w * image_plane_ratio); - } else { - glViewport((w - h / image_plane_ratio) / 2, 0, h / image_plane_ratio, h); - } - cam->SetAspectRatio(1.0f / screen_ratio); -} - -// Render AR elements, frustum and trace with current Color Camera position and -// rotation -// updated from TangoData, TangoPosition and TangoRotation is updated via -// polling function GetPoseAtTime(). -bool RenderFrame() { - glEnable(GL_DEPTH_TEST); - glEnable(GL_CULL_FACE); - glClearColor(1.0f, 1.0f, 1.0f, 1.0f); - glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); - - TangoData::GetInstance().UpdateColorTexture(); - TangoData::GetInstance().GetPoseAtTime(); - - glm::vec3 ss_p_device = TangoData::GetInstance().tango_position; - glm::quat ss_q_device = TangoData::GetInstance().tango_rotation; - glm::mat4 ss_T_device = glm::translate(glm::mat4(1.0f), ss_p_device) * - glm::mat4_cast(ss_q_device); - ow_T_oc = - ow_T_ss * ss_T_device * glm::inverse(imu_T_device) * imu_T_cc * cc_T_oc; - glm::vec3 scale; - tango_gl::util::DecomposeMatrix(ow_T_oc, ow_p_oc, ow_q_oc, scale); - - if (camera_type == CameraType::FIRST_PERSON) { - glDisable(GL_DEPTH_TEST); - video_overlay->Render(glm::mat4(1.0f), glm::mat4(1.0f)); - glEnable(GL_DEPTH_TEST); - projection_mat = projection_mat_ar; - view_mat = glm::inverse(ow_T_oc); - } else { - glm::quat parent_cam_rot = glm::rotate(kZeroQuat, -cam_cur_angle[0], - glm::vec3(kZero, 1.0f, kZero)); - parent_cam_rot = glm::rotate(parent_cam_rot, -cam_cur_angle[1], - glm::vec3(1.0f, kZero, kZero)); - - cam_parent_transform->SetRotation(parent_cam_rot); - cam_parent_transform->SetPosition(tango_gl::conversions::Vec3TangoToGl(ss_p_device)); - - cam->SetPosition(glm::vec3(kZero, kZero, cam_cur_dist)); - - projection_mat = cam->GetProjectionMatrix(); - view_mat = cam->GetViewMatrix(); - - frustum->SetPosition(ow_p_oc); - frustum->SetRotation(ow_q_oc); - frustum->Render(projection_mat, view_mat); - - trace->UpdateVertexArray(ow_p_oc); - trace->Render(projection_mat, view_mat); - - axis->SetPosition(ow_p_oc); - axis->SetRotation(ow_q_oc); - axis->Render(projection_mat, view_mat); - - video_overlay->Render(projection_mat, view_mat); - } - - ground->Render(projection_mat, view_mat); - marker->SetRotation(kZeroQuat); - marker->Render(projection_mat, view_mat); - marker->SetRotation(kMarkerRotation); - marker->Render(projection_mat, view_mat); - return true; -} - -// Reset virtual world, use the current color 
camera's position as origin. -void ResetAR() { - world_position = ow_p_oc + kFloorOffset; - ground->SetPosition(world_position); - marker->SetPosition(world_position + kMarkerPosition); - image_plane_dis = image_plane_dis_original; - projection_mat_ar = glm::frustum( - -1.0f * kFovScaler, 1.0f * kFovScaler, -image_plane_ratio * kFovScaler, - image_plane_ratio * kFovScaler, image_plane_dis * kFovScaler, - kCamViewMaxDist); - frustum->SetScale(glm::vec3(1.0f, image_plane_ratio, image_plane_dis)); - SetCamera(camera_type); - - trace->ClearVertexArray(); -} - -void PlaceObject() { - marker->SetPosition(world_position + ow_p_oc + kMarkerOffset); -} - -void UpdateARElement(int ar_element, int interaction_type) { - glm::vec3 translation; - // LOGI("%d %d", ar_element,(interaction_type - 1) / 2 ); - switch ((interaction_type - 1) / 2) { - case 0: - translation = - glm::vec3(glm::pow(-1.0f, static_cast(interaction_type)) * - kArElementIncrement, - kZero, kZero); - break; - case 1: - translation = glm::vec3( - kZero, glm::pow(-1.0f, static_cast(interaction_type)) * - kArElementIncrement, - kZero); - break; - case 2: - translation = glm::vec3( - kZero, kZero, glm::pow(-1.0f, static_cast(interaction_type)) * - kArElementIncrement); - break; - } - switch (ar_element) { - case 1: - ground->Translate(translation); - marker->Translate(translation); - break; - case 2: - marker->Translate(translation); - break; - case 3: - ground->Translate(translation); - break; - case 4: - if (interaction_type == 3) { - image_plane_dis += -kArElementIncrement; - } - if (interaction_type == 4) { - image_plane_dis += kArElementIncrement; - } - projection_mat_ar = glm::frustum( - -1.0f * kFovScaler, 1.0f * kFovScaler, - -image_plane_ratio * kFovScaler, image_plane_ratio * kFovScaler, - image_plane_dis * kFovScaler, kCamViewMaxDist); - frustum->SetScale(glm::vec3(1.0f, image_plane_ratio, image_plane_dis)); - SetCamera(camera_type); - break; - } -} - -#ifdef __cplusplus -extern "C" { -#endif -JNIEXPORT jint JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_initialize( - JNIEnv* env, jobject, jobject activity) { - TangoErrorType err = TangoData::GetInstance().Initialize(env, activity); - if (err != TANGO_SUCCESS) { - if (err == TANGO_INVALID) { - LOGE("Tango Service version mis-match"); - } else { - LOGE("Tango Service initialize internal error"); - } - } - if (err == TANGO_SUCCESS) { - LOGI("Tango service initialize success"); - } - return static_cast(err); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setupConfig( - JNIEnv*, jobject, bool is_auto_recovery) { - if (!TangoData::GetInstance().SetConfig(is_auto_recovery)) { - LOGE("Tango set config failed"); - } else { - LOGI("Tango set config success"); - } -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_connectTexture( - JNIEnv*, jobject) { - TangoData::GetInstance().ConnectTexture(video_overlay->GetTextureId()); -} - -JNIEXPORT jint JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_connectService( - JNIEnv*, jobject) { - TangoErrorType err = TangoData::GetInstance().Connect(); - if (err == TANGO_SUCCESS) { - TangoData::GetInstance().GetExtrinsics(); - TangoData::GetInstance().GetIntrinsics(); - SetupExtrinsics(); - SetupIntrinsics(); - LOGI("Tango Service connect success"); - } else { - LOGE("Tango Service connect failed"); - } - return static_cast(err); -} - -JNIEXPORT void JNICALL 
-Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setupViewport( - JNIEnv*, jobject, jint width, jint height) { - SetupViewport(width, height); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_disconnectService( - JNIEnv*, jobject) { - TangoData::GetInstance().Disconnect(); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_onDestroy( - JNIEnv*, jobject) { - delete cam; - delete axis; - delete ground; - delete frustum; - delete trace; - delete video_overlay; -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setupGraphic( - JNIEnv*, jobject) { - SetupGraphics(); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_render( - JNIEnv*, jobject) { - RenderFrame(); -} - -JNIEXPORT jboolean JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getIsLocalized( - JNIEnv*, jobject) { - pthread_mutex_lock(&TangoData::GetInstance().pose_mutex); - bool is_localized = TangoData::GetInstance().is_localized; - pthread_mutex_unlock(&TangoData::GetInstance().pose_mutex); - return is_localized; -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_resetMotionTracking( - JNIEnv*, jobject) { - ResetAR(); - // TangoData::GetInstance().ResetMotionTracking(); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setCamera( - JNIEnv*, jobject, int camera_index) { - SetCamera(static_cast(camera_index)); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_placeObject( - JNIEnv*, jobject) { - PlaceObject(); -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_updateARElement( - JNIEnv*, jobject, int ar_element, int interaction_type) { - UpdateARElement(ar_element, interaction_type); -} - -JNIEXPORT jstring JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getPoseString( - JNIEnv* env, jobject) { - pthread_mutex_lock(&TangoData::GetInstance().pose_mutex); - std::string pose_string_cpy = - std::string(TangoData::GetInstance().pose_string); - pthread_mutex_unlock(&TangoData::GetInstance().pose_mutex); - return (env)->NewStringUTF(pose_string_cpy.c_str()); -} - -JNIEXPORT jstring JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_getVersionNumber( - JNIEnv* env, jobject) { - return (env) - ->NewStringUTF(TangoData::GetInstance().lib_version_string.c_str()); -} - -// Touching GL interface. 
-JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_startSetCameraOffset( - JNIEnv*, jobject) { - cam_start_angle[0] = cam_cur_angle[0]; - cam_start_angle[1] = cam_cur_angle[1]; - cam_start_dist = cam->GetPosition().z; -} - -JNIEXPORT void JNICALL -Java_com_projecttango_experiments_nativeaugmentedreality_TangoJNINative_setCameraOffset( - JNIEnv*, jobject, float rotation_x, float rotation_y, float dist) { - cam_cur_angle[0] = cam_start_angle[0] + rotation_x; - cam_cur_angle[1] = cam_start_angle[1] + rotation_y; - dist = tango_gl::util::Clamp(cam_start_dist + dist * kZoomSpeed, - kCamViewMinDist, kCamViewMaxDist); - cam_cur_dist = dist; -} -#ifdef __cplusplus -} -#endif diff --git a/augmented-reality-jni-example/app/src/main/jni/tango_data.cpp b/augmented-reality-jni-example/app/src/main/jni/tango_data.cpp deleted file mode 100644 index c4630862..00000000 --- a/augmented-reality-jni-example/app/src/main/jni/tango_data.cpp +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "tango_data.h" - -TangoData::TangoData() - : tango_position(glm::vec3(0.0f, 0.0f, 0.0f)), - tango_rotation(glm::quat(1.0f, 0.0f, 0.0f, 0.0f)), - config_(nullptr) {} - -// This is called when new pose updates become available. Pair was set to start- -// of-service with respect to ADF frame, which will be available only once -// localized against an ADF. Use this function to check localization status, and -// use GetPoseAtTime to get the current pose. -static void onPoseAvailable(void*, const TangoPoseData* pose) { - pthread_mutex_lock(&TangoData::GetInstance().pose_mutex); - // Update Tango localization status. - if (pose->status_code == TANGO_POSE_VALID) { - TangoData::GetInstance().is_localized = true; - } else { - TangoData::GetInstance().is_localized = false; - } - pthread_mutex_unlock(&TangoData::GetInstance().pose_mutex); -} - -// Tango event callback. -static void onTangoEvent(void*, const TangoEvent* event) { - pthread_mutex_lock(&TangoData::GetInstance().event_mutex); - // Update the status string for debug display. - std::stringstream string_stream; - string_stream << event->event_key << ": " << event->event_value; - TangoData::GetInstance().event_string = string_stream.str(); - pthread_mutex_unlock(&TangoData::GetInstance().event_mutex); -} - -// Get status string based on the pose status code. 
-const char* TangoData::getStatusStringFromStatusCode( - TangoPoseStatusType status) { - const char* ret_string; - switch (status) { - case TANGO_POSE_INITIALIZING: - ret_string = "Initializing"; - break; - case TANGO_POSE_VALID: - ret_string = "Valid"; - break; - case TANGO_POSE_INVALID: - ret_string = "Invalid"; - break; - case TANGO_POSE_UNKNOWN: - ret_string = "Unknown"; - break; - default: - ret_string = "Status_Code_Invalid"; - break; - } - if (static_cast(status) < 3) { - status_count[static_cast(status)] += 1; - } - return ret_string; -} - -TangoErrorType TangoData::Initialize(JNIEnv* env, jobject activity) { - // Initialize Tango Service. - // The initialize function perform API and Tango Service version check, - // the there is a mis-match between API and Tango Service version, the - // function will return TANGO_INVALID. - return TangoService_initialize(env, activity); -} - -bool TangoData::SetConfig(bool is_auto_recovery) { - // Get the default TangoConfig. - // We get the default config first and change the config - // flag as needed. - config_ = TangoService_getConfig(TANGO_CONFIG_DEFAULT); - if (config_ == NULL) { - LOGE("TangoService_getConfig(): Failed"); - return false; - } - - // Turn on auto recovery for motion tracking. - // Note that the auto-recovery is on by default. - if (TangoConfig_setBool(config_, "config_enable_auto_recovery", - is_auto_recovery) != TANGO_SUCCESS) { - LOGE("config_enable_auto_recovery(): Failed"); - return false; - } - - if (TangoConfig_setBool(config_, "config_enable_low_latency_imu_integration", - true) != TANGO_SUCCESS) { - LOGE("config_enable_low_latency_imu_integration(): Failed"); - return false; - } - - // Get library version string from service. - TangoConfig_getString(config_, "tango_service_library_version", - const_cast(lib_version_string.c_str()), - kVersionStringLength); - - // Setting up the start of service to ADF frame for the onPoseAvailable - // callback, - // it will check the localization status. - TangoCoordinateFramePair pair; - pair.base = TANGO_COORDINATE_FRAME_AREA_DESCRIPTION; - pair.target = TANGO_COORDINATE_FRAME_START_OF_SERVICE; - - // Attach onPoseAvailable callback. - // The callback will be called after the service is connected. - if (TangoService_connectOnPoseAvailable(1, &pair, onPoseAvailable) != - TANGO_SUCCESS) { - LOGE("TangoService_connectOnPoseAvailable(): Failed"); - return false; - } - - // Attach onEventAvailable callback. - // The callback will be called after the service is connected. - if (TangoService_connectOnTangoEvent(onTangoEvent) != TANGO_SUCCESS) { - LOGE("TangoService_connectOnTangoEvent(): Failed"); - return false; - } - - // Load the most recent ADF. - char* uuid_list; - - // uuid_list will contain a comma separated list of UUIDs. - if (TangoService_getAreaDescriptionUUIDList(&uuid_list) != TANGO_SUCCESS) { - LOGI("TangoService_getAreaDescriptionUUIDList"); - } - - // Parse the uuid_list to get the individual uuids. 
- if (uuid_list != NULL && uuid_list[0] != '\0') { - std::vector adf_list; - - char* parsing_char; - char* saved_ptr; - parsing_char = strtok_r(uuid_list, ",", &saved_ptr); - while (parsing_char != NULL) { - std::string s = std::string(parsing_char); - adf_list.push_back(s); - parsing_char = strtok_r(NULL, ",", &saved_ptr); - } - - int list_size = adf_list.size(); - if (list_size == 0) { - LOGE("List size is 0"); - return false; - } - cur_uuid = adf_list[list_size - 1]; - if (TangoConfig_setString(config_, "config_load_area_description_UUID", - adf_list[list_size - 1].c_str()) != - TANGO_SUCCESS) { - LOGE("config_load_area_description_uuid Failed"); - return false; - } else { - LOGI("Load ADF: %s", adf_list[list_size - 1].c_str()); - } - } else { - LOGE("No area description file available, no file loaded."); - } - is_localized = false; - return true; -} - -bool TangoData::GetPoseAtTime() { - // Set the reference frame pair after connect to service. - // Currently the API will set this set below as default. - - TangoCoordinateFramePair frame_pair; - frame_pair.base = is_localized ? TANGO_COORDINATE_FRAME_AREA_DESCRIPTION - : TANGO_COORDINATE_FRAME_START_OF_SERVICE; - frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE; - - TangoPoseData pose_latest; - TangoErrorType result_latest = - TangoService_getPoseAtTime(0., frame_pair, &pose_latest); - TangoPoseData pose_texture; - TangoErrorType result_texture = - TangoService_getPoseAtTime(timestamp, frame_pair, &pose_texture); - bool ok_latest = (result_latest == TANGO_SUCCESS && - pose_latest.status_code == TANGO_POSE_VALID); - bool ok_texture = (result_texture == TANGO_SUCCESS && - pose_texture.status_code == TANGO_POSE_VALID); - - if (ok_latest) { - const TangoPoseData& pose = ok_texture ? pose_texture : pose_latest; - tango_position = glm::vec3(pose.translation[0], pose.translation[1], - pose.translation[2]); - tango_rotation = glm::quat(pose.orientation[3], pose.orientation[0], - pose.orientation[1], pose.orientation[2]); - return true; - } - - /* - std::stringstream string_stream; - string_stream.setf(std::ios_base::fixed, std::ios_base::floatfield); - string_stream.precision(2); - string_stream << "Tango system event: " << event_string << "\n" << frame_pair - << "\n" - << " status: " - << getStatusStringFromStatusCode(pose.status_code) - << ", count: " << status_count[pose.status_code] - << ", timestamp(ms): " << timestamp << ", position(m): [" - << pose.translation[0] << ", " << pose.translation[1] << ", " - << pose.translation[2] << "]" - << ", orientation: [" << pose.orientation[0] << ", " - << pose.orientation[1] << ", " << pose.orientation[2] << ", " - << pose.orientation[3] << "]\n" - << "Color Camera Intrinsics(px):\n" - << " width: " << cc_width << ", height: " << cc_height - << ", fx: " << cc_fx << ", fy: " << cc_fy; - pose_string = string_stream.str(); - */ - return true; -} - -bool TangoData::GetExtrinsics() { - // Retrieve the Extrinsic - TangoPoseData poseData; - TangoCoordinateFramePair pair; - pair.base = TANGO_COORDINATE_FRAME_IMU; - pair.target = TANGO_COORDINATE_FRAME_DEVICE; - if (TangoService_getPoseAtTime(0.0, pair, &poseData) != TANGO_SUCCESS) { - LOGE("TangoService_getPoseAtTime(): Failed"); - return false; - } - imu_p_device = glm::vec3(poseData.translation[0], poseData.translation[1], - poseData.translation[2]); - imu_q_device = glm::quat(poseData.orientation[3], poseData.orientation[0], - poseData.orientation[1], poseData.orientation[2]); - - pair.target = TANGO_COORDINATE_FRAME_CAMERA_COLOR; - if 
(TangoService_getPoseAtTime(0.0, pair, &poseData) != TANGO_SUCCESS) { - LOGE("TangoService_getPoseAtTime(): Failed"); - return false; - } - imu_p_cc = glm::vec3(poseData.translation[0], poseData.translation[1], - poseData.translation[2]); - imu_q_cc = glm::quat(poseData.orientation[3], poseData.orientation[0], - poseData.orientation[1], poseData.orientation[2]); - return true; -} - -bool TangoData::GetIntrinsics() { - // Retrieve the Intrinsic - TangoCameraIntrinsics ccIntrinsics; - if (TangoService_getCameraIntrinsics(TANGO_CAMERA_COLOR, &ccIntrinsics) != - TANGO_SUCCESS) { - LOGE("TangoService_getCameraIntrinsics(): Failed"); - return false; - } - - // Color camera's image plane width. - cc_width = ccIntrinsics.width; - // Color camera's image plane height. - cc_height = ccIntrinsics.height; - // Color camera's x axis focal length. - cc_fx = ccIntrinsics.fx; - // Color camera's y axis focal length. - cc_fy = ccIntrinsics.fy; - // Principal point x coordinate on the image. - cc_cx = ccIntrinsics.cx; - // Principal point y coordinate on the image. - cc_cy = ccIntrinsics.cy; - for (int i = 0; i < 5; i++) { - cc_distortion[i] = ccIntrinsics.distortion[i]; - } - return true; -} - -void TangoData::ConnectTexture(GLuint texture_id) { - if (TangoService_connectTextureId(TANGO_CAMERA_COLOR, texture_id, nullptr, - nullptr) == TANGO_SUCCESS) { - LOGI("TangoService_connectTextureId(): Success!"); - } else { - LOGE("TangoService_connectTextureId(): Failed!"); - } -} - -void TangoData::UpdateColorTexture() { - if (TangoService_updateTexture(TANGO_CAMERA_COLOR, ×tamp) != - TANGO_SUCCESS) { - LOGE("TangoService_updateTexture(): Failed"); - } -} - -// Reset the Motion Tracking. -void TangoData::ResetMotionTracking() { TangoService_resetMotionTracking(); } - -// Connect to Tango Service, service will start running, and -// POSE can be queried. -TangoErrorType TangoData::Connect() { - return TangoService_connect(nullptr, config_); -} - -// Disconnect Tango Service. -// Disconnect will disconnect all callback from Tango Service, -// after resume, the application should re-connect all callback -// and connect to service. -// Disconnect will also reset all configuration to default. -// Before disconnecting the service, the application is reponsible to -// free the config_ handle as well. -// -// When running two Tango applications, the first application needs to -// disconnect the service, so that second application could connect the -// service with a new configration handle. The disconnect function will -// reset the configuration each time it's being called. -void TangoData::Disconnect() { TangoService_disconnect(); } diff --git a/augmented-reality-jni-example/app/src/main/jni/tango_data.h b/augmented-reality-jni-example/app/src/main/jni/tango_data.h deleted file mode 100644 index c01eee74..00000000 --- a/augmented-reality-jni-example/app/src/main/jni/tango_data.h +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2014 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef AUGMENTED_REALITY_JNI_EXAMPLE_TANGO_DATA_H_ -#define AUGMENTED_REALITY_JNI_EXAMPLE_TANGO_DATA_H_ - -#include -#include -#include -#include -#include - -#include "tango_client_api.h" -#include "tango-gl/util.h" - -const int kVersionStringLength = 27; - -class TangoData { - public: - static TangoData& GetInstance() { - static TangoData instance; - return instance; - } - TangoData(); - - TangoErrorType Initialize(JNIEnv* env, jobject activity); - bool SetConfig(bool isAutoReset); - TangoErrorType Connect(); - void Disconnect(); - void ResetMotionTracking(); - void ConnectTexture(GLuint texture_id); - void UpdateColorTexture(); - bool GetPoseAtTime(); - bool GetIntrinsics(); - bool GetExtrinsics(); - const char* getStatusStringFromStatusCode(TangoPoseStatusType status); - - pthread_mutex_t pose_mutex; - pthread_mutex_t event_mutex; - - glm::vec3 tango_position; - glm::quat tango_rotation; - - int status_count[3]; - double timestamp; - std::string event_string; - std::string lib_version_string; - std::string pose_string; - - // Extrinsics for imu_T_device (position and hamilton quaternion). - glm::vec3 imu_p_device; - glm::quat imu_q_device; - - // Extrinsics for imu_T_color_camera (position and hamilton quaternion). - glm::vec3 imu_p_cc; - glm::quat imu_q_cc; - - // Intrinsics for color camera. - int cc_width; - int cc_height; - double cc_fx; - double cc_fy; - double cc_cx; - double cc_cy; - double cc_distortion[5]; - - // Localization status. - bool is_localized; - std::string cur_uuid; - - private: - TangoConfig config_; -}; - -#endif // AUGMENTED_REALITY_JNI_EXAMPLE_TANGO_DATA_H_ diff --git a/augmented-reality-jni-example/app/src/main/jni/tango_event_data.cc b/augmented-reality-jni-example/app/src/main/jni/tango_event_data.cc new file mode 100644 index 00000000..8af523ff --- /dev/null +++ b/augmented-reality-jni-example/app/src/main/jni/tango_event_data.cc @@ -0,0 +1,49 @@ +/* + * Copyright 2014 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "tango-augmented-reality/tango_event_data.h" + +namespace tango_augmented_reality { + +TangoEventData::TangoEventData() {} + +TangoEventData::~TangoEventData() {} + +// Update current event string to the event passed in. +// In this application, we are just using these event for debug purpose, but in +// other application, developers could catch this event for exception handling. +// +// @param: event, TangoEvent in current frame. +void TangoEventData::UpdateTangoEvent(const TangoEvent* event) { + std::stringstream string_stream; + string_stream << event->event_key << ": " << event->event_value; + event_string_ = string_stream.str(); + string_stream.flush(); +} + +// Clear event string. Set event_string_ to empty. +void TangoEventData::ClearEventString() { + event_string_.clear(); +} + +// Get formated event string for debug dispaly purpose. 
+std::string TangoEventData::GetTangoEventString() {
+  return event_string_;
+}
+
+} // namespace tango_augmented_reality
\ No newline at end of file
diff --git a/augmented-reality-jni-example/app/src/main/res/layout/activity_augmented_reality.xml b/augmented-reality-jni-example/app/src/main/res/layout/activity_augmented_reality.xml
index 55f0307e..b5c4a857 100644
--- a/augmented-reality-jni-example/app/src/main/res/layout/activity_augmented_reality.xml
+++ b/augmented-reality-jni-example/app/src/main/res/layout/activity_augmented_reality.xml
@@ -1,24 +1,141 @@
[XML markup in this hunk was lost in extraction and is not recoverable; the hunk replaces the original 24-line layout with a 141-line layout, and only scattered attributes survive, e.g. android:layout_width="wrap_content", android:layout_height="wrap_content", android:orientation="vertical", android:paddingLeft="5dp".]
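The new augmented_reality_app.h above caches the JavaVM, the calling activity object, and an on-demand render method id for use from the onTextureAvailable callback; the corresponding .cc file is not part of this excerpt. Below is a minimal sketch of how that request could be forwarded to the Java layer. The exact implementation and the Java-side method it invokes are assumptions, not part of the patch.

// Sketch only: forward the on-demand render request from the TangoService
// texture callback thread to the Java activity, using the JavaVM, activity
// object and method id cached by AugmentedRealityApp.
void AugmentedRealityApp::RequestRender() {
  if (calling_activity_obj_ == nullptr || on_demand_render_ == nullptr) {
    LOGE("Cannot request render: the Java callback is not cached yet.");
    return;
  }
  JNIEnv* env = nullptr;
  // The texture callback arrives on a TangoService thread, so that thread
  // must be attached to the Java VM before making any JNI call.
  if (java_vm_->AttachCurrentThread(&env, nullptr) != JNI_OK) {
    LOGE("Failed to attach the current thread to the Java VM.");
    return;
  }
  // Invoke the no-argument render-request method on the activity.
  env->CallVoidMethod(calling_activity_obj_, on_demand_render_);
}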