New CTS Verifier case for Rotation Vector Sensor

Added a new Rotation Vector sensor verifier test, which uses phone pose
information generated by a computer vision algorithm to cross-check
the data from the Rotation Vector sensor.

* Necessary resources are added for the new UI.
* An OpenCV library stub is added, with its license, so that the test
can call OpenCV functions. The heavy-lifting native OpenCV library is
installed as an app from the Play Store (see the sketch after this
list).
* CtsMedia*.java files contain code that is also available in CTS but
lives in a different package, so it cannot be reused directly in this
package.
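
For reference, a minimal sketch of how test-side code would load the
native OpenCV library through the Play Store app (OpenCV Manager).
This only illustrates the public OpenCV Android API and is not itself
part of this change; the version constant is an assumption and should
match the bundled opencv-android.jar stub:

    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;

    // Inside an Activity: ask OpenCV Manager (installed from the Play
    // Store) to load the native library, then proceed once it is ready.
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // Native OpenCV is loaded; OpenCV calls are safe from here on.
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Assumed version constant; pick the one matching the stub jar.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, this, mLoaderCallback);
    }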

This change was approved in Icf0784305e109bff4070e27854f8f81d5290da0e
and is cherry-picked into MNC dev branch with minor bug fixes and
improvements captured in I5313e096999c5dd33a28c96cf011de3385358afb.

Added additional feature requirement and exclusion rules to the
AndroidManifest according to review suggestions.

Change-Id: I9889f4e027baa579346fd2802f7dd45a4cbb57dd
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index 227c6cb..37f1f90 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -31,6 +31,7 @@
                                ctstestrunner \
                                apache-commons-math \
                                androidplot \
+                               ctsverifier-opencv \
 
 LOCAL_PACKAGE_NAME := CtsVerifier
 
@@ -44,6 +45,16 @@
 
 include $(BUILD_PACKAGE)
 
+
+# opencv library
+include $(CLEAR_VARS)
+
+LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES := \
+        ctsverifier-opencv:libs/opencv-android.jar
+
+include $(BUILD_MULTI_PREBUILT)
+
+
 notification-bot := $(call intermediates-dir-for,APPS,NotificationBot)/package.apk
 
 # Builds and launches CTS Verifier on a device.
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 96b5676..8215210 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -720,6 +720,33 @@
                        android:value="android.hardware.sensor.compass" />
         </activity>
 
+        <activity
+            android:name=".sensors.RVCVXCheckTestActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test"
+            android:screenOrientation="locked" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/test_category_sensors" />
+            <meta-data
+                android:name="test_required_features"
+                android:value="android.hardware.sensor.accelerometer:android.hardware.sensor.gyroscope:android.hardware.sensor.compass:android.hardware.camera.any" />
+            <meta-data android:name="test_excluded_features"
+                    android:value="android.hardware.type.television" />
+        </activity>
+        <activity
+            android:name=".sensors.RVCVRecordActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test_rec"
+            android:screenOrientation="locked" >
+        </activity>
+
+
         <!-- TODO: enable when a full set of verifications can be implemented -->
         <!--activity android:name=".sensors.RotationVectorTestActivity"
                   android:label="@string/snsr_rot_vec_test"
diff --git a/apps/CtsVerifier/libs/opencv-android.jar b/apps/CtsVerifier/libs/opencv-android.jar
new file mode 100644
index 0000000..1c13eee
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android.jar
Binary files differ
diff --git a/apps/CtsVerifier/libs/opencv-android_LICENSE b/apps/CtsVerifier/libs/opencv-android_LICENSE
new file mode 100644
index 0000000..5e32d88
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android_LICENSE
@@ -0,0 +1,33 @@
+By downloading, copying, installing or using the software you agree to this license.
+If you do not agree to this license, do not download, install,
+copy or use the software.
+
+
+                          License Agreement
+               For Open Source Computer Vision Library
+                       (3-clause BSD License)
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+  * Neither the names of the copyright holders nor the names of the contributors
+    may be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+This software is provided by the copyright holders and contributors "as is" and
+any express or implied warranties, including, but not limited to, the implied
+warranties of merchantability and fitness for a particular purpose are disclaimed.
+In no event shall copyright holders or contributors be liable for any direct,
+indirect, incidental, special, exemplary, or consequential damages
+(including, but not limited to, procurement of substitute goods or services;
+loss of use, data, or profits; or business interruption) however caused
+and on any theory of liability, whether in contract, strict liability,
+or tort (including negligence or otherwise) arising in any way out of
+the use of this software, even if advised of the possibility of such damage.
diff --git a/apps/CtsVerifier/res/drawable/prompt_x.png b/apps/CtsVerifier/res/drawable/prompt_x.png
new file mode 100644
index 0000000..64302dc
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_x.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_y.png b/apps/CtsVerifier/res/drawable/prompt_y.png
new file mode 100644
index 0000000..01926b5
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_y.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_z.png b/apps/CtsVerifier/res/drawable/prompt_z.png
new file mode 100644
index 0000000..f4d86d6
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_z.png
Binary files differ
diff --git a/apps/CtsVerifier/res/layout/cam_preview_overlay.xml b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
new file mode 100644
index 0000000..41bbeb1
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical" android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:keepScreenOn="true">
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.RVCVCameraPreview"
+        android:id="@+id/cam_preview"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <!--
+    <ImageView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_centerVertical="true"
+        android:id="@+id/cam_overlay"
+        android:src="@drawable/icon"
+        android:scaleType="fitStart"
+        />
+    -->
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.MotionIndicatorView"
+        android:id="@+id/cam_indicator"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <ImageView
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_centerInParent="true"
+        android:id="@+id/cam_overlay"
+        android:scaleType="fitStart"
+        />
+</RelativeLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/raw/next_axis.mp3 b/apps/CtsVerifier/res/raw/next_axis.mp3
new file mode 100644
index 0000000..0a3174d
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/next_axis.mp3
Binary files differ
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index e70fa6d..0aba9d0 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -500,6 +500,8 @@
     <string name="snsr_test_skipped">SKIPPED</string>
     <string name="snsr_test_fail">FAIL</string>
     <string name="snsr_execution_time">Test execution time %1$s sec</string>
+    <string name="snsr_rvcvxchk_test">Rotation Vector CV XCheck</string>
+    <string name="snsr_rvcvxchk_test_rec">Rotation Vector CV XCheck Recording</string>
 
     <!-- Strings to interact with users in Sensor Tests -->
     <string name="snsr_test_play_sound">A sound will be played once the verification is complete...</string>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
new file mode 100644
index 0000000..b28e06b
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaOutputSurface.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.util.Log;
+import android.view.Surface;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
+
+/**
+ * Holds state associated with a Surface used for MediaCodec decoder output.
+ * <p>
+ * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
+ * and then create a Surface for that SurfaceTexture.  The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output.  When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL to a pbuffer.
+ * <p>
+ * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
+ * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
+ * we just draw it on whatever surface is current.
+ * <p>
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
+ */
+class CtsMediaOutputSurface implements SurfaceTexture.OnFrameAvailableListener {
+    private static final String TAG = "OutputSurface";
+    private static final boolean VERBOSE = false;
+
+    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+    private SurfaceTexture mSurfaceTexture;
+    private Surface mSurface;
+
+    private Object mFrameSyncObject = new Object();     // guards mFrameAvailable
+    private boolean mFrameAvailable;
+
+    private CtsMediaTextureRender mTextureRender;
+
+    /**
+     * Creates an OutputSurface backed by a pbuffer with the specified dimensions.  The new
+     * EGL context and surface will be made current.  Creates a Surface that can be passed
+     * to MediaCodec.configure().
+     */
+    public CtsMediaOutputSurface(int width, int height) {
+        if (width <= 0 || height <= 0) {
+            throw new IllegalArgumentException();
+        }
+
+        eglSetup(width, height);
+        makeCurrent();
+
+        setup(this);
+    }
+
+    /**
+     * Creates an OutputSurface using the current EGL context (rather than establishing a
+     * new one).  Creates a Surface that can be passed to MediaCodec.configure().
+     */
+    public CtsMediaOutputSurface() {
+        setup(this);
+    }
+
+    public CtsMediaOutputSurface(final SurfaceTexture.OnFrameAvailableListener listener) {
+        setup(listener);
+    }
+
+    /**
+     * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
+     * with the SurfaceTexture.
+     */
+    private void setup(SurfaceTexture.OnFrameAvailableListener listener) {
+        mTextureRender = new CtsMediaTextureRender();
+        mTextureRender.surfaceCreated();
+
+        // Even if we don't access the SurfaceTexture after the constructor returns, we
+        // still need to keep a reference to it.  The Surface doesn't retain a reference
+        // at the Java level, so if we don't either then the object can get GCed, which
+        // causes the native finalizer to run.
+        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
+        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+
+        // This doesn't work if OutputSurface is created on the thread that CTS started for
+        // these test cases.
+        //
+        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
+        // create a Handler that uses it.  The "frame available" message is delivered
+        // there, but since we're not a Looper-based thread we'll never see it.  For
+        // this to do anything useful, OutputSurface must be created on a thread without
+        // a Looper, so that SurfaceTexture uses the main application Looper instead.
+        //
+        // Java language note: passing "this" out of a constructor is generally unwise,
+        // but we should be able to get away with it here.
+        mSurfaceTexture.setOnFrameAvailableListener(listener);
+
+        mSurface = new Surface(mSurfaceTexture);
+    }
+
+    /**
+     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
+     */
+    private void eglSetup(int width, int height) {
+        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("unable to get EGL14 display");
+        }
+        int[] version = new int[2];
+        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+            mEGLDisplay = null;
+            throw new RuntimeException("unable to initialize EGL14");
+        }
+
+        // Configure EGL for pbuffer and OpenGL ES 2.0.  We want enough RGB bits
+        // to be able to tell if the frame is reasonable.
+        int[] attribList = {
+                EGL14.EGL_RED_SIZE, 8,
+                EGL14.EGL_GREEN_SIZE, 8,
+                EGL14.EGL_BLUE_SIZE, 8,
+                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
+                EGL14.EGL_NONE
+        };
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] numConfigs = new int[1];
+        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+                numConfigs, 0)) {
+            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+        }
+
+        // Configure context for OpenGL ES 2.0.
+        int[] attrib_list = {
+                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+                EGL14.EGL_NONE
+        };
+        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+                attrib_list, 0);
+        checkEglError("eglCreateContext");
+        if (mEGLContext == null) {
+            throw new RuntimeException("null context");
+        }
+
+        // Create a pbuffer surface.  By using this for output, we can use glReadPixels
+        // to test values in the output.
+        int[] surfaceAttribs = {
+                EGL14.EGL_WIDTH, width,
+                EGL14.EGL_HEIGHT, height,
+                EGL14.EGL_NONE
+        };
+        mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
+        checkEglError("eglCreatePbufferSurface");
+        if (mEGLSurface == null) {
+            throw new RuntimeException("surface was null");
+        }
+    }
+
+    /**
+     * Discard all resources held by this class, notably the EGL context.
+     */
+    public void release() {
+        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+            EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+            EGL14.eglReleaseThread();
+            EGL14.eglTerminate(mEGLDisplay);
+        }
+
+        mSurface.release();
+
+        // this causes a bunch of warnings that appear harmless but might confuse someone:
+        //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
+        //mSurfaceTexture.release();
+
+        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+        mEGLContext = EGL14.EGL_NO_CONTEXT;
+        mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+        mTextureRender = null;
+        mSurface = null;
+        mSurfaceTexture = null;
+    }
+
+    /**
+     * Makes our EGL context and surface current.
+     */
+    public void makeCurrent() {
+        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+            throw new RuntimeException("eglMakeCurrent failed");
+        }
+    }
+
+    /**
+     * Returns the Surface that we draw onto.
+     */
+    public Surface getSurface() {
+        return mSurface;
+    }
+
+    /**
+     * Replaces the fragment shader.
+     */
+    public void changeFragmentShader(String fragmentShader) {
+        mTextureRender.changeFragmentShader(fragmentShader);
+    }
+
+    /**
+     * Latches the next buffer into the texture.  Must be called from the thread that created
+     * the OutputSurface object, after the onFrameAvailable callback has signaled that new
+     * data is available.
+     */
+    public void awaitNewImage() {
+        final int TIMEOUT_MS = 500;
+
+        synchronized (mFrameSyncObject) {
+            while (!mFrameAvailable) {
+                try {
+                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
+                    // stalling the test if it doesn't arrive.
+                    mFrameSyncObject.wait(TIMEOUT_MS);
+                    if (!mFrameAvailable) {
+                        // TODO: if "spurious wakeup", continue while loop
+                        throw new RuntimeException("Surface frame wait timed out");
+                    }
+                } catch (InterruptedException ie) {
+                    // shouldn't happen
+                    throw new RuntimeException(ie);
+                }
+            }
+            mFrameAvailable = false;
+        }
+
+        // Latch the data.
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+    }
+
+    /**
+     * Waits up to the given timeout until a new image becomes available.
+     * @param timeoutMs timeout in milliseconds
+     * @return true if a new image is available; false if none arrived before the timeout.
+     */
+    public boolean checkForNewImage(int timeoutMs) {
+        synchronized (mFrameSyncObject) {
+            while (!mFrameAvailable) {
+                try {
+                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
+                    // stalling the test if it doesn't arrive.
+                    mFrameSyncObject.wait(timeoutMs);
+                    if (!mFrameAvailable) {
+                        return false;
+                    }
+                } catch (InterruptedException ie) {
+                    // shouldn't happen
+                    throw new RuntimeException(ie);
+                }
+            }
+            mFrameAvailable = false;
+        }
+
+        // Latch the data.
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+        return true;
+    }
+
+    /**
+     * Draws the data from SurfaceTexture onto the current EGL surface.
+     */
+    public void drawImage() {
+        mTextureRender.drawFrame(mSurfaceTexture);
+    }
+
+    public void latchImage() {
+        mTextureRender.checkGlError("before updateTexImage");
+        mSurfaceTexture.updateTexImage();
+    }
+
+    @Override
+    public void onFrameAvailable(SurfaceTexture st) {
+        if (VERBOSE) Log.d(TAG, "new frame available");
+        synchronized (mFrameSyncObject) {
+            if (mFrameAvailable) {
+                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
+            }
+            mFrameAvailable = true;
+            mFrameSyncObject.notifyAll();
+        }
+    }
+
+    /**
+     * Checks for EGL errors.
+     */
+    private void checkEglError(String msg) {
+        int error;
+        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
new file mode 100644
index 0000000..a96033d
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/CtsMediaTextureRender.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+//
+// This file is copied from android.hardware.cts.media
+//
+
+/**
+ * Code for rendering a texture onto a surface using OpenGL ES 2.0.
+ */
+class CtsMediaTextureRender {
+    private static final String TAG = "TextureRender";
+
+    private static final int FLOAT_SIZE_BYTES = 4;
+    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+    private final float[] mTriangleVerticesData = {
+        // X, Y, Z, U, V
+        -1.0f, -1.0f, 0, 0.f, 0.f,
+         1.0f, -1.0f, 0, 1.f, 0.f,
+        -1.0f,  1.0f, 0, 0.f, 1.f,
+         1.0f,  1.0f, 0, 1.f, 1.f,
+    };
+
+    private FloatBuffer mTriangleVertices;
+
+    private static final String VERTEX_SHADER =
+            "uniform mat4 uMVPMatrix;\n" +
+            "uniform mat4 uSTMatrix;\n" +
+            "attribute vec4 aPosition;\n" +
+            "attribute vec4 aTextureCoord;\n" +
+            "varying vec2 vTextureCoord;\n" +
+            "void main() {\n" +
+            "  gl_Position = uMVPMatrix * aPosition;\n" +
+            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+            "}\n";
+
+    private static final String FRAGMENT_SHADER =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +      // highp here doesn't seem to matter
+            "varying vec2 vTextureCoord;\n" +
+            "uniform samplerExternalOES sTexture;\n" +
+            "void main() {\n" +
+            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+            "}\n";
+
+    private float[] mMVPMatrix = new float[16];
+    private float[] mSTMatrix = new float[16];
+
+    private int mProgram;
+    private int mTextureID = -12345;
+    private int muMVPMatrixHandle;
+    private int muSTMatrixHandle;
+    private int maPositionHandle;
+    private int maTextureHandle;
+
+    public CtsMediaTextureRender() {
+        mTriangleVertices = ByteBuffer.allocateDirect(
+            mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+                .order(ByteOrder.nativeOrder()).asFloatBuffer();
+        mTriangleVertices.put(mTriangleVerticesData).position(0);
+
+        Matrix.setIdentityM(mSTMatrix, 0);
+    }
+
+    public int getTextureId() {
+        return mTextureID;
+    }
+
+    public void drawFrame(SurfaceTexture st) {
+        checkGlError("onDrawFrame start");
+        st.getTransformMatrix(mSTMatrix);
+
+        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+
+        GLES20.glUseProgram(mProgram);
+        checkGlError("glUseProgram");
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+        checkGlError("glVertexAttribPointer maPosition");
+        GLES20.glEnableVertexAttribArray(maPositionHandle);
+        checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+        checkGlError("glVertexAttribPointer maTextureHandle");
+        GLES20.glEnableVertexAttribArray(maTextureHandle);
+        checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+        Matrix.setIdentityM(mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+        checkGlError("glDrawArrays");
+        GLES20.glFinish();
+    }
+
+    /**
+     * Initializes GL state.  Call this after the EGL surface has been created and made current.
+     */
+    public void surfaceCreated() {
+        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+        if (mProgram == 0) {
+            throw new RuntimeException("failed creating program");
+        }
+        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+        checkGlError("glGetAttribLocation aPosition");
+        if (maPositionHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aPosition");
+        }
+        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+        checkGlError("glGetAttribLocation aTextureCoord");
+        if (maTextureHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aTextureCoord");
+        }
+
+        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+        checkGlError("glGetUniformLocation uMVPMatrix");
+        if (muMVPMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+        }
+
+        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+        checkGlError("glGetUniformLocation uSTMatrix");
+        if (muSTMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uSTMatrix");
+        }
+
+
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+
+        mTextureID = textures[0];
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+        checkGlError("glBindTexture mTextureID");
+
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+                GLES20.GL_NEAREST);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+                GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+                GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+                GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameter");
+    }
+
+    /**
+     * Replaces the fragment shader.
+     */
+    public void changeFragmentShader(String fragmentShader) {
+        GLES20.glDeleteProgram(mProgram);
+        mProgram = createProgram(VERTEX_SHADER, fragmentShader);
+        if (mProgram == 0) {
+            throw new RuntimeException("failed creating program");
+        }
+    }
+
+    private int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        checkGlError("glCreateShader type=" + shaderType);
+        GLES20.glShaderSource(shader, source);
+        GLES20.glCompileShader(shader);
+        int[] compiled = new int[1];
+        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+        if (compiled[0] == 0) {
+            Log.e(TAG, "Could not compile shader " + shaderType + ":");
+            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+            GLES20.glDeleteShader(shader);
+            shader = 0;
+        }
+        return shader;
+    }
+
+    private int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            return 0;
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            return 0;
+        }
+
+        int program = GLES20.glCreateProgram();
+        checkGlError("glCreateProgram");
+        if (program == 0) {
+            Log.e(TAG, "Could not create program");
+        }
+        GLES20.glAttachShader(program, vertexShader);
+        checkGlError("glAttachShader");
+        GLES20.glAttachShader(program, pixelShader);
+        checkGlError("glAttachShader");
+        GLES20.glLinkProgram(program);
+        int[] linkStatus = new int[1];
+        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+        if (linkStatus[0] != GLES20.GL_TRUE) {
+            Log.e(TAG, "Could not link program: ");
+            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+            GLES20.glDeleteProgram(program);
+            program = 0;
+        }
+        return program;
+    }
+
+    public void checkGlError(String op) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            Log.e(TAG, op + ": glError " + error);
+            throw new RuntimeException(op + ": glError " + error);
+        }
+    }
+
+    /**
+     * Saves the current frame to disk as a PNG image.  Frame starts from (0,0).
+     * <p>
+     * Useful for debugging.
+     */
+    public static void saveFrame(String filename, int width, int height) {
+        // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
+        // data (i.e. a byte of red, followed by a byte of green...).  We need an int[] filled
+        // with native-order ARGB data to feed to Bitmap.
+        //
+        // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
+        // copying data around for a 720p frame.  It's better to do a bulk get() and then
+        // rearrange the data in memory.  (For comparison, the PNG compress takes about 500ms
+        // for a trivial frame.)
+        //
+        // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
+        // get() into a straight memcpy on most Android devices.  Our ints will hold ABGR data.
+        // Swapping B and R gives us ARGB.  We need about 30ms for the bulk get(), and another
+        // 270ms for the color swap.
+        //
+        // Making this even more interesting is the upside-down nature of GL, which means we
+        // may want to flip the image vertically here.
+
+        ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+        buf.order(ByteOrder.LITTLE_ENDIAN);
+        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+        buf.rewind();
+
+        int pixelCount = width * height;
+        int[] colors = new int[pixelCount];
+        buf.asIntBuffer().get(colors);
+        for (int i = 0; i < pixelCount; i++) {
+            int c = colors[i];
+            colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16);
+        }
+
+        FileOutputStream fos = null;
+        try {
+            fos = new FileOutputStream(filename);
+            Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
+            bmp.compress(Bitmap.CompressFormat.PNG, 90, fos);
+            bmp.recycle();
+        } catch (IOException ioe) {
+            throw new RuntimeException("Failed to write file " + filename, ioe);
+        } finally {
+            try {
+                if (fos != null) fos.close();
+            } catch (IOException ioe2) {
+                throw new RuntimeException("Failed to close file " + filename, ioe2);
+            }
+        }
+        Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
new file mode 100644
index 0000000..12d4582
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/MotionIndicatorView.java
@@ -0,0 +1,409 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.PorterDuffXfermode;
+import android.graphics.RectF;
+import android.hardware.SensorManager;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.View;
+
+/**
+ * A view class that draws the user prompt.
+ *
+ * The following snippet shows how to use this view:
+ *
+ *  public void testUI()  {
+ *     final int MAX_TILT_ANGLE = 70; // +/- 70
+ *
+ *     final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+ *     final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+ *
+ *     RangeCoveredRegister xCovered, yCovered, zCovered;
+ *     xCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ *
+ *     yCovered = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+ *     zCovered = new RangeCoveredRegister(YAW_ANGLE_STEP);
+ *
+ *     xCovered.update(40);
+ *     xCovered.update(-40);
+ *     xCovered.update(12);
+ *
+ *     yCovered.update(50);
+ *     yCovered.update(-51);
+ *
+ *     zCovered.update(150);
+ *     zCovered.update(42);
+ *
+ *     setDataProvider(xCovered, yCovered, zCovered);
+ *     enableAxis(RVCVRecordActivity.AXIS_ALL); //debug mode, show all three axis
+ * }
+ */
+public class MotionIndicatorView extends View {
+    private final String TAG = "MotionIndicatorView";
+    private final boolean LOCAL_LOGV = false;
+
+    private Paint mCursorPaint;
+    private Paint mLimitPaint;
+    private Paint mCoveredPaint;
+    private Paint mRangePaint;
+    private Paint mEraserPaint;
+
+    // UI settings
+    private final int XBAR_WIDTH = 50;
+    private final int XBAR_MARGIN = 50;
+    private final int XBAR_CURSOR_ADD = 20;
+
+    private final int YBAR_WIDTH = 50;
+    private final int YBAR_MARGIN = 50;
+    private final int YBAR_CURSOR_ADD = 20;
+
+    private final int ZRING_WIDTH = 50;
+    private final int ZRING_CURSOR_ADD = 30;
+
+
+    private int mXSize, mYSize;
+    private RectF mZBoundOut, mZBoundOut2, mZBoundIn, mZBoundIn2;
+
+    private RangeCoveredRegister mXCovered, mYCovered, mZCovered;
+
+    private boolean mXEnabled, mYEnabled, mZEnabled;
+
+    /**
+     * Constructor
+     * @param context
+     */
+    public MotionIndicatorView(Context context) {
+        super(context);
+        init();
+    }
+
+    /**
+     * Constructor
+     * @param context Application context
+     * @param attrs
+     */
+    public MotionIndicatorView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        init();
+    }
+
+    /**
+     * Initialize the Paint objects
+     */
+    private void init() {
+
+        mCursorPaint = new Paint();
+        mCursorPaint.setColor(Color.BLUE);
+
+        mLimitPaint = new Paint();
+        mLimitPaint.setColor(Color.YELLOW);
+
+        mCoveredPaint = new Paint();
+        mCoveredPaint.setColor(Color.CYAN);
+
+        mRangePaint = new Paint();
+        mRangePaint.setColor(Color.DKGRAY);
+
+        mEraserPaint = new Paint();
+        mEraserPaint.setColor(Color.TRANSPARENT);
+        // ensure the erasing effect
+        mEraserPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
+    }
+
+    /**
+     * Connect the view to certain data provider objects
+     * @param x Data provider for x direction tilt angle
+     * @param y Data provider for y direction tilt angle
+     * @param z Data provider for z rotation
+     */
+    public void setDataProvider(RangeCoveredRegister x,
+                                RangeCoveredRegister y,
+                                RangeCoveredRegister z)    {
+        mXCovered = x;
+        mYCovered = y;
+        mZCovered = z;
+    }
+
+    /**
+     * Set the active axis for display
+     *
+     * @param axis AXIS_X, AXIS_Y, AXIS_Z for x, y, z axis indicators, or AXIS_ALL for all three.
+     */
+    public void enableAxis(int axis)  {
+        mXEnabled = mYEnabled = mZEnabled = false;
+
+        switch(axis)
+        {
+            case SensorManager.AXIS_X:
+                mXEnabled = true;
+                break;
+            case SensorManager.AXIS_Y:
+                mYEnabled = true;
+                break;
+            case SensorManager.AXIS_Z:
+                mZEnabled = true;
+                break;
+            case RVCVRecordActivity.AXIS_ALL:
+                mXEnabled = mYEnabled = mZEnabled = true;
+        }
+    }
+
+    /**
+     * Performs pre-calculations that only change when the view dimensions change.
+     * @param w
+     * @param h
+     * @param oldw
+     * @param oldh
+     */
+    @Override
+    protected void onSizeChanged (int w, int h, int oldw, int oldh) {
+        mXSize = w;
+        mYSize = h;
+
+        mZBoundOut = new RectF(w/2-w/2.5f, h/2-w/2.5f, w/2+w/2.5f, h/2+w/2.5f);
+        mZBoundOut2 = new RectF(
+                w/2-w/2.5f-ZRING_CURSOR_ADD, h/2-w/2.5f-ZRING_CURSOR_ADD,
+                w/2+w/2.5f+ZRING_CURSOR_ADD, h/2+w/2.5f+ZRING_CURSOR_ADD);
+        mZBoundIn = new RectF(
+                w/2-w/2.5f+ZRING_WIDTH, h/2-w/2.5f+ZRING_WIDTH,
+                w/2+w/2.5f-ZRING_WIDTH, h/2+w/2.5f-ZRING_WIDTH);
+        mZBoundIn2 = new RectF(
+                w/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD, h/2-w/2.5f+ZRING_WIDTH+ZRING_CURSOR_ADD,
+                w/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD, h/2+w/2.5f-ZRING_WIDTH-ZRING_CURSOR_ADD);
+
+        if (LOCAL_LOGV) Log.v(TAG, "New view size = ("+w+", "+h+")");
+    }
+
+    /**
+     * Draws the UI depending on the selected axis and the registered values.
+     *
+     * @param canvas the canvas to draw on
+     */
+    @Override
+    protected void onDraw(Canvas canvas) {
+        super.onDraw(canvas);
+        int i,t;
+
+        Paint p = new Paint();
+        p.setColor(Color.YELLOW);
+        canvas.drawRect(10,10, 50, 50, p);
+
+        if (mXEnabled && mXCovered != null) {
+            int xNStep = mXCovered.getNSteps() + 4; // two on each side as a buffer
+            int xStepSize = mXSize * 3/4 / xNStep;
+            int xLeft = mXSize * 1/8 + (mXSize * 3/4 % xNStep)/2;
+
+            // base bar
+            canvas.drawRect(xLeft, XBAR_MARGIN,
+                    xLeft+xStepSize*xNStep-1, XBAR_WIDTH+XBAR_MARGIN, mRangePaint);
+
+            // covered range
+            for (i=0; i<mXCovered.getNSteps(); ++i) {
+                if (mXCovered.isCovered(i)) {
+                    canvas.drawRect(
+                            xLeft+xStepSize*(i+2), XBAR_MARGIN,
+                            xLeft+xStepSize*(i+3)-1, XBAR_WIDTH + XBAR_MARGIN,
+                            mCoveredPaint);
+                }
+            }
+
+            // limit
+            canvas.drawRect(xLeft+xStepSize*2-4, XBAR_MARGIN,
+                    xLeft+xStepSize*2+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+            canvas.drawRect(xLeft+xStepSize*(xNStep-2)-4, XBAR_MARGIN,
+                    xLeft+xStepSize*(xNStep-2)+3, XBAR_WIDTH+XBAR_MARGIN, mLimitPaint);
+
+            // cursor
+            t = (int)(xLeft+xStepSize*(mXCovered.getLastValue()+2));
+            canvas.drawRect(t-4, XBAR_MARGIN-XBAR_CURSOR_ADD, t+3,
+                    XBAR_WIDTH+XBAR_MARGIN+XBAR_CURSOR_ADD, mCursorPaint);
+        }
+        if (mYEnabled && mYCovered != null) {
+            int yNStep = mYCovered.getNSteps() + 4; // two on each side as a buffer
+            int yStepSize = mYSize * 3/4 / yNStep;
+            int yLeft = mYSize * 1/8 + (mYSize * 3/4 % yNStep)/2;
+
+            // base bar
+            canvas.drawRect(YBAR_MARGIN, yLeft,
+                    YBAR_WIDTH+YBAR_MARGIN, yLeft+yStepSize*yNStep-1, mRangePaint);
+
+            // covered range
+            for (i=0; i<mYCovered.getNSteps(); ++i) {
+                if (mYCovered.isCovered(i)) {
+                    canvas.drawRect(
+                            YBAR_MARGIN, yLeft+yStepSize*(i+2),
+                            YBAR_WIDTH + YBAR_MARGIN, yLeft+yStepSize*(i+3)-1,
+                            mCoveredPaint);
+                }
+            }
+
+            // limit
+            canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * 2 - 4,
+                    YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * 2 + 3, mLimitPaint);
+            canvas.drawRect(YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) - 4,
+                    YBAR_WIDTH + YBAR_MARGIN, yLeft + yStepSize * (yNStep - 2) + 3, mLimitPaint);
+
+            // cursor
+            t = (int)(yLeft+yStepSize*(mYCovered.getLastValue()+2));
+            canvas.drawRect( YBAR_MARGIN-YBAR_CURSOR_ADD, t-4,
+                    YBAR_WIDTH+YBAR_MARGIN+YBAR_CURSOR_ADD, t+3, mCursorPaint);
+        }
+
+        if (mZEnabled && mZCovered != null) {
+            float stepSize  = 360.0f/mZCovered.getNSteps();
+
+            // base bar
+            canvas.drawArc(mZBoundOut,0, 360, true, mRangePaint);
+
+            // covered range
+            for (i=0; i<mZCovered.getNSteps(); ++i) {
+                if (mZCovered.isCovered(i)) {
+                    canvas.drawArc(mZBoundOut,i*stepSize-0.2f, stepSize+0.4f,
+                            true, mCoveredPaint);
+                }
+            }
+            // clear center
+            canvas.drawArc(mZBoundIn, 0, 360, true, mEraserPaint);
+            // cursor
+            canvas.drawArc(mZBoundOut2, mZCovered.getLastValue()*stepSize- 1, 2,
+                    true, mCursorPaint);
+            canvas.drawArc(mZBoundIn2, mZCovered.getLastValue()*stepSize-1.5f, 3,
+                    true, mEraserPaint);
+        }
+    }
+}
+
+/**
+ *  A range register class for the RVCVRecord Activity
+ */
+class RangeCoveredRegister {
+    enum MODE {
+        LINEAR,
+        ROTATE2D
+    }
+
+    private boolean[] mCovered;
+    private MODE mMode;
+    private int mStep;
+    private int mLow, mHigh;
+    private int mLastData;
+
+    // high is not inclusive
+    RangeCoveredRegister(int low, int high, int step) {
+        mMode = MODE.LINEAR;
+        mStep = step;
+        mLow = low;
+        mHigh = high;
+        init();
+    }
+
+    RangeCoveredRegister(int step) {
+        mMode = MODE.ROTATE2D;
+        mStep = step;
+        mLow = 0;
+        mHigh = 360;
+        init();
+    }
+
+    private void init() {
+        if (mMode == MODE.LINEAR) {
+            mCovered = new boolean[(mHigh-mLow)/mStep];
+        } else {
+            mCovered = new boolean[360/mStep];
+        }
+    }
+
+    /**
+     * Test if the range defined is fully covered.
+     *
+     * @return if the range is fully covered, return true; otherwise false.
+     */
+    public boolean isFullyCovered() {
+        for (boolean i:mCovered) {
+            if (!i) return false;
+        }
+        return true;
+    }
+
+    /**
+     * Test if a specific step is covered.
+     *
+     * @param i the step number
+     * @return if the step specified is covered, return true; otherwise false.
+     */
+    public boolean isCovered(int i) {
+        return mCovered[i];
+    }
+
+    /**
+     * Update the register with a new data point.
+     *
+     * @param data the new value to record
+     * @return true if this update covers a previously uncovered step; false otherwise.
+     */
+    public boolean update(int data) {
+        mLastData = data;
+
+        if (mMode == MODE.ROTATE2D) {
+            data %= 360;
+        }
+
+        int iStep = (data - mLow)/mStep;
+
+        if (iStep>=0 && iStep<getNSteps()) {
+            // record the (possibly wrapped) value for valid data
+            mLastData = data;
+
+            if (mCovered[iStep]) {
+                return false;
+            } else {
+                mCovered[iStep] = true;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Get the number of steps in this register
+     *
+     * @return The number of steps in this register
+     */
+    public int getNSteps() {
+        //if (mCovered == null) {
+        //return 0;
+        //}
+        return mCovered.length;
+    }
+
+    /**
+     * Get the last value updated
+     *
+     * @return The last value updated
+     */
+    public float getLastValue() {
+        // ensure float division
+        return ((float)(mLastData - mLow))/mStep;
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
new file mode 100644
index 0000000..a5b58f6
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVCameraPreview.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors;
+
+// ----------------------------------------------------------------------
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import java.io.IOException;
+import java.util.List;
+
+/** Camera preview class */
+public class RVCVCameraPreview extends SurfaceView implements SurfaceHolder.Callback {
+    private static final String TAG = "RVCVCameraPreview";
+    private static final boolean LOCAL_LOGD = true;
+
+    private SurfaceHolder mHolder;
+    private Camera mCamera;
+
+    /**
+     * Constructor
+     * @param context Activity context
+     * @param camera Camera object to be previewed
+     */
+    public RVCVCameraPreview(Context context, Camera camera) {
+        super(context);
+        mCamera = camera;
+        initSurface();
+    }
+
+    /**
+     * Constructor
+     * @param context Activity context
+     * @param attrs
+     */
+    public RVCVCameraPreview(Context context, AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    public void init(Camera camera) {
+        this.mCamera = camera;
+        initSurface();
+    }
+
+    private void initSurface() {
+        // Install a SurfaceHolder.Callback so we get notified when the
+        // underlying surface is created and destroyed.
+        mHolder = getHolder();
+        mHolder.addCallback(this);
+
+        // deprecated
+        // TODO: update this code to match new API level.
+        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+    }
+
+    /**
+     *  SurfaceHolder.Callback
+     *  Surface is created, it is OK to start the camera preview now.
+     */
+    public void surfaceCreated(SurfaceHolder holder) {
+        // The Surface has been created, now tell the camera where to draw the preview.
+
+        if (mCamera == null) {
+            // preview camera does not exist
+            return;
+        }
+
+        try {
+            mCamera.setPreviewDisplay(holder);
+            mCamera.startPreview();
+        } catch (IOException e) {
+            if (LOCAL_LOGD) Log.d(TAG, "Error when starting camera preview: " + e.getMessage());
+        }
+    }
+    /**
+     *  SurfaceHolder.Callback
+     */
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        // empty. Take care of releasing the Camera preview in your activity.
+    }
+
+    /**
+     *  SurfaceHolder.Callback
+     *  Restart camera preview if surface changed
+     */
+    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+
+        if (mHolder.getSurface() == null || mCamera == null){
+            // preview surface or camera does not exist
+            return;
+        }
+
+        // stop preview before making changes
+        mCamera.stopPreview();
+
+        // the activity using this view is locked to this orientation, so hard-coding is fine
+        mCamera.setDisplayOrientation(90);
+
+        //do the same as if it is created again
+        surfaceCreated(holder);
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
new file mode 100644
index 0000000..9011619
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVRecordActivity.java
@@ -0,0 +1,903 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.Camera;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.media.AudioManager;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.media.SoundPool;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+import android.view.Window;
+import android.widget.ImageView;
+import android.widget.Toast;
+
+import com.android.cts.verifier.R;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+
+// ----------------------------------------------------------------------
+
+/**
+ *  An activity that does recording of the camera video and rotation vector data at the same time.
+ */
+public class RVCVRecordActivity extends Activity {
+    private static final String TAG = "RVCVRecordActivity";
+    //private static final boolean LOCAL_LOGD = true;
+    private static final boolean LOCAL_LOGV = false;
+
+    private MotionIndicatorView mIndicatorView;
+
+    private SoundPool mSoundPool;
+    private int [] mSoundPoolLookup;
+
+    private File mRecordDir;
+    private RecordProcedureController mController;
+    private VideoRecorder           mVideoRecorder;
+    private RVSensorLogger          mRVSensorLogger;
+    private CoverageManager         mCoverManager;
+    private CameraPreviewer         mPreviewer;
+
+    public static final int AXIS_NONE = 0;
+    public static final int AXIS_ALL = SensorManager.AXIS_X +
+                                       SensorManager.AXIS_Y +
+                                       SensorManager.AXIS_Z;
+
+    // For Rotation Vector algorithm research use
+    private final static boolean     LOG_RAW_SENSORS = false;
+    private RawSensorLogger          mRawSensorLogger;
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        // Hide the window title.
+        requestWindowFeature(Window.FEATURE_NO_TITLE);
+
+        // inflate xml
+        setContentView(R.layout.cam_preview_overlay);
+
+        // locate views
+        mIndicatorView = (MotionIndicatorView) findViewById(R.id.cam_indicator);
+
+        initStoragePath();
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+        mController.quit();
+
+        mPreviewer.end();
+        endSoundPool();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        // delay the initialization as much as possible
+        init();
+    }
+
+    /** Display a toast message.
+     *
+     * @param msg Message content
+     */
+    private void message(String msg) {
+
+        Context context = getApplicationContext();
+        int duration = Toast.LENGTH_SHORT;
+
+        Toast toast = Toast.makeText(context, msg, duration);
+        toast.show();
+    }
+
+    /**
+     *  Initialize components
+     *
+     */
+    private void init() {
+        mPreviewer = new CameraPreviewer();
+        mPreviewer.init();
+
+        mCoverManager = new CoverageManager();
+        mIndicatorView.setDataProvider(
+                mCoverManager.getAxis(SensorManager.AXIS_X),
+                mCoverManager.getAxis(SensorManager.AXIS_Y),
+                mCoverManager.getAxis(SensorManager.AXIS_Z)  );
+
+        initSoundPool();
+        mRVSensorLogger = new RVSensorLogger(this);
+
+        mVideoRecorder = new VideoRecorder(mPreviewer.getCamera());
+
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger = new RawSensorLogger(mRecordDir);
+        }
+
+        mController = new RecordProcedureController(this);
+    }
+
+    /**
+     * Notify recording is completed. This is the successful exit.
+     */
+    public void notifyComplete() {
+        message("Capture completed!");
+
+        Uri resultUri = Uri.fromFile(mRecordDir);
+        Intent result = new Intent();
+        result.setData(resultUri);
+        setResult(Activity.RESULT_OK, result);
+
+        finish();
+    }
+
+    /**
+     * Notify the user what to do next in text
+     *
+     * @param axis SensorManager.AXIS_X or SensorManager.AXIS_Y or SensorManager.AXIS_Z
+     */
+    private void notifyPrompt(int axis) {
+        // The lookup string is "YXZ" rather than "XYZ" because an earlier design
+        // used a different definition of the X and Y axes.
+        final String axisName = "YXZ";
+
+        message("Manipulate the device in " + axisName.charAt(axis-1) + " axis (as illustrated) about the pattern.");
+    }
+
+    /**
+     *  Ask indicator view to redraw
+     */
+    private void redrawIndicator() {
+        mIndicatorView.invalidate();
+    }
+
+    /**
+     * Switch to a different axis for display and logging
+     * @param axis
+     */
+    private void switchAxis(int axis) {
+        ImageView imageView = (ImageView) findViewById(R.id.cam_overlay);
+
+        final int [] prompts = {R.drawable.prompt_x, R.drawable.prompt_y, R.drawable.prompt_z};
+
+        if (axis >=SensorManager.AXIS_X && axis <=SensorManager.AXIS_Z) {
+            imageView.setImageResource(prompts[axis-1]);
+            mIndicatorView.enableAxis(axis);
+            mRVSensorLogger.updateRegister(mCoverManager.getAxis(axis), axis);
+            notifyPrompt(axis);
+        } else {
+            imageView.setImageDrawable(null);
+            mIndicatorView.enableAxis(AXIS_NONE);
+        }
+        redrawIndicator();
+    }
+
+    /**
+     * Asynchronous way to call switchAxis. Use this if the caller is not on the UI thread.
+     * @param axis SensorManager.AXIS_X or SensorManager.AXIS_Y or SensorManager.AXIS_Z
+     */
+    public void switchAxisAsync(int axis) {
+        // intended to be called from a non-UI thread
+        final int fAxis = axis;
+        runOnUiThread(new Runnable() {
+            public void run() {
+                // UI code goes here
+                switchAxis(fAxis);
+            }
+        });
+    }
+
+    /**
+     * Initialize sound pool for user notification
+     */
+    private void initSoundPool() {
+        final int MAX_STREAM = 10;
+        int i=0;
+        mSoundPool = new SoundPool(MAX_STREAM, AudioManager.STREAM_MUSIC, 0);
+        mSoundPoolLookup = new int[MAX_STREAM];
+
+        // TODO: add a different sound for each axis
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+        mSoundPoolLookup[i++] = mSoundPool.load(this, R.raw.next_axis, 1);
+    }
+    private void endSoundPool() {
+        mSoundPool.release();
+    }
+
+    /**
+     * Play notify sound to user
+     * @param id ID of the sound to be played
+     */
+    public void playNotifySound(int id) {
+        mSoundPool.play(mSoundPoolLookup[id], 1, 1, 0, 0, 1);
+    }
+
+    /**
+     * Start the sensor recording
+     */
+    public void startRecordSensor() {
+        mRVSensorLogger.init();
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger.init();
+        }
+    }
+
+    /**
+     * Stop the sensor recording
+     */
+    public void stopRecordSensor() {
+        mRVSensorLogger.end();
+        if (LOG_RAW_SENSORS) {
+            mRawSensorLogger.end();
+        }
+    }
+
+    /**
+     * Start video recording
+     */
+    public void startRecordVideo() {
+        mVideoRecorder.init();
+    }
+
+    /**
+     * Stop video recording
+     */
+    public void stopRecordVideo() {
+        mVideoRecorder.end();
+    }
+
+    /**
+     * Wait until a sensor recording for a certain axis is fully covered
+     * @param axis
+     */
+    public void waitUntilCovered(int axis) {
+        mCoverManager.waitUntilCovered(axis);
+    }
+
+
+    /**
+     * Set up the directory structure used to store the recorded data
+     */
+    private void initStoragePath() {
+        File rxcvRecDataDir = new File(Environment.getExternalStorageDirectory(),"RVCVRecData");
+
+        // Create the storage directory if it does not exist
+        if (! rxcvRecDataDir.exists()) {
+            if (! rxcvRecDataDir.mkdirs()) {
+                Log.e(TAG, "failed to create main data directory");
+            }
+        }
+
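+        // name each recording session directory with a timestamp so sessions do not collide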
+        mRecordDir = new File(rxcvRecDataDir, new SimpleDateFormat("yyMMdd-HHmmss").format(new Date()));
+
+        if (! mRecordDir.mkdirs()) {
+            Log.e(TAG, "failed to create rec data directory");
+        }
+    }
+
+    /**
+     * Get the sensor log file path
+     * @return Path of the sensor log file
+     */
+    public String getSensorLogFilePath() {
+        return new File(mRecordDir, "sensor.log").getPath();
+    }
+
+    /**
+     * Get the video recording file path
+     * @return Path of the video recording file
+     */
+    public String getVideoRecFilePath() {
+        return new File(mRecordDir, "video.mp4").getPath();
+    }
+
+    /**
+     * Write out important camera/video information to a JSON file
+     * @param width         width of frame
+     * @param height        height of frame
+     * @param frameRate     frame rate in fps
+     * @param fovW          field of view in width direction
+     * @param fovH          field of view in height direction
+     */
+    public void writeVideoMetaInfo(int width, int height, float frameRate, float fovW, float fovH) {
+        try {
+            JsonWriter writer =
+                    new JsonWriter(
+                        new OutputStreamWriter(
+                                new FileOutputStream(
+                                        new File(mRecordDir, "videometa.json").getPath()
+                                )
+                        )
+                    );
+            writer.beginObject();
+            writer.name("fovW").value(fovW);
+            writer.name("fovH").value(fovH);
+            writer.name("width").value(width);
+            writer.name("height").value(height);
+            writer.name("frameRate").value(frameRate);
+            writer.endObject();
+
+            writer.close();
+        } catch (FileNotFoundException e) {
+            // not very likely to happen
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Writing video meta data failed.");
+        }
+    }
+
+    /**
+     * Camera preview control class
+     */
+    class CameraPreviewer {
+        private Camera mCamera;
+
+        CameraPreviewer() {
+            try {
+                mCamera = Camera.open(); // attempt to get a default Camera instance
+            }
+            catch (Exception e) {
+                // Camera is not available (in use or does not exist)
+                Log.e(TAG, "Cannot obtain Camera!");
+            }
+        }
+
+        /**
+         * Get the camera to be previewed
+         * @return Reference to Camera used
+         */
+        public Camera getCamera() {
+            return mCamera;
+        }
+
+        /**
+         * Setup the camera
+         */
+        public void init() {
+            if (mCamera != null) {
+                double alpha = mCamera.getParameters().getHorizontalViewAngle()*Math.PI/180.0;
+                int width = 1920;
+                double fx = width/2/Math.tan(alpha/2.0);
+
+                if (LOCAL_LOGV) Log.v(TAG, "View angle="
+                        + mCamera.getParameters().getHorizontalViewAngle() +"  Estimated fx = "+fx);
+
+                RVCVCameraPreview cameraPreview =
+                        (RVCVCameraPreview) findViewById(R.id.cam_preview);
+                cameraPreview.init(mCamera);
+            } else {
+                message("Cannot open camera!");
+                finish();
+            }
+        }
+
+        /**
+         * End the camera preview
+         */
+        public void end() {
+            if (mCamera != null) {
+                mCamera.release();        // release the camera for other applications
+                mCamera = null;
+            }
+        }
+    }
+
+    /**
+     * Manage a set of RangeCoveredRegister objects
+     */
+    class CoverageManager {
+        // settings
+        private final int MAX_TILT_ANGLE = 60; // +/- 60
+        //private final int REQUIRED_TILT_ANGLE = 50; // +/- 50
+        private final int TILT_ANGLE_STEP = 5; // 5 degree(s) per step
+        private final int YAW_ANGLE_STEP = 10; // 10 degree(s) per step
+
+        RangeCoveredRegister[] mAxisCovered;
+
+        CoverageManager() {
+            mAxisCovered = new RangeCoveredRegister[3];
+            // X AXIS
+            mAxisCovered[0] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+            // Y AXIS
+            mAxisCovered[1] = new RangeCoveredRegister(-MAX_TILT_ANGLE, +MAX_TILT_ANGLE, TILT_ANGLE_STEP);
+            // Z AXIS
+            mAxisCovered[2] = new RangeCoveredRegister(YAW_ANGLE_STEP);
+        }
+
+        public RangeCoveredRegister getAxis(int axis) {
+            // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+            return mAxisCovered[axis-1];
+        }
+
+        public void waitUntilCovered(int axis) {
+            // SensorManager.AXIS_X = 1, need offset -1 for mAxisCovered array
+            while(!mAxisCovered[axis-1].isFullyCovered()) {
+                try {
+                    Thread.sleep(500);
+                } catch (InterruptedException e) {
+                    if (LOCAL_LOGV) {
+                        Log.v(TAG, "waitUntilCovered axis = "+ axis + " is interrupted");
+                    }
+                }
+            }
+        }
+    }
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     * A class controls the video recording
+     */
+    class VideoRecorder
+    {
+        private MediaRecorder mRecorder;
+        private Camera mCamera;
+        private boolean mRunning = false;
+
+        private int [] mPreferredProfiles = {   CamcorderProfile.QUALITY_480P,  // smaller -> faster
+                                        CamcorderProfile.QUALITY_720P,
+                                        CamcorderProfile.QUALITY_1080P,
+                                        CamcorderProfile.QUALITY_HIGH // existence guaranteed
+                                    };
+
+
+        VideoRecorder(Camera camera) {
+            mCamera = camera;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            float fovW =  mCamera.getParameters().getHorizontalViewAngle();
+            float fovH =  mCamera.getParameters().getVerticalViewAngle();
+
+            mRecorder = new MediaRecorder();
+
+            mCamera.unlock();
+
+            mRecorder.setCamera(mCamera);
+
+            mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+            mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
+
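+            // note: MediaRecorder requires the sources to be set before setProfile()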
+            CamcorderProfile profile = null;
+            for (int i: mPreferredProfiles) {
+                if (CamcorderProfile.hasProfile(i)) {
+                    profile = CamcorderProfile.get(i);
+                    mRecorder.setProfile(profile);
+                    break;
+                }
+            }
+
+            writeVideoMetaInfo(profile.videoFrameWidth, profile.videoFrameHeight,
+                    profile.videoFrameRate, fovW, fovH);
+
+            try {
+                mRecorder.setOutputFile(getVideoRecFilePath());
+                mRecorder.prepare();
+            } catch (IOException e) {
+                Log.e(TAG, "Preparation for recording failed.");
+            }
+
+            try {
+                mRecorder.start();
+            } catch (RuntimeException e) {
+                Log.e(TAG, "Starting recording failed.");
+                mRecorder.reset();
+                mRecorder.release();
+                mCamera.lock();
+                // leave mRunning false so end() does not touch the released recorder
+                return;
+            }
+            mRunning = true;
+        }
+
+        /**
+         * Stop recording
+         */
+        public void end() {
+            if (mRunning) {
+                try {
+                    mRecorder.stop();
+                    mRecorder.reset();
+                    mRecorder.release();
+                    mCamera.lock();
+                } catch (RuntimeException e) {
+                    e.printStackTrace();
+                    Log.e(TAG, "Runtime error in stopping recording.");
+                }
+            }
+            mRecorder = null;
+        }
+
+    }
+
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     *  Log all raw sensor readings, for Rotation Vector sensor algorithms research
+     */
+    class RawSensorLogger implements SensorEventListener {
+        private final String TAG = "RawSensorLogger";
+
+        private final static int SENSOR_RATE = SensorManager.SENSOR_DELAY_FASTEST;
+        private File mRecPath;
+
+        SensorManager mSensorManager;
+        Sensor mAccSensor, mGyroSensor, mMagSensor;
+        OutputStreamWriter mAccLogWriter, mGyroLogWriter, mMagLogWriter;
+
+        private float[] mRTemp = new float[16];
+
+        RawSensorLogger(File recPath) {
+            mRecPath = recPath;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+
+            mAccSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
+            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE_UNCALIBRATED);
+            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED);
+
+            mSensorManager.registerListener(this, mAccSensor, SENSOR_RATE);
+            mSensorManager.registerListener(this, mGyroSensor, SENSOR_RATE);
+            mSensorManager.registerListener(this, mMagSensor, SENSOR_RATE);
+
+            try {
+                mAccLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_acc.log")));
+                mGyroLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_uncal_gyro.log")));
+                mMagLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(new File(mRecPath, "raw_uncal_mag.log")));
+
+            } catch (FileNotFoundException e) {
+                Log.e(TAG, "Sensor log file open failed: " + e.toString());
+            }
+        }
+
+        /**
+         * Stop recording and clean up
+         */
+        public void end() {
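+            // ask the sensor FIFO to flush any batched events before unregistering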
+            mSensorManager.flush(this);
+            mSensorManager.unregisterListener(this);
+
+            try {
+                if (mAccLogWriter != null) {
+                    OutputStreamWriter writer = mAccLogWriter;
+                    mAccLogWriter = null;
+                    writer.close();
+                }
+                if (mGyroLogWriter != null) {
+                    OutputStreamWriter writer = mGyroLogWriter;
+                    mGyroLogWriter = null;
+                    writer.close();
+                }
+                if (mMagLogWriter != null) {
+                    OutputStreamWriter writer = mMagLogWriter;
+                    mMagLogWriter = null;
+                    writer.close();
+                }
+
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file close failed: " + e.toString());
+            }
+        }
+
+        @Override
+        public void onAccuracyChanged(Sensor sensor, int i) {
+            // do not care
+        }
+
+        @Override
+        public void onSensorChanged(SensorEvent event) {
+            OutputStreamWriter writer=null;
+            switch(event.sensor.getType()) {
+                case Sensor.TYPE_ACCELEROMETER:
+                    writer = mAccLogWriter;
+                    break;
+                case Sensor.TYPE_GYROSCOPE_UNCALIBRATED:
+                    writer = mGyroLogWriter;
+                    break;
+                case Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED:
+                    writer = mMagLogWriter;
+                    break;
+
+            }
+            if (writer != null) {
+                float[] data = event.values;
+                try {
+                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
+                        writer.write(String.format("%d %f %f %f\r\n",
+                                event.timestamp, data[0], data[1], data[2]));
+                    } else { // TYPE_GYROSCOPE_UNCALIBRATED and TYPE_MAGNETIC_FIELD_UNCALIBRATED
+                        writer.write(String.format("%d %f %f %f %f %f %f\r\n", event.timestamp,
+                                data[0], data[1], data[2], data[3], data[4], data[5]));
+                    }
+                } catch (IOException e) {
+                    Log.e(TAG, "Write to raw sensor log file failed.");
+                }
+
+            }
+        }
+    }
+
+    /**
+     *  Rotation sensor logger class
+     */
+    class RVSensorLogger implements SensorEventListener {
+        private final String TAG = "RVSensorLogger";
+
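+        // note: registerListener treats values other than the SENSOR_DELAY_* constants
+        // as a desired sampling period in microseconds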
+        private final static int SENSOR_RATE = 100;
+        RangeCoveredRegister mRegister;
+        int mAxis;
+        RVCVRecordActivity mActivity;
+
+        SensorManager mSensorManager;
+        Sensor mRVSensor;
+        OutputStreamWriter mLogWriter;
+
+        private float[] mRTemp = new float[16];
+
+        RVSensorLogger(RVCVRecordActivity activity) {
+            mActivity = activity;
+        }
+
+        /**
+         * Initialize and start recording
+         */
+        public void init() {
+            mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
+            mRVSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
+            mSensorManager.registerListener(this, mRVSensor, SENSOR_RATE);
+
+            try {
+                mLogWriter= new OutputStreamWriter(
+                        new FileOutputStream(mActivity.getSensorLogFilePath()));
+            } catch (FileNotFoundException e) {
+                Log.e(TAG, "Sensor log file open failed: " + e.toString());
+            }
+        }
+
+        /**
+         * Stop recording and clean up
+         */
+        public void end() {
+            mSensorManager.flush(this);
+            mSensorManager.unregisterListener(this);
+
+            try {
+                if (mLogWriter != null) {
+                    OutputStreamWriter writer = mLogWriter;
+                    mLogWriter = null;
+                    writer.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file close failed: " + e.toString());
+            }
+
+            updateRegister(null, AXIS_NONE);
+        }
+
+        private void onNewData(float[] data, long timestamp) {
+            // LOG
+            try {
+                if (mLogWriter != null) {
+                    mLogWriter.write(String.format("%d %f %f %f %f\r\n", timestamp,
+                            data[3], data[0], data[1], data[2]));
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "Sensor log file write failed: " + e.toString());
+            }
+
+            // Update UI
+            if (mRegister != null) {
+                int d = 0;
+                int dx, dy, dz;
+                boolean valid = false;
+                SensorManager.getRotationMatrixFromVector(mRTemp, data);
+
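+                // mRTemp is a 4x4 row-major rotation matrix: asin of [8]=R20 and [9]=R21
+                // gives the tilt about X/Y, and atan2([4]=R10, [0]=R00) gives the yaw;
+                // all converted to integer degrees below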
+                dx = (int)(Math.asin(mRTemp[8])*(180.0/Math.PI));
+                dy = (int)(Math.asin(mRTemp[9])*(180.0/Math.PI));
+                dz = (int)((Math.atan2(mRTemp[4], mRTemp[0])+Math.PI)*(180.0/Math.PI));
+
+                switch(mAxis) {
+                    case SensorManager.AXIS_X:
+                        d = dx;
+                        valid = (Math.abs(dy) < 30);
+                        break;
+                    case SensorManager.AXIS_Y:
+                        d = dy;
+                        valid = (Math.abs(dx) < 30);
+                        break;
+                    case SensorManager.AXIS_Z:
+                        d = dz;
+                        valid = (Math.abs(dx) < 20 && Math.abs(dy) < 20);
+                        break;
+                }
+
+                if (valid) {
+                    mRegister.update(d);
+                    mActivity.redrawIndicator();
+                }
+            }
+
+        }
+
+        public void updateRegister(RangeCoveredRegister reg, int axis) {
+            mRegister = reg;
+            mAxis = axis;
+        }
+
+
+        @Override
+        public void onAccuracyChanged(Sensor sensor, int i) {
+            // do not care
+        }
+
+        @Override
+        public void onSensorChanged(SensorEvent event) {
+            if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
+                onNewData(event.values, event.timestamp);
+            }
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////////////////////////////
+
+    /**
+     * Controls the overall logic of the record procedure: first the X direction,
+     * then the Y direction, and then the Z direction.
+     */
+    class RecordProcedureController implements Runnable {
+        private static final boolean LOCAL_LOGV = false;
+
+        private final RVCVRecordActivity mActivity;
+        private Thread mThread = null;
+
+        RecordProcedureController(RVCVRecordActivity activity) {
+            mActivity = activity;
+            mThread = new Thread(this);
+            mThread.start();
+        }
+
+        /**
+         * Run the record procedure
+         */
+        public void run() {
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Started.");
+            //start recording & logging
+            delay(2000);
+
+            init();
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread init() finished.");
+
+            // test 3 axes
+            // The recording is in Y-X-Z order because the UI element design uses the
+            // opposite definition of the X and Y axes. To ensure the user sees X, Y, Z,
+            // the order is flipped here.
+            recordAxis(SensorManager.AXIS_Y);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 0 finished.");
+
+            recordAxis(SensorManager.AXIS_X);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 1 finished.");
+
+            recordAxis(SensorManager.AXIS_Z);
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread axis 2 finished.");
+
+            delay(1000);
+            end();
+            if (LOCAL_LOGV) Log.v(TAG, "Controller Thread End.");
+        }
+
+        private void delay(int milli) {
+            try{
+                Thread.sleep(milli);
+            } catch(InterruptedException e) {
+                if (LOCAL_LOGV) Log.v(TAG, "Controller Thread Interrupted.");
+            }
+        }
+        private void init() {
+            // start video recording
+            mActivity.startRecordVideo();
+
+            // start sensor logging & listening
+            mActivity.startRecordSensor();
+        }
+
+        private void end() {
+            // stop video recording
+            mActivity.stopRecordVideo();
+
+            // stop sensor logging
+            mActivity.stopRecordSensor();
+
+            // notify ui complete
+            runOnUiThread(new Runnable(){
+                public void run() {
+                    mActivity.notifyComplete();
+                }
+            });
+        }
+
+        private void recordAxis(int axis) {
+            // delay 1 second to let the user get ready
+            delay(1000);
+
+            // change ui
+            mActivity.switchAxisAsync(axis);
+
+            // play start sound
+            mActivity.playNotifySound(0);
+
+            // wait until axis covered
+            mActivity.waitUntilCovered(axis);
+
+            // play stop sound
+            mActivity.playNotifySound(1);
+        }
+
+        /**
+         * Force quit
+         */
+        public void quit() {
+            mThread.interrupt();
+            try {
+                if (LOCAL_LOGV) Log.v(TAG, "Wait for controller to end");
+
+                // stop video recording
+                mActivity.stopRecordVideo();
+
+                // stop sensor logging
+                mActivity.stopRecordSensor();
+
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
new file mode 100644
index 0000000..128aaa3
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckAnalyzer.java
@@ -0,0 +1,1290 @@
+package com.android.cts.verifier.sensors;
+
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Debug;
+import android.os.Environment;
+import android.util.JsonWriter;
+import android.util.Log;
+
+import org.opencv.core.Mat;
+import org.opencv.core.CvType;
+import org.opencv.core.MatOfDouble;
+import org.opencv.core.MatOfFloat;
+import org.opencv.core.MatOfPoint2f;
+import org.opencv.core.MatOfPoint3f;
+import org.opencv.core.Size;
+import org.opencv.highgui.Highgui;
+import org.opencv.imgproc.Imgproc;
+import org.opencv.calib3d.Calib3d;
+import org.opencv.core.Core;
+
+import org.json.JSONObject;
+import org.json.JSONException;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+import android.opengl.GLES20;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ *  This class does analysis on the recorded RVCVCXCheck data sets.
+ */
+public class RVCVXCheckAnalyzer {
+    private static final String TAG = "RVCXAnalysis";
+    private static final boolean LOCAL_LOGV = false;
+    private static final boolean LOCAL_LOGD = true;
+    private final String mPath;
+
+    private static final boolean OUTPUT_DEBUG_IMAGE = false;
+    private static final double VALID_FRAME_THRESHOLD = 0.8;
+    private static final double REPROJECTION_THRESHOLD = 4.0;
+    private static final boolean FORCE_CV_ANALYSIS  = false;
+    private static final boolean TRACE_VIDEO_ANALYSIS = false;
+    private static final double DECIMATION_FPS_TARGET = 15.0;
+
+    RVCVXCheckAnalyzer(String path)
+    {
+        mPath = path;
+    }
+
+    /**
+     * A class that contains the analysis results
+     */
+    class AnalyzeReport {
+        public boolean error=true;
+        public String reason = "incomplete";
+
+        // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
+        // unit in rad
+        public double roll_rms_error;
+        public double pitch_rms_error;
+        public double yaw_rms_error;
+
+        // roll pitch yaw max error
+        // unit in rad
+        public double roll_max_error;
+        public double pitch_max_error;
+        public double yaw_max_error;
+
+        // optimal time delta between the sensor and camera data sets for the best match
+        public double optimal_delta_t;
+        // the associated yaw offset based on initial values
+        public double yaw_offset;
+
+        public int n_of_frame;
+        public int n_of_valid_frame;
+
+        // both values below are in seconds
+        public double sensor_period_avg;
+        public double sensor_period_stdev;
+
+        /**
+         * Write JSON serialization to a file in case future processing needs the data
+         */
+        public void writeToFile(File file) {
+            try {
+                writeJSONToStream(new FileOutputStream(file));
+            } catch (FileNotFoundException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot create analyze report file.");
+            }
+        }
+
+        /**
+         * Get the JSON format serialization
+         * @return JSON format serialization as a String
+         */
+        @Override
+        public String toString() {
+            ByteArrayOutputStream s = new ByteArrayOutputStream();
+            writeJSONToStream(s);
+            return new String(s.toByteArray(),  java.nio.charset.StandardCharsets.UTF_8);
+        }
+
+        private void writeJSONToStream(OutputStream s) {
+            try{
+                JsonWriter writer =
+                        new JsonWriter(
+                                new OutputStreamWriter( s )
+                        );
+                writer.beginObject();
+                writer.setLenient(true);
+
+                writer.name("roll_rms_error").value(roll_rms_error);
+                writer.name("pitch_rms_error").value(pitch_rms_error);
+                writer.name("yaw_rms_error").value(yaw_rms_error);
+                writer.name("roll_max_error").value(roll_max_error);
+                writer.name("pitch_max_error").value(pitch_max_error);
+                writer.name("yaw_max_error").value(yaw_max_error);
+                writer.name("optimal_delta_t").value(optimal_delta_t);
+                writer.name("yaw_offset").value(yaw_offset);
+                writer.name("n_of_frame").value(n_of_frame);
+                writer.name("n_of_valid_frame").value(n_of_valid_frame);
+                writer.name("sensor_period_avg").value(sensor_period_avg);
+                writer.name("sensor_period_stdev").value(sensor_period_stdev);
+
+                writer.endObject();
+
+                writer.close();
+            } catch (IOException e) {
+                Log.e(TAG, "Error serializing analyze report to JSON");
+            } catch (IllegalArgumentException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Invalid parameter to write into JSON format");
+            }
+        }
+    }
+
+    /**
+     *  Process data set stored in the path specified in constructor
+     *  and return an analyze report to caller
+     *
+     *  @return An AnalyzeReport that contains detailed information about analysis
+     */
+    public AnalyzeReport processDataSet() {
+        int nframe; // number of frames in the video
+        int nslog;  // number of sensor log entries
+        int nvlog;  // number of video-generated log entries
+
+
+        AnalyzeReport report = new AnalyzeReport();
+
+        ArrayList<AttitudeRec> srecs = new ArrayList<>();
+        ArrayList<AttitudeRec> vrecs = new ArrayList<>();
+        ArrayList<AttitudeRec> srecs2 = new ArrayList<>();
+
+
+        final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;
+
+        if (use_solved) {
+            nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+            nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
+        } else {
+            nframe = analyzeVideo(vrecs);
+            nvlog = vrecs.size();
+
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
+            }
+            if (nvlog <= 0 || nframe <= 0) {
+                // invalid results
+                report.reason = "Unable to load recorded video.";
+                return report;
+            }
+            if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
+                // too many invalid frames
+                report.reason = "Too many invalid frames.";
+                return report;
+            }
+
+            fixFlippedAxis(vrecs);
+
+            nslog = loadSensorLog(srecs);
+        }
+
+        // Gradient descent would converge faster than this simple exhaustive search, but
+        // the run time is dominated by the vision processing, so it is not necessary.
+        double delta_t;
+        double min_rms = Double.MAX_VALUE;
+        double min_delta_t = 0.;
+        double min_yaw_offset = 0.;
+
+        // pre-allocation
+        for (AttitudeRec i: vrecs) {
+            srecs2.add(new AttitudeRec(0,0,0,0));
+        }
+
+        // find optimal offset
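+        // brute-force sweep of the time offset over [-2, 2) s in 10 ms steps; only roll
+        // and pitch enter the RMS here, as the yaw offset is unknown until alignment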
+        for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
+            double rms;
+            resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
+            rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
+            if (rms < min_rms) {
+                min_rms = rms;
+                min_delta_t = delta_t;
+                min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
+            }
+        }
+        // sample at optimal offset
+        resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);
+
+        if (!use_solved) {
+            dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
+            dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
+        }
+        dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
+        dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);
+
+        // fill report fields
+        report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
+        report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
+        report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));
+
+        report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
+        report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
+        report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);
+
+        report.optimal_delta_t = min_delta_t;
+        report.yaw_offset = (min_yaw_offset);
+
+        report.n_of_frame = nframe;
+        report.n_of_valid_frame = nvlog;
+
+        double [] sensor_period_stat = calcSensorPeriodStat(srecs);
+        report.sensor_period_avg = sensor_period_stat[0];
+        report.sensor_period_stdev = sensor_period_stat[1];
+
+        // output report to file and log in JSON format as well
+        report.writeToFile(new File(mPath, "report.json"));
+        if (LOCAL_LOGV)    Log.v(TAG, "Report in JSON:" + report.toString());
+
+        report.reason = "Completed";
+        report.error = false;
+        return report;
+    }
+
+    /**
+     * Generate pattern geometry like this one
+     * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
+     *
+     * @return Array of 3D points
+     */
+    private MatOfPoint3f asymmetricalCircleGrid(Size size) {
+        final int cn = 3;
+
+        int n = (int)(size.width * size.height);
+        float[] positions = new float[n * cn];
+        float unit = 0.02f;
+        MatOfPoint3f grid = new MatOfPoint3f();
+
+        for (int i = 0; i < size.height; i++) {
+            for (int j = 0; j < size.width * cn; j += cn) {
+                positions[(int) (i * size.width * cn + j + 0)] =
+                        (2 * (j / cn) + i % 2) * (float) unit;
+                positions[(int) (i * size.width * cn + j + 1)] =
+                        i * unit;
+                positions[(int) (i * size.width * cn + j + 2)] = 0;
+            }
+        }
+        grid.create(n, 1, CvType.CV_32FC3);
+        grid.put(0, 0, positions);
+        return grid;
+    }
+
+    /**
+     *  Create a camera intrinsic matrix using input parameters
+     *
+     *  The camera intrinsic matrix will be like:
+     *
+     *       +-                       -+
+     *       |  f   0    center.width  |
+     *   A = |  0   f    center.height |
+     *       |  0   0         1        |
+     *       +-                       -+
+     *
+     *  @return An approximated (not actually calibrated) camera matrix
+     */
+    private static Mat cameraMatrix(float f, Size center) {
+        final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
+        Mat m = new Mat(3,3, CvType.CV_64F);
+        m.put(0, 0, data);
+        return m;
+    }
+
+    /**
+     *  Attitude record in time roll pitch yaw format.
+     *
+     */
+    private class AttitudeRec {
+        public double time;
+        public double roll;
+        public double pitch;
+        public double yaw;
+
+        // ctor
+        AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
+            time = atime;
+            roll = aroll;
+            pitch = apitch;
+            yaw = ayaw;
+        }
+
+        // ctor
+        AttitudeRec(double atime, double [] rpy) {
+            time = atime;
+            roll = rpy[0];
+            pitch = rpy[1];
+            yaw = rpy[2];
+        }
+
+        // copy the values of another record into this one
+        void assign(AttitudeRec rec) {
+            time = rec.time;
+            roll = rec.roll;
+            pitch = rec.pitch;
+            yaw = rec.yaw;
+        }
+
+        // copy the roll/pitch/yaw values but use the time specified by atime
+        void assign(AttitudeRec rec, double atime) {
+            time = atime;
+            roll = rec.roll;
+            pitch = rec.pitch;
+            yaw = rec.yaw;
+        }
+
+        // set each field separately
+        void set(double atime, double aroll, double apitch, double ayaw) {
+            time = atime;
+            roll = aroll;
+            pitch = apitch;
+            yaw = ayaw;
+        }
+    }
+
+
+    /**
+     *  Load the sensor log (recorded as time + quaternion), convert each entry to
+     *  roll-pitch-yaw, and append it to an ArrayList<AttitudeRec>
+     *
+     *  @return the number of sensor log items
+     */
+    private int loadSensorLog(ArrayList<AttitudeRec> recs) {
+        File csvFile = new File(mPath, "sensor.log");
+        BufferedReader br=null;
+        String line;
+
+        // preallocate and reuse
+        double [] quat = new double[4];
+        double [] rpy = new double[3];
+
+        double t0 = -1;
+
+        try {
+            br = new BufferedReader(new FileReader(csvFile));
+            while ((line = br.readLine()) != null) {
+                //space separator
+                String[] items = line.split(" ");
+
+                if (items.length != 5) {
+                    recs.clear();
+                    return -1;
+                }
+
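+                // the log line format is "timestamp w x y z" (see RVSensorLogger.onNewData),
+                // so items[1] is the scalar part of the quaternion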
+                quat[0] = Double.parseDouble(items[1]);
+                quat[1] = Double.parseDouble(items[2]);
+                quat[2] = Double.parseDouble(items[3]);
+                quat[3] = Double.parseDouble(items[4]);
+
+                // convert quaternion to roll-pitch-yaw
+                quat2rpy(quat, rpy);
+
+                if (t0 < 0) {
+                    t0 = Long.parseLong(items[0])/1e9;
+                }
+                recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
+            }
+
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot find sensor logging data");
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot read sensor logging data");
+        } finally {
+            if (br != null) {
+                try {
+                    br.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return recs.size();
+    }
+
+    /**
+     * Parse and hold the video meta info stored in videometa.json
+     */
+    private class VideoMetaInfo {
+        public double fps;
+        public int frameWidth;
+        public int frameHeight;
+        public double fovWidth;
+        public double fovHeight;
+        public boolean valid = false;
+
+        VideoMetaInfo(File file) {
+
+            BufferedReader br=null;
+            String line;
+            String content="";
+            try {
+                br = new BufferedReader(new FileReader(file));
+                while ((line = br.readLine()) != null) {
+                    content = content +line;
+                }
+
+            } catch (FileNotFoundException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot find video meta info file");
+            } catch (IOException e) {
+                e.printStackTrace();
+                Log.e(TAG, "Cannot read video meta info file");
+            } finally {
+                if (br != null) {
+                    try {
+                        br.close();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+
+            if (content.isEmpty()) {
+                return;
+            }
+
+            try {
+                JSONObject json = new JSONObject(content);
+                frameWidth = json.getInt("width");
+                frameHeight = json.getInt("height");
+                fps = json.getDouble("frameRate");
+                fovWidth = json.getDouble("fovW")*Math.PI/180.0;
+                fovHeight = json.getDouble("fovH")*Math.PI/180.0;
+            } catch (JSONException e) {
+                return;
+            }
+
+            valid = true;
+
+        }
+    }
+
+
+
+    /**
+     * Debugging helper function. Load an ArrayList<AttitudeRec> from a file dumped out by
+     * dumpAttitudeRecs
+     */
+    private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+        BufferedReader br=null;
+        String line;
+        double time;
+        double [] rpy = new double[3];
+
+        try {
+            br = new BufferedReader(new FileReader(file));
+            while ((line = br.readLine()) != null) {
+                //space separator
+                String[] items = line.split(" ");
+
+                if (items.length != 4) {
+                    recs.clear();
+                    return -1;
+                }
+
+                time = Double.parseDouble(items[0]);
+                rpy[0] = Double.parseDouble(items[1]);
+                rpy[1] = Double.parseDouble(items[2]);
+                rpy[2] = Double.parseDouble(items[3]);
+
+                recs.add(new AttitudeRec(time, rpy));
+            }
+
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot find AttitudeRecs file specified.");
+        } catch (IOException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Read AttitudeRecs file failure");
+        } finally {
+            if (br != null) {
+                try {
+                    br.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return recs.size();
+    }
+
+    /**
+     * Debugging helper function. Dump an ArrayList<AttitudeRec> to a file
+     */
+    private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
+        OutputStreamWriter w=null;
+        try {
+            w = new OutputStreamWriter(new FileOutputStream(file));
+
+            for (AttitudeRec r : recs) {
+                w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
+            }
+            w.close();
+        } catch(FileNotFoundException e) {
+            e.printStackTrace();
+            Log.e(TAG, "Cannot create AttitudeRecs file.");
+        } catch (IOException e) {
+            Log.e(TAG, "Write AttitudeRecs file failure");
+        } finally {
+            if (w!=null) {
+                try {
+                    w.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    /**
+     *  Read the sensor log in ArrayList<AttitudeRec> format and compute statistics of the
+     *  sensor sample period: mean and standard deviation.
+     *
+     *  @return A double array with exactly 2 items: [0] is the mean and [1] is the
+     *  standard deviation of the sample period, in seconds.
+     */
+    private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec)   {
+        double tp = srec.get(0).time;
+        int i;
+        double sum = 0.0;
+        double sumsq = 0.0;
+        for(i=1; i<srec.size(); ++i) {
+            double dt;
+            dt = srec.get(i).time - tp;
+            sum += dt;
+            sumsq += dt*dt;
+            tp += dt;
+        }
+        double [] ret = new double[2];
+        ret[0] = sum/srec.size();
+        ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
+        return ret;
+    }
+
+    /**
+     * Flip the yaw axis, as the images are flipped upside down in OpenGL frames
+     */
+    private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs)   {
+        for (AttitudeRec i: vrecs) {
+            i.yaw = -i.yaw;
+        }
+    }
+
+    /**
+     *  Calculate the maximum error on the specified axis between two time-aligned (resampled)
+     *  ArrayList<AttitudeRec>'s. The yaw axis needs special treatment, as errors of 0 and
+     *  2*pi are the same thing.
+     *
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+     * @return Maximum error
+     */
+    private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)  {
+        // check if they are valid and comparable data
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+        }
+        // check input parameter validity
+        if (axis<0 || axis > 2) {
+            throw new IllegalArgumentException("Invalid data axis.");
+        }
+
+        int i;
+        double max = 0.0;
+        double diff = 0.0;
+        for(i=0; i<ra.size(); ++i) {
+            // make sure they are aligned data
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element " + i +
+                        " of the two inputs has a different time.");
+            }
+            switch(axis) {
+                case 0:
+                    diff = ra.get(i).roll - rb.get(i).roll; // they are always opposites of each other
+                    break;
+                case 1:
+                    diff = ra.get(i).pitch - rb.get(i).pitch;
+                    break;
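+                // yaw wrap-around: (4*pi + delta) % (2*pi) maps the difference into
+                // [0, 2*pi); |x - pi| - pi then folds it into [-pi, 0], and the abs
+                // below yields the wrapped angular distance in [0, pi]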
+                case 2:
+                    diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+                            -Math.PI)-Math.PI;
+                    break;
+            }
+            diff = Math.abs(diff);
+            if (diff>max) {
+                max = diff;
+            }
+        }
+        return max;
+    }
+
+    /**
+     *  Calculate the mean squared error on the specified axis between two time-aligned
+     *  (resampled) ArrayList<AttitudeRec>'s; callers take the square root to get RMS.
+     *  The yaw axis needs special treatment, as errors of 0 and 2*pi are the same thing.
+     *
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
+     * @return Mean squared error
+     */
+    private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
+        // check if they are valid and comparable data
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+        }
+        // check input parameter validity
+        if (axis<0 || axis > 2) {
+            throw new IllegalArgumentException("Invalid data axis.");
+        }
+
+        int i;
+        double sum = 0.0;
+        double diff = 0.0;
+        for(i=0; i<ra.size(); ++i) {
+            // check input data validity
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element " + i +
+                        " of the two inputs has a different time.");
+            }
+
+            switch(axis) {
+                case 0:
+                    diff = ra.get(i).roll - rb.get(i).roll;
+                    break;
+                case 1:
+                    diff = ra.get(i).pitch - rb.get(i).pitch;
+                    break;
+                case 2:
+                    diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
+                            Math.PI)-Math.PI;
+                    break;
+            }
+
+            sum += diff*diff;
+        }
+        return sum/ra.size();
+    }
+
+    /**
+     * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
+     *
+     * @param file File to write to
+     * @param ra  one ArrayList of AttitudeRec
+     * @param rb  the other ArrayList of AttitudeRec
+     */
+    private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
+        if (ra.size() != rb.size()) {
+            throw new ArrayIndexOutOfBoundsException("The two arrays have to be the same size");
+        }
+
+        int i;
+
+        ArrayList<AttitudeRec> rerr = new ArrayList<>();
+        for(i=0; i<ra.size(); ++i) {
+            if (ra.get(i).time != rb.get(i).time) {
+                throw new IllegalArgumentException("Element " + i
+                        + " of the two inputs has a different time.");
+            }
+
+            rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
+                    ra.get(i).pitch - rb.get(i).pitch,
+                    (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
+                            -Math.PI)-Math.PI)));
+
+        }
+        dumpAttitudeRecs(file, rerr);
+    }
+
+    /**
+     * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
+     *
+     * @param rec           the ArrayList of AttitudeRec to be sampled
+     * @param timebase      the other ArrayList of AttitudeRec that serves as time base
+     * @param delta_t       offset in time before resample
+     * @param yaw_offset    offset in yaw axis
+     * @param resampled     output ArrayList of AttitudeRec
+     */
+    private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
+            double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
+        int i;
+        int j = -1;
+        for(i=0; i<timebase.size(); i++) {
+            double time = timebase.get(i).time + delta_t;
+
+            while(j<rec.size()-1 && rec.get(j+1).time < time) j++;
+
+            if (j == -1) {
+                //use first
+                resampled.get(i).assign(rec.get(0), timebase.get(i).time);
+            } else if (j == rec.size()-1) {
+                // use last
+                resampled.get(i).assign(rec.get(j), timebase.get(i).time);
+            } else {
+                // do linear resample
+                double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
+                double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
+                double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
+                double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
+                resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
+            }
+        }
+    }
+
+    /**
+     * Analyze video frames using a computer vision approach and generate an
+     * ArrayList<AttitudeRec>
+     *
+     * @param recs  output ArrayList of AttitudeRec
+     * @return total number of frames in the video
+     */
+    private int analyzeVideo(ArrayList<AttitudeRec> recs) {
+        VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));
+
+        int decimation = 1;
+
+        if (meta.fps > DECIMATION_FPS_TARGET) {
+            decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
+            meta.fps /= decimation;
+        }
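+        // e.g. a 30 fps recording yields decimation = 2, for an effective analysis rate of 15 fps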
+
+        VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
+                new File(mPath, "video.mp4"), decimation); // process 1 of every 'decimation' frames
+
+
+        Mat frame;
+        Mat gray = new Mat();
+        int i = -1;
+
+        Size frameSize = videoDecoder.getSize();
+
+        if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
+            // this is very unlikely
+            return -1;
+        }
+
+        if (TRACE_VIDEO_ANALYSIS) {
+            Debug.startMethodTracing("cvprocess");
+        }
+
+        Size patternSize = new Size(4,11);
+
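+        // pinhole model: focal length in pixels fc = (W/2) / tan(fovW/2), with the
+        // principal point assumed at the image center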
+        float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
+        Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
+        MatOfDouble coeff = new MatOfDouble(); // dummy
+
+        MatOfPoint2f centers = new MatOfPoint2f();
+        MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
+        Mat rvec = new MatOfFloat();
+        Mat tvec = new MatOfFloat();
+
+        MatOfPoint2f reprojCenters = new MatOfPoint2f();
+
+        if (LOCAL_LOGV) {
+            Log.v(TAG, "Camera Mat = \n" + camMat.dump());
+        }
+
+        long startTime = System.nanoTime();
+
+        while ((frame = videoDecoder.getFrame()) !=null) {
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "got a frame " + i);
+            }
+
+            // increment up front, since several paths below skip the rest of
+            // this loop iteration with 'continue'
+            i++;
+
+            // convert to grayscale manually; by default findCirclesGridDefault converts with
+            // COLOR_BGR2GRAY, which does not match these RGB frames
+            Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);
+
+            boolean foundPattern = Calib3d.findCirclesGridDefault(
+                    gray,  patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
+
+            if (!foundPattern) {
+                // skip to next frame
+                continue;
+            }
+
+            if (OUTPUT_DEBUG_IMAGE) {
+                Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
+            }
+
+            // figure out the extrinsic parameters using the ground-truth 3D points and the
+            // pixel positions of the blobs found by findCirclesGrid; an estimated camera
+            // matrix and zero distortion are assumed
+            boolean foundSolution =
+                    Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
+                            false, Calib3d.CV_ITERATIVE);
+
+            if (!foundSolution) {
+                // skip to next frame
+                continue;
+            }
+
+            // reproject the points to evaluate the accuracy of the solvePnP result
+            Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);
+
+            // the error is evaluated as an L2 norm, i.e. the real pixel-distance error / sqrt(2)
+            double error = Core.norm(centers, reprojCenters, Core.NORM_L2);
+
+            if (LOCAL_LOGV) {
+                Log.v(TAG, "Found attitude, re-projection error = " + error);
+            }
+
+            // if error is reasonable, add it into the results
+            if (error < REPROJECTION_THRESHOLD) {
+                double [] rv = new double[3];
+                rvec.get(0,0, rv);
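+                // rv is the Rodrigues rotation vector (axis scaled by angle) from solvePnP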
+                recs.add(new AttitudeRec((double) i / meta.fps, rodr2rpy(rv)));
+            }
+
+            if (OUTPUT_DEBUG_IMAGE) {
+                Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
+                Highgui.imwrite(Environment.getExternalStorageDirectory().getPath()
+                        + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
+            }
+        }
+
+        if (LOCAL_LOGV) {
+            Log.v(TAG, "Finished decoding");
+        }
+
+        if (TRACE_VIDEO_ANALYSIS) {
+            Debug.stopMethodTracing();
+        }
+
+        if (LOCAL_LOGV) {
+            // timing analysis
+            double totalTime = (System.nanoTime() - startTime) / 1e9;
+            Log.v(TAG, "Total time: " + totalTime + "s, per-frame time: " + totalTime / i + "s");
+        }
+        return i;
+    }
+
+    /**
+     * OpenCV for Android does not yet support VideoCapture from a file.
+     * This is a makeshift solution until that is supported.
+     * One known issue is that glReadPixels is quite slow: around 6.5ms for a 720p frame.
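+     *
+     * Intended use (illustrative sketch, mirroring the loop in analyzeVideo):
+     * <pre>
+     *     VideoDecoderForOpenCV decoder = new VideoDecoderForOpenCV(videoFile, 1);
+     *     Mat frame;
+     *     while ((frame = decoder.getFrame()) != null) {
+     *         // process the RGBA frame; getFrame() returns null at end of stream
+     *     }
+     * </pre>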
+     */
+    private class VideoDecoderForOpenCV implements Runnable {
+        private MediaExtractor extractor=null;
+        private MediaCodec decoder=null;
+        private CtsMediaOutputSurface surface=null;
+
+        private MatBuffer mMatBuffer;
+
+        private final File mVideoFile;
+
+        private boolean valid;
+        private Object setupSignal;
+        private boolean setupDone; // guards against a missed setup notification
+
+        private Thread mThread;
+        private int mDecimation;
+
+        /**
+         * Constructor
+         * @param file video file
+         * @param decimation process every "decimation"-th frame
+         */
+        VideoDecoderForOpenCV(File file, int decimation) {
+            mVideoFile = file;
+            mDecimation = decimation;
+            valid = false;
+
+            start();
+        }
+
+        /**
+         * Constructor
+         * @param file video file
+         */
+        VideoDecoderForOpenCV(File file)   {
+            this(file, 1);
+        }
+
+        /**
+         * Test if the video decoder is in a valid state, ready to output video.
+         * @return true if the decoder set up successfully, false otherwise.
+         */
+        public boolean isValid() {
+            return valid;
+        }
+
+        private void start() {
+            setupSignal = new Object();
+            mThread = new Thread(this);
+            mThread.start();
+
+            synchronized (setupSignal) {
+                // loop on a flag to guard against a missed notification: the
+                // decoder thread may finish setup before we start waiting
+                while (!setupDone) {
+                    try {
+                        setupSignal.wait();
+                    } catch (InterruptedException e) {
+                        Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
+                        break;
+                    }
+                }
+            }
+        }
+
+        private void stop() {
+            if (mThread != null) {
+                mThread.interrupt();
+                try {
+                    mThread.join();
+                } catch (InterruptedException e) {
+                    Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
+                }
+                if (decoder != null) {
+                    try {
+                        decoder.stop();
+                    } catch (IllegalStateException e) {
+                        Log.e(TAG, "Video decoder is not in a state that can be stopped");
+                    }
+                }
+            }
+            mThread = null;
+        }
+
+        void teardown() {
+            if (decoder!=null) {
+                decoder.release();
+                decoder = null;
+            }
+            if (surface!=null) {
+                surface.release();
+                surface = null;
+            }
+            if (extractor!=null) {
+                extractor.release();
+                extractor = null;
+            }
+        }
+
+        void setup() {
+            int width=0, height=0;
+
+            extractor = new MediaExtractor();
+
+            try {
+                extractor.setDataSource(mVideoFile.getPath());
+            } catch (IOException e) {
+                return;
+            }
+
+            for (int i = 0; i < extractor.getTrackCount(); i++) {
+                MediaFormat format = extractor.getTrackFormat(i);
+                String mime = format.getString(MediaFormat.KEY_MIME);
+
+                if (mime.startsWith("video/")) {
+                    // only video tracks are guaranteed to carry width/height
+                    width = format.getInteger(MediaFormat.KEY_WIDTH);
+                    height = format.getInteger(MediaFormat.KEY_HEIGHT);
+                    extractor.selectTrack(i);
+                    try {
+                        decoder = MediaCodec.createDecoderByType(mime);
+                    } catch (IOException e) {
+                        continue;
+                    }
+                    // Decode to surface
+                    //decoder.configure(format, surface, null, 0);
+
+                    // Decode to offscreen surface
+                    surface = new CtsMediaOutputSurface(width, height);
+                    mMatBuffer = new MatBuffer(width, height);
+
+                    decoder.configure(format, surface.getSurface(), null, 0);
+                    break;
+                }
+            }
+
+            if (decoder == null) {
+                Log.e("VideoDecoderForOpenCV", "Can't find video info!");
+                return;
+            }
+            valid = true;
+        }
+
+        @Override
+        public void run() {
+            setup();
+
+            synchronized (setupSignal) {
+                setupDone = true;
+                setupSignal.notify();
+            }
+
+            if (!valid) {
+                return;
+            }
+
+            decoder.start();
+
+            ByteBuffer[] inputBuffers = decoder.getInputBuffers();
+            ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
+            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+
+            boolean isEOS = false;
+            final long timeoutUs = 10000;
+
+            int iframe = 0;
+
+            while (!Thread.interrupted()) {
+                if (!isEOS) {
+                    int inIndex = decoder.dequeueInputBuffer(timeoutUs);
+                    if (inIndex >= 0) {
+                        ByteBuffer buffer = inputBuffers[inIndex];
+                        int sampleSize = extractor.readSampleData(buffer, 0);
+                        if (sampleSize < 0) {
+                            if (LOCAL_LOGD) {
+                                Log.d("VideoDecoderForOpenCV",
+                                        "InputBuffer BUFFER_FLAG_END_OF_STREAM");
+                            }
+                            decoder.queueInputBuffer(inIndex, 0, 0, 0,
+                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+                            isEOS = true;
+                        } else {
+                            decoder.queueInputBuffer(inIndex, 0, sampleSize,
+                                    extractor.getSampleTime(), 0);
+                            extractor.advance();
+                        }
+                    }
+                }
+
+                int outIndex = decoder.dequeueOutputBuffer(info, timeoutUs);
+                MediaFormat outFormat;
+                switch (outIndex) {
+                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
+                        }
+                        outputBuffers = decoder.getOutputBuffers();
+                        break;
+                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+                        outFormat = decoder.getOutputFormat();
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
+                        }
+                        break;
+                    case MediaCodec.INFO_TRY_AGAIN_LATER:
+                        if (LOCAL_LOGD) {
+                            Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
+                        }
+                        break;
+                    default:
+
+                        ByteBuffer buffer = outputBuffers[outIndex];
+                        boolean doRender = (info.size != 0);
+
+                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
+                        // to SurfaceTexture to convert to a texture.  The API doesn't
+                        // guarantee that the texture will be available before the call
+                        // returns, so we need to wait for the onFrameAvailable callback to
+                        // fire.  If we don't wait, we risk rendering from the previous frame.
+                        decoder.releaseOutputBuffer(outIndex, doRender);
+
+                        if (doRender) {
+                            surface.awaitNewImage();
+                            surface.drawImage();
+                            if (LOCAL_LOGD) {
+                                Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
+                            }
+                            if ((iframe++ % mDecimation) == 0) {
+                                //Send the frame for processing
+                                mMatBuffer.put();
+                            }
+                        }
+                        break;
+                }
+
+                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                    if (LOCAL_LOGD) {
+                        Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
+                    }
+                    break;
+                }
+            }
+            mMatBuffer.invalidate();
+
+            decoder.stop();
+
+            teardown();
+            mThread = null;
+        }
+
+
+        /**
+         * Get next valid frame
+         * @return Frame in OpenCV mat
+         */
+        public Mat getFrame() {
+            return mMatBuffer.get();
+        }
+
+        /**
+         * Get the size of the frame
+         * @return size of the frame
+         */
+        Size getSize() {
+            return mMatBuffer.getSize();
+        }
+
+        /**
+         * A synchronized buffer
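+         * <p>
+         * A single-slot producer/consumer hand-off: the decoder thread fills
+         * the slot with put() (reading pixels back from the GL surface) and
+         * the analysis thread drains it with get(); the "full" flag blocks
+         * the producer until the previous frame has been consumed.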
+         */
+        class MatBuffer {
+            private Mat mat;
+            private byte[] bytes;
+            private ByteBuffer buf;
+            private boolean full;
+
+            private int mWidth, mHeight;
+            private boolean mValid = false;
+
+            MatBuffer(int width, int height) {
+                mWidth = width;
+                mHeight = height;
+
+                mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
+                buf = ByteBuffer.allocateDirect(width*height*4);
+                bytes = new byte[width*height*4];
+
+                mValid = true;
+                full = false;
+            }
+
+            public synchronized void invalidate() {
+                mValid = false;
+                notifyAll();
+            }
+
+            public synchronized Mat get() {
+
+                if (!mValid) return null;
+                while (!full) {
+                    try {
+                        wait();
+                        if (!mValid) return null;
+                    } catch (InterruptedException e) {
+                        return null;
+                    }
+                }
+                mat.put(0,0, bytes);
+                full = false;
+                notifyAll();
+                return mat;
+            }
+            public synchronized void put() {
+                while (full) {
+                    try {
+                        wait();
+                    } catch (InterruptedException e) {
+                        Log.e(TAG, "Interrupted when waiting for space in buffer");
+                    }
+                }
+                GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
+                        GL10.GL_UNSIGNED_BYTE, buf);
+                buf.get(bytes);
+                buf.rewind();
+
+                full = true;
+                notifyAll();
+            }
+
+            public Size getSize() {
+                if (mValid) {
+                    return mat.size();
+                }
+                return new Size();
+            }
+        }
+    }
+
+
+    /* a small set of math functions */
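+    // Conventions assumed throughout (matching the formulas below):
+    // quaternions are [w, x, y, z]; roll/pitch/yaw are Tait-Bryan angles
+    // about the x/y/z axes; Rodrigues vectors encode axis * angle, as
+    // produced by solvePnP.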
+    private static double[] quat2rpy(double[] q) {
+        double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+                Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+                Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+        return rpy;
+    }
+
+    private static void quat2rpy(double[] q, double[] rpy) {
+        rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
+        rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
+        rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
+    }
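+
+    // Worked example: a pure 90-degree yaw, q = [cos(pi/4), 0, 0, sin(pi/4)],
+    // maps to rpy = {0, 0, pi/2} under the formulas above.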
+
+    private static Mat quat2rpy(Mat quat) {
+        double [] q = new double[4];
+        quat.get(0,0,q);
+
+        double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
+                Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
+                Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
+
+        Mat rpym = new Mat(3,1, CvType.CV_64F);
+        rpym.put(0,0, rpy);
+        return rpym;
+    }
+
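+    // Note: the rodr2quat variants divide by the rotation angle t, so they
+    // assume a non-zero rotation; an exactly-zero Rodrigues vector would
+    // produce NaNs.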
+    private static double[] rodr2quat(double[] r) {
+        double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+        double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+                Math.sin(t/2)*r[2]/t};
+        return quat;
+    }
+
+    private static void rodr2quat(double[] r, double[] quat) {
+        double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
+        quat[0] = Math.cos(t/2);
+        quat[1] = Math.sin(t/2)*r[0]/t;
+        quat[2] = Math.sin(t/2)*r[1]/t;
+        quat[3] = Math.sin(t/2)*r[2]/t;
+    }
+
+    private static Mat rodr2quat(Mat rodr) {
+        double t = Core.norm(rodr);
+        double [] r = new double[3];
+        rodr.get(0,0,r);
+
+        double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
+                Math.sin(t/2)*r[2]/t};
+        Mat quatm = new Mat(4,1, CvType.CV_64F);
+        quatm.put(0, 0, quat);
+        return quatm;
+    }
+
+    private static double[] rodr2rpy(double[] r) {
+        return quat2rpy(rodr2quat(r));
+    }
+    //////////////////
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
new file mode 100644
index 0000000..ffb0d85
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/RVCVXCheckTestActivity.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.sensors;
+
+
+import android.hardware.cts.helpers.SensorTestStateNotSupportedException;
+import android.os.Bundle;
+
+import com.android.cts.verifier.sensors.base.SensorCtsVerifierTestActivity;
+import com.android.cts.verifier.sensors.helpers.OpenCVLibrary;
+
+import junit.framework.Assert;
+
+import android.content.Intent;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * This test (Rotation Vector - Computer Vision Cross Check, or RVCVXCheck for
+ * short) verifies that the mobile device can detect its own orientation
+ * reasonably accurately.
+ *
+ * Currently only the ROTATION_VECTOR sensor is used.
+ *
+ */
+public class RVCVXCheckTestActivity
+        extends SensorCtsVerifierTestActivity {
+    public RVCVXCheckTestActivity() {
+        super(RVCVXCheckTestActivity.class);
+    }
+
+    CountDownLatch mRecordActivityFinishedSignal = null;
+
+    private static final int REQ_CODE_TXCVRECORD = 0x012345678;
+    private static final boolean TEST_USING_DEBUGGING_DATA = false;
+    private static final String PATH_DEBUGGING_DATA = "/sdcard/RXCVRecData/150313-014443/";
+
+    private String mRecPath;
+
+    RVCVXCheckAnalyzer.AnalyzeReport mReport = null;
+
+    private boolean mRecordSuccessful = false;
+    private boolean mOpenCVLoadSuccessful = false;
+
+
+    /**
+     * The activity setup collects all the data required by the test cases.
+     * This approach allows all sensors to be tested at once.
+     */
+    @Override
+    protected void activitySetUp() throws InterruptedException {
+
+        mRecPath = "";
+
+        showUserMessage("Loading OpenCV Library...");
+        int retry = 10;
+
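+        // Poll for up to ~1 second (10 x 100 ms) while the async loader finishes.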
+        while (retry-- > 0) {
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                //
+            }
+            if (OpenCVLibrary.isLoaded()) {
+                break;
+            }
+        }
+        if (!OpenCVLibrary.isLoaded()) {
+            // failed requirement test
+            clearText();
+            return;
+        }
+        showUserMessage("OpenCV Library Successfully Loaded");
+
+        mOpenCVLoadSuccessful = true;
+
+        if (TEST_USING_DEBUGGING_DATA) {
+            mRecPath = PATH_DEBUGGING_DATA;
+
+            // assume the data is there already
+            mRecordSuccessful = true;
+        } else {
+            showUserMessage("Take the test as instructed below:\n" +
+                "1. Print out the test pattern and place it on a "+
+                   "horizontal surface.\n" +
+                "2. Start the test and align the yellow square on the screen "+
+                   "roughly to the yellow sqaure.\n" +
+                "3. Follow the prompt to rotate the phone while keeping the "+
+                   "entire test pattern inside view of camera. This requires " +
+                   "orbiting the phone around and aiming the "+
+                   "camera at the test pattern at the same time.\n" +
+                "4. Wait patiently for the analysis to finish.\n");
+
+            waitForUserToContinue();
+
+            // prepare sync signal
+            mRecordActivityFinishedSignal = new CountDownLatch(1);
+
+            // record both sensor and camera
+            Intent intent = new Intent(this, RVCVRecordActivity.class);
+            startActivityForResult(intent, REQ_CODE_TXCVRECORD);
+
+            // wait for record finish
+            mRecordActivityFinishedSignal.await();
+
+            if ("".equals(mRecPath)) {
+                showUserMessage("Recording failed or exited prematurely.");
+                waitForUserToContinue();
+            } else {
+                showUserMessage("Recording is done!");
+                showUserMessage("Result are in path: " + mRecPath);
+                mRecordSuccessful = true;
+            }
+        }
+
+
+        if (mRecordSuccessful) {
+            showUserMessage("Please wait for the analysis ... \n"+
+                            "It may take a few minutes, you will be noted when "+
+                            "its finished by sound and vibration. ");
+
+            // Analyze the recorded video and sensor data using RVCVXCheckAnalyzer
+            RVCVXCheckAnalyzer analyzer = new RVCVXCheckAnalyzer(mRecPath);
+            mReport = analyzer.processDataSet();
+
+            playSound();
+            vibrate(500);
+
+            if (mReport == null) {
+                showUserMessage("Analysis failed due to unknown reason!");
+            } else {
+                if (mReport.error) {
+                    showUserMessage("Analysis failed: " + mReport.reason);
+                } else {
+                    showUserMessage(String.format("Analysis finished!\n" +
+                                    "Roll error (Rms, max) = %4.3f, %4.3f rad\n" +
+                                    "Pitch error (Rms, max) = %4.3f, %4.3f rad\n" +
+                                    "Yaw error (Rms, max) = %4.3f, %4.3f rad\n" +
+                                    "N of Frame (valid, total) = %d, %d\n" +
+                                    "Sensor period (mean, stdev) = %4.3f, %4.3f ms\n" +
+                                    "Time offset: %4.3f s \n" +
+                                    "Yaw offset: %4.3f rad \n\n",
+                            mReport.roll_rms_error, mReport.roll_max_error,
+                            mReport.pitch_rms_error, mReport.pitch_max_error,
+                            mReport.yaw_rms_error, mReport.yaw_max_error,
+                            mReport.n_of_valid_frame, mReport.n_of_frame,
+                            mReport.sensor_period_avg * 1000.0, mReport.sensor_period_stdev*1000.0,
+                            mReport.optimal_delta_t, mReport.yaw_offset));
+                    showUserMessage("Please click next after details reviewed.");
+                    waitForUserToContinue();
+                }
+            }
+        }
+        clearText();
+    }
+
+    /**
+    Receive the result from the RVCVRecordActivity: the path where the recorded
+    video and sensor data are stored.
+    */
+    @Override
+    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+        // Check which request we're responding to
+        if (requestCode == REQ_CODE_TXCVRECORD) {
+            // Make sure the request was successful
+
+            if (resultCode == RESULT_OK) {
+                mRecPath = data.getData().getPath();
+            }
+
+            // notify it is finished
+            mRecordActivityFinishedSignal.countDown();
+        }
+        super.onActivityResult(requestCode, resultCode, data);
+    }
+
+    /**
+     * Test cases.
+     */
+
+    public String test00OpenCV() throws Throwable {
+
+        String message = "OpenCV is loaded";
+        Assert.assertTrue("OpenCV library cannot be loaded.", mOpenCVLoadSuccessful);
+        return message;
+    }
+
+
+    public String test01Recording() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+
+        String message = "Record is successful.";
+        Assert.assertTrue("Record is not successful.", mRecordSuccessful);
+        return message;
+    }
+
+    public String test02Analysis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+
+        String message = "Analysis result: " + mReport.reason;
+        Assert.assertTrue(message, (mReport!=null && !mReport.error));
+        return message;
+    }
+
+    public String test1RollAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Roll Axis Accuracy";
+
+        Assert.assertEquals("Roll RMS error", 0.0, mReport.roll_rms_error, 0.15);
+        Assert.assertEquals("Roll max error", 0.0, mReport.roll_max_error, 0.35);
+        return message;
+    }
+
+    public String test2PitchAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Pitch Axis Accuracy";
+
+        Assert.assertEquals("Pitch RMS error", 0.0, mReport.pitch_rms_error, 0.15);
+        Assert.assertEquals("Pitch max error", 0.0, mReport.pitch_max_error, 0.35);
+        return message;
+    }
+
+    public String test3YawAxis() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Yaw Axis Accuracy";
+
+        Assert.assertEquals("Yaw RMS error", 0.0, mReport.yaw_rms_error, 0.2);
+        Assert.assertEquals("Yaw max error", 0.0, mReport.yaw_max_error, 0.4);
+        return message;
+    }
+
+    public String test4SensorPeriod() throws Throwable {
+
+        loadOpenCVSuccessfulOrSkip();
+        recordSuccessfulOrSkip();
+        analyzeSuccessfulOrSkip();
+
+        String message = "Test Sensor Period";
+
+        Assert.assertEquals("Sensor Period Mean", 5e-3, mReport.sensor_period_avg, 0.2e-3);
+        Assert.assertEquals("Sensor Period Stdev", 0.0, mReport.sensor_period_stdev, 0.5e-3);
+        return message;
+    }
+
+    private void loadOpenCVSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (!mOpenCVLoadSuccessful)
+            throw new SensorTestStateNotSupportedException(
+                    "Skipped because the OpenCV library could not be loaded.");
+    }
+
+    private void recordSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (!mRecordSuccessful)
+            throw new SensorTestStateNotSupportedException("Skipped due to record failure.");
+    }
+
+    private void analyzeSuccessfulOrSkip() throws SensorTestStateNotSupportedException {
+        if (mReport == null || mReport.error)
+            throw new SensorTestStateNotSupportedException("Skipped due to CV Analysis failure.");
+    }
+
+    /*
+     *  This function serves as a proxy because appendText is marked as
+     *  deprecated. When appendText is removed, this function will get a
+     *  different implementation.
+     *
+     */
+    void showUserMessage(String s) {
+        appendText(s);
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+
+        super.onCreate(savedInstanceState);
+
+        // GlSurfaceView is not necessary for this test
+        closeGlSurfaceView();
+
+        OpenCVLibrary.loadAsync(this);
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
new file mode 100644
index 0000000..2f5c873
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/sensors/helpers/OpenCVLibrary.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.cts.verifier.sensors.helpers;
+
+import android.content.Context;
+import android.os.Looper;
+import android.util.Log;
+
+import org.opencv.android.BaseLoaderCallback;
+import org.opencv.android.LoaderCallbackInterface;
+import org.opencv.android.OpenCVLoader;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * OpenCV library loader class
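+ * <p>
+ * Intended use (illustrative sketch): kick off loading early, then poll
+ * before calling into OpenCV, e.g.
+ * <pre>
+ *     OpenCVLibrary.loadAsync(context);   // e.g. in Activity.onCreate()
+ *     ...
+ *     if (OpenCVLibrary.isLoaded()) {
+ *         // safe to call OpenCV functions
+ *     }
+ * </pre>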
+ */
+public class OpenCVLibrary {
+
+    private static final String TAG = "OpenCVLibraryProbe";
+    private static boolean mLoaded = false;
+
+    /**
+     * Load OpenCV Library in async mode
+     * @param context Activity context
+     */
+    public static void loadAsync(Context context) {
+        // only need to load once
+        if (isLoaded()) return;
+
+        // Load the library through loader
+        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, context,
+                new BaseLoaderCallback(context) {
+                    @Override
+                    public void onManagerConnected(int status) {
+                        Log.v(TAG, "New Loading status: "+status);
+                        switch (status) {
+                            case LoaderCallbackInterface.SUCCESS: {
+                                mLoaded = true;
+                            }
+                            break;
+                            default: {
+                                super.onManagerConnected(status);
+                            }
+                            break;
+                        }
+                    }
+                });
+    }
+
+    /**
+     * Test if the library is loaded
+     * @return a boolean indicating whether the OpenCV library is loaded.
+     */
+    public static boolean isLoaded() {
+        return mLoaded;
+    }
+}