Add stress test for native media player
- Uses SurfaceTexture / OpenGL ES to display the rendered contents
- Repeatedly creates / destroys native player instances at different resolutions

Change-Id: I5cb6a107513b80ef6645352e7584390bbb544867
diff --git a/tests/tests/mediastress/Android.mk b/tests/tests/mediastress/Android.mk
index 9ed59c5..505d123 100644
--- a/tests/tests/mediastress/Android.mk
+++ b/tests/tests/mediastress/Android.mk
@@ -22,6 +22,8 @@
 
 LOCAL_JAVA_LIBRARIES := android.test.runner
 
+LOCAL_JNI_SHARED_LIBRARIES := libctsmediastress_jni
+
 LOCAL_SRC_FILES := $(call all-java-files-under, src)
 
 LOCAL_PACKAGE_NAME := CtsMediaStressTestCases
@@ -29,3 +31,6 @@
 LOCAL_SDK_VERSION := current
 
 include $(BUILD_CTS_PACKAGE)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
+
diff --git a/tests/tests/mediastress/AndroidManifest.xml b/tests/tests/mediastress/AndroidManifest.xml
index 5a4569c..1ee4104 100644
--- a/tests/tests/mediastress/AndroidManifest.xml
+++ b/tests/tests/mediastress/AndroidManifest.xml
@@ -34,6 +34,8 @@
                 <category android:name="android.intent.category.LAUNCHER"/>
             </intent-filter>
         </activity>
+        <activity android:name="android.mediastress.cts.NativeMediaActivity"
+                  android:label="NativeMedia" />
     </application>
     <instrumentation android:name="android.test.InstrumentationTestRunner"
             android:targetPackage="com.android.cts.mediastress"
diff --git a/tests/tests/mediastress/jni/Android.mk b/tests/tests/mediastress/jni/Android.mk
new file mode 100644
index 0000000..3913b54
--- /dev/null
+++ b/tests/tests/mediastress/jni/Android.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE    := libctsmediastress_jni
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := native-media-jni.cpp
+
+LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)
+
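+# The OpenMAX AL (OMXAL) headers come from the platform source tree.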
+LOCAL_CFLAGS += -Isystem/media/wilhelm/include
+
+LOCAL_SHARED_LIBRARIES := libandroid libnativehelper liblog libOpenMAXAL
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/tests/mediastress/jni/native-media-jni.cpp b/tests/tests/mediastress/jni/native-media-jni.cpp
new file mode 100644
index 0000000..8df937f
--- /dev/null
+++ b/tests/tests/mediastress/jni/native-media-jni.cpp
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Original code copied from NDK Native-media sample code */
+
+
+#include <assert.h>
+#include <jni.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+
+// for Android logging via __android_log_print
+#include <android/log.h>
+#define TAG "NativeMedia"
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+// for native media
+#include <OMXAL/OpenMAXAL.h>
+#include <OMXAL/OpenMAXAL_Android.h>
+
+// for native window JNI
+#include <android/native_window_jni.h>
+
+// engine interfaces
+static XAObjectItf engineObject = NULL;
+static XAEngineItf engineEngine = NULL;
+
+// output mix interfaces
+static XAObjectItf outputMixObject = NULL;
+
+// streaming media player interfaces
+static XAObjectItf             playerObj = NULL;
+static XAPlayItf               playerPlayItf = NULL;
+static XAAndroidBufferQueueItf playerBQItf = NULL;
+static XAStreamInformationItf  playerStreamInfoItf = NULL;
+static XAVolumeItf             playerVolItf = NULL;
+
+// number of required interfaces for the MediaPlayer creation
+#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf
+
+// video sink for the player
+static ANativeWindow* theNativeWindow = NULL;
+
+/**
+ * Macro that cleans up everything created so far and returns JNI_FALSE when cond is false
+ */
+#define RETURN_ON_ASSERTION_FAILURE(cond, env, clazz)                                   \
+        if (!(cond)) {                                                                  \
+            LOGE("assertion failure at file %s line %d", __FILE__, __LINE__);           \
+            Java_android_mediastress_cts_NativeMediaActivity_shutdown((env), (clazz));  \
+            return JNI_FALSE;                                                           \
+        }
+
+// callback invoked whenever there is new or changed stream information
+static void StreamChangeCallback(XAStreamInformationItf caller,
+        XAuint32 eventId,
+        XAuint32 streamIndex,
+        void * pEventData,
+        void * pContext )
+{
+    LOGV("StreamChangeCallback called for stream %u", streamIndex);
+    // pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
+    assert(NULL == pContext);
+    switch (eventId) {
+      case XA_STREAMCBEVENT_PROPERTYCHANGE: {
+        /** From spec 1.0.1:
+            "This event indicates that stream property change has occurred.
+            The streamIndex parameter identifies the stream with the property change.
+            The pEventData parameter for this event is not used and shall be ignored."
+         */
+        XAresult res;
+        XAuint32 domain;
+        res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
+        assert(XA_RESULT_SUCCESS == res);
+        switch (domain) {
+          case XA_DOMAINTYPE_VIDEO: {
+            XAVideoStreamInformation videoInfo;
+            res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
+            assert(XA_RESULT_SUCCESS == res);
+            LOGV("Found video size %u x %u, codec ID=%u, frameRate=%u, bitRate=%u, duration=%u ms",
+                        videoInfo.width, videoInfo.height, videoInfo.codecId, videoInfo.frameRate,
+                        videoInfo.bitRate, videoInfo.duration);
+          } break;
+          default:
+              LOGE("Unexpected domain %u\n", domain);
+            break;
+        }
+      } break;
+      default:
+          LOGE("Unexpected stream event ID %u\n", eventId);
+        break;
+    }
+}
+
+// shut down the native media system
+// use C linkage so the JNI functions are resolved by name without manual registration
+extern "C" void Java_android_mediastress_cts_NativeMediaActivity_shutdown(JNIEnv* env,
+        jclass clazz)
+{
+    // destroy streaming media player object, and invalidate all associated interfaces
+    if (playerObj != NULL) {
+        if (playerPlayItf != NULL) {
+            (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_STOPPED);
+        }
+        (*playerObj)->Destroy(playerObj);
+        playerObj = NULL;
+        playerPlayItf = NULL;
+        playerBQItf = NULL;
+        playerStreamInfoItf = NULL;
+        playerVolItf = NULL;
+    }
+
+    // destroy output mix object, and invalidate all associated interfaces
+    if (outputMixObject != NULL) {
+        (*outputMixObject)->Destroy(outputMixObject);
+        outputMixObject = NULL;
+    }
+
+    // destroy engine object, and invalidate all associated interfaces
+    if (engineObject != NULL) {
+        (*engineObject)->Destroy(engineObject);
+        engineObject = NULL;
+        engineEngine = NULL;
+    }
+
+    // make sure we don't leak native windows
+    if (theNativeWindow != NULL) {
+        ANativeWindow_release(theNativeWindow);
+        theNativeWindow = NULL;
+    }
+}
+
+// create the engine and output mix objects
+extern "C" jboolean Java_android_mediastress_cts_NativeMediaActivity_createEngine(JNIEnv* env,
+        jclass clazz)
+{
+    XAresult res;
+
+    // create engine
+    res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // realize the engine
+    res = (*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // get the engine interface, which is needed in order to create other objects
+    res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // create output mix
+    res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // realize the output mix
+    res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    return JNI_TRUE;
+}
+
+
+// create streaming media player
+extern "C" jboolean Java_android_mediastress_cts_NativeMediaActivity_createMediaPlayer(JNIEnv* env,
+        jclass clazz, jstring fileUri)
+{
+    XAresult res;
+
+    // convert Java string to UTF-8
+    const char *utf8 = env->GetStringUTFChars(fileUri, NULL);
+    RETURN_ON_ASSERTION_FAILURE((NULL != utf8), env, clazz);
+
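+    // data source: a URI locator with an MPEG-2 transport stream MIME format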
+    XADataLocator_URI uri = {XA_DATALOCATOR_URI, (XAchar*) utf8};
+    XADataFormat_MIME format_mime = {
+            XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS };
+    XADataSource dataSrc = {&uri, &format_mime};
+
+    // configure audio sink
+    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
+    XADataSink audioSnk = { &loc_outmix, NULL };
+
+    // configure image video sink
+    XADataLocator_NativeDisplay loc_nd = {
+            XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
+            // the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
+            (void*)theNativeWindow,              // hWindow
+            // must be NULL
+            NULL                                 // hDisplay
+    };
+    XADataSink imageVideoSink = {&loc_nd, NULL};
+
+    // declare interfaces to use
+    XAboolean     required[NB_MAXAL_INTERFACES]
+                           = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
+    XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
+                           = {XA_IID_PLAY,
+                              XA_IID_ANDROIDBUFFERQUEUESOURCE,
+                              XA_IID_STREAMINFORMATION};
+
+    // create media player
+    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
+            NULL, &audioSnk, &imageVideoSink, NULL, NULL,
+            NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
+            iidArray /*const XAInterfaceID *pInterfaceIds*/,
+            required /*const XAboolean *pInterfaceRequired*/);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // release the Java string and UTF-8
+    env->ReleaseStringUTFChars(fileUri, utf8);
+
+    // realize the player
+    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // get the play interface
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // get the stream information interface (for video size)
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // get the volume interface
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // we want to be notified of the video size once it's found, so we register a callback for that
+    res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
+            StreamChangeCallback, NULL);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // prepare the player by moving it to the paused state
+    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // set the volume
+    res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    // start the playback
+    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+
+    return JNI_TRUE;
+}
+
+
+// set the playing state for the streaming media player
+extern "C" jboolean Java_android_mediastress_cts_NativeMediaActivity_playOrPauseMediaPlayer(
+        JNIEnv* env, jclass clazz, jboolean play)
+{
+    XAresult res;
+
+    // make sure the streaming media player was created
+    RETURN_ON_ASSERTION_FAILURE((NULL != playerPlayItf), env, clazz);
+    // set the player's state
+    res = (*playerPlayItf)->SetPlayState(playerPlayItf, play ?
+        XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED);
+    RETURN_ON_ASSERTION_FAILURE((XA_RESULT_SUCCESS == res), env, clazz);
+    return JNI_TRUE;
+}
+
+// set the surface
+extern "C" jboolean Java_android_mediastress_cts_NativeMediaActivity_setSurface(JNIEnv *env,
+        jclass clazz, jobject surface)
+{
+    // obtain a native window from a Java surface
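+    // ANativeWindow_fromSurface acquires a reference that is released in shutdown()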
+    theNativeWindow = ANativeWindow_fromSurface(env, surface);
+    return JNI_TRUE;
+}
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaActivity.java b/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaActivity.java
new file mode 100644
index 0000000..6a34f88
--- /dev/null
+++ b/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaActivity.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Original code copied from NDK Native-media sample code */
+package android.mediastress.cts;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.media.CamcorderProfile;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import junit.framework.Assert;
+
+public class NativeMediaActivity extends Activity implements OnSurfaceChangedListener {
+    public static final String EXTRA_VIDEO_QUALITY = "videoQuality";
+    // Should be long enough; a time-out can be treated as an error.
+    public static final long NATIVE_MEDIA_LIFECYCLE_TIMEOUT_MS = 10000;
+    static final String TAG = "NativeMedia";
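+    // streams ordered by resolution; getMediaString() maps the requested quality to an index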
+    static final String[] MEDIA = {
+        "bbb_short/480x360/mp4_libx264_libfaac/" +
+        "bbb_short.ffmpeg.480x360.mp4.libx264_1000kbps_30fps.libfaac_stereo_192kbps_44100Hz.ts",
+        "bbb_short/720x480/mp4_libx264_libfaac/" +
+        "bbb_short.ffmpeg.720x480.mp4.libx264_1000kbps_30fps.libfaac_stereo_192kbps_44100Hz.ts",
+        "bbb_short/1280x720/mp4_libx264_libfaac/" +
+        "bbb_short.ffmpeg.1280x720.mp4.libx264_1000kbps_30fps.libfaac_stereo_192kbps_44100Hz.ts",
+        "bbb_short/1920x1080/mp4_libx264_libfaac/" +
+        "bbb_short.ffmpeg.1920x1080.mp4.libx264_5000kbps_30fps.libfaac_stereo_192kbps_48000Hz.ts"
+    };
+
+    private SurfaceTextureGLSurfaceView mGLView;
+    private volatile boolean mNativeCreated = false;
+    /** 0 for the default (480x360); otherwise CamcorderProfile.QUALITY_480P / 720P / 1080P */
+    private int mVideoQuality = 0;
+    // native media status, queued whenever the native player's life cycle changes
+    private final BlockingQueue<Boolean> mNativeWaitQ = new LinkedBlockingQueue<Boolean>();
+
+    @Override
+    public void onCreate(Bundle icicle) {
+        super.onCreate(icicle);
+        mVideoQuality = getIntent().getIntExtra(EXTRA_VIDEO_QUALITY, mVideoQuality);
+        mGLView = new SurfaceTextureGLSurfaceView(this, this);
+        setContentView(mGLView);
+    }
+
+    /**
+     * Called from the GL thread after the GL surface is created.
+     */
+    @Override
+    public void onSurfaceCreated() {
+        runOnUiThread(new Runnable() {
+            @Override
+            public void run() {
+                Log.i(TAG, "onSurfaceCreated create engine");
+                // initialize native media system
+                Assert.assertTrue(createEngine());
+                Assert.assertTrue(setSurfaceForNative());
+                String fileName = getMediaString();
+                Log.i(TAG, "start playing " + fileName);
+                Assert.assertTrue(createMediaPlayer("file:///" + fileName));
+                mNativeCreated = true;
+                mNativeWaitQ.add(mNativeCreated);
+            }
+        });
+    }
+
+    /**
+     * Called on the main thread.
+     */
+    @Override
+    public void onSurfaceDestroyed() {
+        shutdownIfActive();
+    }
+
+    /**
+     * Waits for the next native media life-cycle change. If no change arrives
+     * within the time-out, the caller should treat it as an error.
+     * @return the new status: true if alive, false if shut down, null if timed-out
+     * @throws InterruptedException
+     */
+    public Boolean waitForNativeMediaLifeCycle() throws InterruptedException {
+        return mNativeWaitQ.poll(NATIVE_MEDIA_LIFECYCLE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+    }
+
+    @Override
+    protected void onPause() {
+        // GLSurfaceView destroys its surface on pause, so shut down the native side first.
+        shutdownIfActive();
+        mGLView.onPause();
+        super.onPause();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        mGLView.onResume();
+        if (mNativeCreated) {
+            Assert.assertTrue(playOrPauseMediaPlayer(true));
+        }
+    }
+
+    @Override
+    protected void onDestroy() {
+        if (mNativeCreated) {
+            shutdown();
+        }
+        super.onDestroy();
+    }
+
+    private void shutdownIfActive() {
+        if (mNativeCreated) {
+            Log.i(TAG, "shutdownIfActive shutdown");
+            // surface no longer available, so just shutdown
+            shutdown();
+            mNativeCreated = false;
+            mNativeWaitQ.add(mNativeCreated);
+        }
+    }
+
+    private boolean setSurfaceForNative() {
+        SurfaceTexture st = mGLView.getSurfaceTexture();
+        Assert.assertNotNull(st);
+        Surface s = new Surface(st);
+        boolean res = setSurface(s);
+        s.release();
+        return res;
+    }
+
+    private String getMediaString() {
+        int mediaIndex = 0; // default: 480x360
+        switch(mVideoQuality) {
+        case CamcorderProfile.QUALITY_1080P:
+            mediaIndex = 3;
+            break;
+        case CamcorderProfile.QUALITY_720P:
+            mediaIndex = 2;
+            break;
+        case CamcorderProfile.QUALITY_480P:
+            mediaIndex = 1;
+            break;
+        }
+        return WorkDir.getMediaDirString() + MEDIA[mediaIndex];
+    }
+
+    /**
+     * Creates the OpenMAX AL engine.
+     */
+    public static native boolean createEngine();
+    /**
+     * Sets the surface to render into. Must be called before creating the media player.
+     * @param surface
+     */
+    public static native boolean setSurface(Surface surface);
+
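+    /**
+     * Creates a native media player for the given file URI and starts playback.
+     */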
+    public static native boolean createMediaPlayer(String fileUri);
+    public static native boolean playOrPauseMediaPlayer(boolean play);
+    public static native void shutdown();
+
+    /** Load the JNI library on class initialization. */
+    static {
+        System.loadLibrary("ctsmediastress_jni");
+    }
+
+}
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaTest.java b/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaTest.java
new file mode 100644
index 0000000..f3bbc40
--- /dev/null
+++ b/tests/tests/mediastress/src/android/mediastress/cts/NativeMediaTest.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.mediastress.cts;
+
+import android.app.Instrumentation;
+import android.content.Intent;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder.AudioEncoder;
+import android.media.MediaRecorder.VideoEncoder;
+import android.os.Environment;
+import android.test.ActivityInstrumentationTestCase2;
+
+import junit.framework.Assert;
+
+public class NativeMediaTest extends ActivityInstrumentationTestCase2<NativeMediaActivity> {
+    private static final int NUMBER_PLAY_PAUSE_REPETITIONS = 10;
+    private static final long PLAY_WAIT_TIME_MS = 4000;
+
+    public NativeMediaTest() {
+        super(NativeMediaActivity.class);
+    }
+
+    public void test1080pPlay() throws InterruptedException {
+        runPlayTest(CamcorderProfile.QUALITY_1080P);
+    }
+
+    public void test720pPlay() throws InterruptedException {
+        runPlayTest(CamcorderProfile.QUALITY_720P);
+    }
+
+    public void test480pPlay() throws InterruptedException {
+        runPlayTest(CamcorderProfile.QUALITY_480P);
+    }
+
+    public void testDefaultPlay() throws InterruptedException {
+        runPlayTest(0);
+    }
+
+    private void runPlayTest(int quality) throws InterruptedException {
+        if ((quality != 0) && !isResolutionSupported(quality)) {
+            return;
+        }
+        Intent intent = new Intent();
+        intent.putExtra(NativeMediaActivity.EXTRA_VIDEO_QUALITY, quality);
+        setActivityIntent(intent);
+        final NativeMediaActivity activity = getActivity();
+        final Instrumentation instrumentation = getInstrumentation();
+        waitForNativeMediaLifeCycle(activity, true);
+        Thread.sleep(PLAY_WAIT_TIME_MS); // let it play for some time
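+        // each pause / resume cycle below destroys and recreates the native player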
+        for (int i = 0; i < NUMBER_PLAY_PAUSE_REPETITIONS; i++) {
+            instrumentation.callActivityOnPause(activity);
+            instrumentation.waitForIdleSync();
+            waitForNativeMediaLifeCycle(activity, false);
+            instrumentation.callActivityOnResume(activity);
+            waitForNativeMediaLifeCycle(activity, true);
+            Thread.sleep(PLAY_WAIT_TIME_MS); // let it play for some time
+        }
+    }
+
+    /**
+     * Waits for a life-cycle change and checks that the resulting status matches expectation.
+     * @param activity
+     * @param expectAlive expected status, true if it should be alive.
+     * @throws InterruptedException
+     */
+    private void waitForNativeMediaLifeCycle(NativeMediaActivity activity, boolean expectAlive)
+            throws InterruptedException {
+        Boolean status = activity.waitForNativeMediaLifeCycle();
+        Assert.assertNotNull(status); // null means time-out
+        Assert.assertEquals(expectAlive, status.booleanValue());
+    }
+
+    private boolean isResolutionSupported(int quality) {
+        Assert.assertEquals(Environment.MEDIA_MOUNTED, Environment.getExternalStorageState());
+        if (!CamcorderProfile.hasProfile(quality)) {
+            return false;
+        }
+        CamcorderProfile profile = CamcorderProfile.get(quality);
+        return (profile != null) && (profile.videoCodec == VideoEncoder.H264) &&
+                (profile.audioCodec == AudioEncoder.AAC);
+    }
+}
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/OnSurfaceChangedListener.java b/tests/tests/mediastress/src/android/mediastress/cts/OnSurfaceChangedListener.java
new file mode 100644
index 0000000..f4a2c0d
--- /dev/null
+++ b/tests/tests/mediastress/src/android/mediastress/cts/OnSurfaceChangedListener.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediastress.cts;
+
+interface OnSurfaceChangedListener {
+    void onSurfaceCreated();
+    void onSurfaceDestroyed();
+}
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureGLSurfaceView.java b/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureGLSurfaceView.java
new file mode 100644
index 0000000..6d89652
--- /dev/null
+++ b/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureGLSurfaceView.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Original code copied from NDK Native-media sample code */
+
+package android.mediastress.cts;
+
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+
+public class SurfaceTextureGLSurfaceView extends GLSurfaceView {
+
+    private SurfaceTextureRenderer mRenderer;
+    private final OnSurfaceChangedListener mListener;
+
+    public SurfaceTextureGLSurfaceView(Context context, OnSurfaceChangedListener listener) {
+        this(context, null, listener);
+    }
+
+    public SurfaceTextureGLSurfaceView(Context context, AttributeSet attributeSet,
+            OnSurfaceChangedListener listener) {
+        super(context, attributeSet);
+        mListener = listener;
+        init(listener);
+    }
+
+    private void init(OnSurfaceChangedListener listener) {
+        setEGLContextClientVersion(2);
+        mRenderer = new SurfaceTextureRenderer(listener);
+        setRenderer(mRenderer);
+    }
+
+    public SurfaceTexture getSurfaceTexture() {
+        return mRenderer.getSurfaceTexture();
+    }
+
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        // GLSurfaceView.Renderer gets no surfaceDestroyed notification, so forward it here.
+        if (mListener != null) {
+            // let the listener release anything still tied to the surface
+            mListener.onSurfaceDestroyed();
+        }
+        super.surfaceDestroyed(holder);
+    }
+}
+
diff --git a/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureRenderer.java b/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureRenderer.java
new file mode 100644
index 0000000..4c2af8b
--- /dev/null
+++ b/tests/tests/mediastress/src/android/mediastress/cts/SurfaceTextureRenderer.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Original code copied from NDK Native-media sample code */
+package android.mediastress.cts;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.Matrix;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import junit.framework.Assert;
+
+class SurfaceTextureRenderer implements GLSurfaceView.Renderer,
+        SurfaceTexture.OnFrameAvailableListener {
+    private static final String TAG = "SurfaceTextureRenderer";
+    // GL_TEXTURE_EXTERNAL_OES from the GL_OES_EGL_image_external extension (not in GLES20)
+    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+    private static final int FLOAT_SIZE_BYTES = 4;
+    private static final int VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+    private static final int VERTICES_DATA_POS_OFFSET = 0;
+    private static final int VERTICES_DATA_UV_OFFSET = 3;
+    private final float[] mVerticesData = {
+        // X, Y, Z, U, V
+        -1.0f, -1.0f, 0, 0.f, 0.f,
+         1.0f, -1.0f, 0, 1.f, 0.f,
+        -1.0f,  1.0f, 0, 0.f, 1.f,
+         1.0f,  1.0f, 0, 1.f, 1.f,
+    };
+
+    private FloatBuffer mVertices;
+
+    private final String mVertexShader =
+        "uniform mat4 uMVPMatrix;\n" +
+        "uniform mat4 uSTMatrix;\n" +
+        "attribute vec4 aPosition;\n" +
+        "attribute vec4 aTextureCoord;\n" +
+        "varying vec2 vTextureCoord;\n" +
+        "void main() {\n" +
+        "  gl_Position = uMVPMatrix * aPosition;\n" +
+        "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+        "}\n";
+
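+    // the SurfaceTexture-backed texture must be sampled through a samplerExternalOES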
+    private final String mFragmentShader =
+        "#extension GL_OES_EGL_image_external : require\n" +
+        "precision mediump float;\n" +
+        "varying vec2 vTextureCoord;\n" +
+        "uniform samplerExternalOES sTexture;\n" +
+        "void main() {\n" +
+        "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+        "}\n";
+
+    private float[] mMVPMatrix = new float[16];
+    private float[] mProjMatrix = new float[16];
+    private float[] mMMatrix = new float[16];
+    private float[] mVMatrix = new float[16];
+    private float[] mSTMatrix = new float[16];
+
+    private int mProgram;
+    private int mTextureID;
+    private int muMVPMatrixHandle;
+    private int muSTMatrixHandle;
+    private int maPositionHandle;
+    private int maTextureHandle;
+
+    private float mRatio = 1.0f;
+    private SurfaceTexture mSurface = null;
+    private boolean updateSurface = false;
+
+    private final OnSurfaceChangedListener mListener;
+
+    public SurfaceTextureRenderer(OnSurfaceChangedListener listener) {
+        mListener = listener;
+        mVertices = ByteBuffer.allocateDirect(mVerticesData.length
+                * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
+        mVertices.put(mVerticesData).position(0);
+
+        Matrix.setIdentityM(mSTMatrix, 0);
+        Matrix.setIdentityM(mMMatrix, 0);
+        Matrix.rotateM(mMMatrix, 0, 20, 0, 1, 0);
+    }
+
+    public void onDrawFrame(GL10 glUnused) {
+        synchronized(this) {
+            if (updateSurface) {
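+                // updateTexImage() must be called on the GL thread with our context current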
+                mSurface.updateTexImage();
+
+                mSurface.getTransformMatrix(mSTMatrix);
+                updateSurface = false;
+            }
+        }
+
+        // Ignore the passed-in GL10 interface, and use the GLES20
+        // class's static methods instead.
+        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glUseProgram(mProgram);
+        checkGlError("glUseProgram");
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+        mVertices.position(VERTICES_DATA_POS_OFFSET);
+        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+                VERTICES_DATA_STRIDE_BYTES, mVertices);
+        checkGlError("glVertexAttribPointer maPosition");
+        GLES20.glEnableVertexAttribArray(maPositionHandle);
+        checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+        mVertices.position(VERTICES_DATA_UV_OFFSET);
+        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+                VERTICES_DATA_STRIDE_BYTES, mVertices);
+        checkGlError("glVertexAttribPointer maTextureHandle");
+        GLES20.glEnableVertexAttribArray(maTextureHandle);
+        checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+        Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
+        Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
+
+        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+        checkGlError("glDrawArrays");
+    }
+
+    public void onSurfaceChanged(GL10 glUnused, int width, int height) {
+        // Ignore the passed-in GL10 interface, and use the GLES20
+        // class's static methods instead.
+        GLES20.glViewport(0, 0, width, height);
+        mRatio = (float) width / height;
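+        // keep the projection frustum's aspect ratio in sync with the viewport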
+        Matrix.frustumM(mProjMatrix, 0, -mRatio, mRatio, -1, 1, 3, 7);
+    }
+
+    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
+        String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS);
+        Log.i(TAG, "Gl extensions: " + extensions);
+        Assert.assertTrue(extensions.contains("OES_EGL_image_external"));
+
+        /* Set up alpha blending and an Android background color */
+        GLES20.glEnable(GLES20.GL_BLEND);
+        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+        GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
+
+        /* Set up shaders and handles to their variables */
+        mProgram = createProgram(mVertexShader, mFragmentShader);
+        if (mProgram == 0) {
+            return;
+        }
+        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+        checkGlError("glGetAttribLocation aPosition");
+        if (maPositionHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aPosition");
+        }
+        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+        checkGlError("glGetAttribLocation aTextureCoord");
+        if (maTextureHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aTextureCoord");
+        }
+
+        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+        checkGlError("glGetUniformLocation uMVPMatrix");
+        if (muMVPMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+        }
+
+        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+        checkGlError("glGetUniformLocation uSTMatrix");
+        if (muSTMatrixHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for uSTMatrix");
+        }
+
+        /*
+         * Create our texture. This has to be done each time the
+         * surface is created.
+         */
+
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+
+        mTextureID = textures[0];
+        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+        checkGlError("glBindTexture mTextureID");
+
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+                GLES20.GL_NEAREST);
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+                GLES20.GL_LINEAR);
+        // Clamp to edge is the only option
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+                GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+                GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameteri mTextureID");
+
+        if (mSurface != null) {
+            mSurface.release();
+        }
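+        // bind a new SurfaceTexture to the external texture; frames arrive via onFrameAvailable()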
+        mSurface = new SurfaceTexture(mTextureID);
+        mSurface.setOnFrameAvailableListener(this);
+
+        Matrix.setLookAtM(mVMatrix, 0, 0, 0, 4f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
+
+        synchronized(this) {
+            updateSurface = false;
+        }
+        if (mListener != null) {
+            mListener.onSurfaceCreated();
+        }
+    }
+
+    public synchronized void onFrameAvailable(SurfaceTexture surface) {
+        /* SurfaceTexture calls this when it has new data available. The call
+         * may arrive on an arbitrary thread, so be safe and synchronize.
+         * No OpenGL calls may be made here.
+         */
+        updateSurface = true;
+    }
+
+    private int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        if (shader != 0) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                Log.e(TAG, "Could not compile shader " + shaderType + ":");
+                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
+                GLES20.glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+        return shader;
+    }
+
+    private int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            return 0;
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            return 0;
+        }
+
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertexShader);
+            checkGlError("glAttachShader");
+            GLES20.glAttachShader(program, pixelShader);
+            checkGlError("glAttachShader");
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                Log.e(TAG, "Could not link program: ");
+                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+                GLES20.glDeleteProgram(program);
+                program = 0;
+            }
+        }
+        return program;
+    }
+
+    private void checkGlError(String op) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            Log.e(TAG, op + ": glError " + error);
+            throw new RuntimeException(op + ": glError " + error);
+        }
+    }
+
+    public SurfaceTexture getSurfaceTexture() {
+        return mSurface;
+    }
+}