Merge "IME interferes with print UI tests" into mnc-dev
diff --git a/tests/tests/hardware/src/android/hardware/cts/helpers/sensorverification/EventOrderingVerificationTest.java b/tests/tests/hardware/src/android/hardware/cts/helpers/sensorverification/EventOrderingVerificationTest.java
index b9848fa..f1dc229c8 100644
--- a/tests/tests/hardware/src/android/hardware/cts/helpers/sensorverification/EventOrderingVerificationTest.java
+++ b/tests/tests/hardware/src/android/hardware/cts/helpers/sensorverification/EventOrderingVerificationTest.java
@@ -41,16 +41,6 @@
     }
 
     /**
-     * Test that the verification passes when the timestamps are the same.
-     */
-    public void testSameTimestamp() {
-        SensorStats stats = new SensorStats();
-        EventOrderingVerification verification = getVerification(0, 0, 0, 0, 0);
-        verification.verify(stats);
-        verifyStats(stats, true, 0);
-    }
-
-    /**
      * Test that the verification passes when the timestamps are increasing.
      */
     public void testSequentialTimestamp() {
diff --git a/tests/tests/media/Android.mk b/tests/tests/media/Android.mk
index 43e3e89..13daca6 100644
--- a/tests/tests/media/Android.mk
+++ b/tests/tests/media/Android.mk
@@ -41,7 +41,7 @@
 LOCAL_STATIC_JAVA_LIBRARIES := \
     ctsmediautil ctsdeviceutil ctstestserver ctstestrunner
 
-LOCAL_JNI_SHARED_LIBRARIES := libctsmediacodec_jni
+LOCAL_JNI_SHARED_LIBRARIES := libctsmediacodec_jni libaudio_jni
 
 LOCAL_SRC_FILES := $(call all-java-files-under, src)
 
diff --git a/tests/tests/media/libaudiojni/Android.mk b/tests/tests/media/libaudiojni/Android.mk
new file mode 100644
index 0000000..a6c1bfc
--- /dev/null
+++ b/tests/tests/media/libaudiojni/Android.mk
@@ -0,0 +1,39 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE    := libaudio_jni
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := \
+	appendix-b-1-1-buffer-queue.cpp \
+	appendix-b-1-2-recording.cpp \
+	audio-record-native.cpp \
+	audio-track-native.cpp \
+	sl-utils.cpp
+
+LOCAL_C_INCLUDES := \
+	$(JNI_H_INCLUDE) \
+	system/core/include
+
+LOCAL_C_INCLUDES += $(call include-path-for, libaudiojni) \
+	$(call include-path-for, wilhelm)
+
+LOCAL_SHARED_LIBRARIES := libandroid liblog libnativehelper libOpenSLES libutils
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/tests/media/libaudiojni/Blob.h b/tests/tests/media/libaudiojni/Blob.h
new file mode 100644
index 0000000..134232c
--- /dev/null
+++ b/tests/tests/media/libaudiojni/Blob.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_BLOB_H
+#define ANDROID_BLOB_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+namespace android {
+
+// read only byte buffer like object
+
+class BlobReadOnly {
+public:
+    BlobReadOnly(const void *data, size_t size, bool byReference) :
+        mMem(byReference ? NULL : malloc(size)),
+        mData(byReference ? data : mMem),
+        mSize(size) {
+        if (!byReference) {
+            memcpy(mMem, data, size);
+        }
+    }
+    ~BlobReadOnly() {
+        free(mMem);
+    }
+
+private:
+          void * const mMem;
+
+public:
+    const void * const mData;
+          const size_t mSize;
+};
+
+// read/write byte buffer like object
+
+class Blob {
+public:
+    Blob(size_t size) :
+        mData(malloc(size)),
+        mOffset(0),
+        mSize(size),
+        mMem(mData) { }
+
+    // by reference
+    Blob(void *data, size_t size) :
+        mData(data),
+        mOffset(0),
+        mSize(size),
+        mMem(NULL) { }
+
+    ~Blob() {
+        free(mMem);
+    }
+
+    void * const mData;
+          size_t mOffset;
+    const size_t mSize;
+
+private:
+    void * const mMem;
+};
+
+} // namespace android
+
+#endif // ANDROID_BLOB_H
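A minimal usage sketch (hypothetical, not part of this change) of the two Blob ownership modes declared above: Blob(size) owns its storage and frees it in the destructor, while Blob(data, size) wraps caller-owned memory and frees nothing; consumers advance mOffset as data is produced or consumed.

    #include <cstring>
    #include "Blob.h"

    static void BlobUsageSketch() {
        // Owning mode: the Blob mallocs mSize bytes and frees them on destruction.
        android::Blob owned(256);
        memset(owned.mData, 0, owned.mSize);
        owned.mOffset = owned.mSize;     // mark the whole buffer as filled

        // By-reference mode: the Blob wraps caller-owned memory; its private
        // mMem member stays NULL, so the destructor's free() is a no-op.
        char backing[64];
        android::Blob wrapped(backing, sizeof(backing));
        wrapped.mOffset = 0;
    }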
diff --git a/tests/tests/media/libaudiojni/Gate.h b/tests/tests/media/libaudiojni/Gate.h
new file mode 100644
index 0000000..dfc15b7
--- /dev/null
+++ b/tests/tests/media/libaudiojni/Gate.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_GATE_H
+#define ANDROID_GATE_H
+
+#include <stdint.h>
+#include <mutex>
+
+namespace android {
+
+// Gate is a synchronization object.
+//
+// Threads will pass if it is open.
+// Threads will block (wait) if it is closed.
+//
+// When a gate is opened, all waiting threads will pass through.
+//
+// Since the gate holds no external locks, consistency with external
+// state needs to be handled elsewhere.
+//
+// We use mWaitCount to indicate the number of threads that have
+// arrived at the gate via wait().  Each thread entering
+// wait obtains a unique waitId (which is the current mWaitCount).
+// This can be viewed as a sequence number.
+//
+// We use mPassCount to indicate the number of threads that have
+// passed the gate.  If the waitId is less than or equal to the mPassCount
+// then that thread has passed the gate.  An open gate sets mPassCount
+// to the current mWaitCount, allowing all prior threads to pass.
+//
+// See sync_timeline, sync_pt, etc. for graphics.
+
+class Gate {
+public:
+    Gate(bool open = false) :
+        mOpen(open),
+        mExit(false),
+        mWaitCount(0),
+        mPassCount(0)
+    { }
+
+    // waits for the gate to open, returns immediately if the gate is already open.
+    //
+    // Do not hold a monitor lock while calling this.
+    //
+    // returns true if we passed the gate normally,
+    //         false if the gate was terminated and we did not pass it.
+    bool wait() {
+        std::unique_lock<std::mutex> l(mLock);
+        size_t waitId = ++mWaitCount;
+        if (mOpen) {
+            mPassCount = waitId; // let me through
+        }
+        while (!passedGate_l(waitId) && !mExit) {
+            mCondition.wait(l);
+        }
+        return passedGate_l(waitId);
+    }
+
+    // close the gate.
+    void closeGate() {
+        std::lock_guard<std::mutex> l(mLock);
+        mOpen = false;
+        mExit = false;
+    }
+
+    // open the gate.
+    // signal to all waiters it is okay to go.
+    void openGate() {
+        std::lock_guard<std::mutex> l(mLock);
+        mOpen = true;
+        mExit = false;
+        if (waiters_l() > 0) {
+            mPassCount = mWaitCount;  // allow waiting threads to go through
+            // unoptimized pthreads will wake thread to find we still hold lock.
+            mCondition.notify_all();
+        }
+    }
+
+    // terminate (term has expired).
+    // all threads allowed to pass regardless of whether the gate is open or closed.
+    void terminate() {
+        std::lock_guard<std::mutex> l(mLock);
+        mExit = true;
+        if (waiters_l() > 0) {
+            // unoptimized pthreads will wake thread to find we still hold lock.
+            mCondition.notify_all();
+        }
+    }
+
+    bool isOpen() {
+        std::lock_guard<std::mutex> l(mLock);
+        return mOpen;
+    }
+
+    // return how many waiters are at the gate.
+    size_t waiters() {
+        std::lock_guard<std::mutex> l(mLock);
+        return waiters_l();
+    }
+
+private:
+    bool                    mOpen;
+    bool                    mExit;
+    size_t                  mWaitCount;  // total number of threads that have called wait()
+    size_t                  mPassCount;  // total number of threads that have passed the gate.
+    std::condition_variable mCondition;
+    std::mutex              mLock;
+
+    // return how many waiters are at the gate.
+    inline size_t waiters_l() {
+        return mWaitCount - mPassCount;
+    }
+
+    // return whether the waitId (from mWaitCount) has passed through the gate
+    inline bool passedGate_l(size_t waitId) {
+        return (ssize_t)(waitId - mPassCount) <= 0;
+    }
+};
+
+} // namespace android
+
+#endif // ANDROID_GATE_H
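A minimal usage sketch (hypothetical, not part of this change) of the Gate class above: one thread parks in wait() while another calls openGate() or terminate().

    #include <condition_variable>  // used by Gate.h, which itself only includes <mutex>
    #include <thread>
    #include "Gate.h"

    static void GateUsageSketch() {
        android::Gate gate(false /* open */);   // start closed
        std::thread waiter([&gate] {
            bool passed = gate.wait();          // blocks until opened or terminated
            (void)passed;  // true: gate was opened; false: gate was terminated
        });
        gate.openGate();                        // release all current waiters
        waiter.join();
    }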
diff --git a/tests/tests/media/libaudiojni/appendix-b-1-1-buffer-queue.cpp b/tests/tests/media/libaudiojni/appendix-b-1-1-buffer-queue.cpp
new file mode 100644
index 0000000..5bb88a7
--- /dev/null
+++ b/tests/tests/media/libaudiojni/appendix-b-1-1-buffer-queue.cpp
@@ -0,0 +1,250 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OpenSL-ES-Test-B-1-1-Buffer-Queue"
+
+#include "sl-utils.h"
+
+/*
+ * See https://www.khronos.org/registry/sles/specs/OpenSL_ES_Specification_1.0.1.pdf
+ * Appendix B.1.1 sample code.
+ *
+ * Minor edits made to conform to Android coding style.
+ *
+ * Correction to code: SL_IID_VOLUME is now made optional for the mixer.
+ * It isn't supported on the standard Android mixer, but it is supported on the player.
+ */
+
+#define MAX_NUMBER_INTERFACES 3
+
+/* Local storage for Audio data in 16 bit words */
+#define AUDIO_DATA_STORAGE_SIZE 4096
+
+#define AUDIO_DATA_SEGMENTS 8
+
+/* Audio data buffer size in 16 bit words. 8 data segments are used in
+   this simple example */
+#define AUDIO_DATA_BUFFER_SIZE (AUDIO_DATA_STORAGE_SIZE / AUDIO_DATA_SEGMENTS)
+
+/* Structure for passing information to callback function */
+typedef struct  {
+    SLPlayItf playItf;
+    SLint16  *pDataBase; // Base address of local audio data storage
+    SLint16  *pData;     // Current address of local audio data storage
+    SLuint32  size;
+} CallbackCntxt;
+
+/* Local storage for Audio data */
+static SLint16 pcmData[AUDIO_DATA_STORAGE_SIZE];
+
+/* Callback for Buffer Queue events */
+static void BufferQueueCallback(
+        SLBufferQueueItf queueItf,
+        void *pContext)
+{
+    SLresult res;
+    CallbackCntxt *pCntxt = (CallbackCntxt*)pContext;
+    if (pCntxt->pData < (pCntxt->pDataBase + pCntxt->size)) {
+        res = (*queueItf)->Enqueue(queueItf, (void *)pCntxt->pData,
+                sizeof(SLint16) * AUDIO_DATA_BUFFER_SIZE); /* Size given in bytes. */
+        ALOGE_IF(res != SL_RESULT_SUCCESS, "error: %s", android::getSLErrStr(res));
+        /* Increase data pointer by buffer size */
+        pCntxt->pData += AUDIO_DATA_BUFFER_SIZE;
+    }
+}
+
+/* Play some music from a buffer queue */
+static void TestPlayMusicBufferQueue(SLObjectItf sl)
+{
+    SLEngineItf EngineItf;
+
+    SLresult res;
+
+    SLDataSource audioSource;
+    SLDataLocator_BufferQueue bufferQueue;
+    SLDataFormat_PCM pcm;
+
+    SLDataSink audioSink;
+    SLDataLocator_OutputMix locator_outputmix;
+
+    SLObjectItf player;
+    SLPlayItf playItf;
+    SLBufferQueueItf bufferQueueItf;
+    SLBufferQueueState state;
+
+    SLObjectItf OutputMix;
+    SLVolumeItf volumeItf;
+
+    int i;
+
+    SLboolean required[MAX_NUMBER_INTERFACES];
+    SLInterfaceID iidArray[MAX_NUMBER_INTERFACES];
+
+    /* Callback context for the buffer queue callback function */
+    CallbackCntxt cntxt;
+
+    /* Get the SL Engine Interface which is implicit */
+    res = (*sl)->GetInterface(sl, SL_IID_ENGINE, (void *)&EngineItf);
+    CheckErr(res);
+
+    /* Initialize arrays required[] and iidArray[] */
+    for (i = 0; i < MAX_NUMBER_INTERFACES; i++) {
+        required[i] = SL_BOOLEAN_FALSE;
+        iidArray[i] = SL_IID_NULL;
+    }
+
+    // Set arrays required[] and iidArray[] for VOLUME interface
+    required[0] = SL_BOOLEAN_FALSE; // ANDROID: we don't require this interface
+    iidArray[0] = SL_IID_VOLUME;
+
+#if 0
+    const unsigned interfaces = 1;
+#else
+
+    /* FIXME: Android doesn't properly support optional interfaces (required == false).
+    [3.1.6] When an application requests explicit interfaces during object creation,
+    it can flag any interface as required. If an implementation is unable to satisfy
+    the request for an interface that is not flagged as required (i.e. it is not required),
+    this will not cause the object to fail creation. On the other hand, if the interface
+    is flagged as required and the implementation is unable to satisfy the request
+    for the interface, the object will not be created.
+    */
+    const unsigned interfaces = 0;
+#endif
+    // Create Output Mix object to be used by player
+    res = (*EngineItf)->CreateOutputMix(EngineItf, &OutputMix, interfaces,
+            iidArray, required);
+    CheckErr(res);
+
+    // Realizing the Output Mix object in synchronous mode.
+    res = (*OutputMix)->Realize(OutputMix, SL_BOOLEAN_FALSE);
+    CheckErr(res);
+
+    volumeItf = NULL; // ANDROID: Volume interface on mix object may not be supported
+    res = (*OutputMix)->GetInterface(OutputMix, SL_IID_VOLUME,
+            (void *)&volumeItf);
+
+    /* Setup the data source structure for the buffer queue */
+    bufferQueue.locatorType = SL_DATALOCATOR_BUFFERQUEUE;
+    bufferQueue.numBuffers = 4; /* Four buffers in our buffer queue */
+
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = 2;
+    pcm.samplesPerSec = SL_SAMPLINGRATE_44_1;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = 16;
+    pcm.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSource.pFormat = (void *)&pcm;
+    audioSource.pLocator = (void *)&bufferQueue;
+
+    /* Setup the data sink structure */
+    locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+    locator_outputmix.outputMix = OutputMix;
+    audioSink.pLocator = (void *)&locator_outputmix;
+    audioSink.pFormat = NULL;
+
+    /* Initialize the context for Buffer queue callbacks */
+    cntxt.pDataBase = pcmData;
+    cntxt.pData = cntxt.pDataBase;
+    cntxt.size = sizeof(pcmData) / sizeof(pcmData[0]); // ANDROID: bug fix, size is in samples
+
+    /* Set arrays required[] and iidArray[] for SEEK interface
+       (PlayItf is implicit) */
+    required[0] = SL_BOOLEAN_TRUE;
+    iidArray[0] = SL_IID_BUFFERQUEUE;
+
+    /* Create the music player */
+
+    res = (*EngineItf)->CreateAudioPlayer(EngineItf, &player,
+            &audioSource, &audioSink, 1, iidArray, required);
+    CheckErr(res);
+
+    /* Realizing the player in synchronous mode. */
+    res = (*player)->Realize(player, SL_BOOLEAN_FALSE);
+    CheckErr(res);
+
+    /* Get seek and play interfaces */
+    res = (*player)->GetInterface(player, SL_IID_PLAY, (void *)&playItf);
+    CheckErr(res);
+    res = (*player)->GetInterface(player, SL_IID_BUFFERQUEUE,
+            (void *)&bufferQueueItf);
+    CheckErr(res);
+
+    /* Setup to receive buffer queue event callbacks */
+    res = (*bufferQueueItf)->RegisterCallback(bufferQueueItf,
+            BufferQueueCallback, &cntxt /* BUG, was NULL */);
+    CheckErr(res);
+
+    /* Before we start set volume to -3dB (-300mB) */
+    if (volumeItf != NULL) { // ANDROID: Volume interface may not be supported.
+        res = (*volumeItf)->SetVolumeLevel(volumeItf, -300);
+        CheckErr(res);
+    }
+
+    /* Enqueue a few buffers to get the ball rolling */
+    res = (*bufferQueueItf)->Enqueue(bufferQueueItf, cntxt.pData,
+            sizeof(SLint16) * AUDIO_DATA_BUFFER_SIZE); /* Size given in bytes. */
+    CheckErr(res);
+    cntxt.pData += AUDIO_DATA_BUFFER_SIZE;
+    res = (*bufferQueueItf)->Enqueue(bufferQueueItf, cntxt.pData,
+            sizeof(SLint16) * AUDIO_DATA_BUFFER_SIZE); /* Size given in bytes. */
+    CheckErr(res);
+    cntxt.pData += AUDIO_DATA_BUFFER_SIZE;
+    res = (*bufferQueueItf)->Enqueue(bufferQueueItf, cntxt.pData,
+            sizeof(SLint16) * AUDIO_DATA_BUFFER_SIZE); /* Size given in bytes. */
+    CheckErr(res);
+    cntxt.pData += AUDIO_DATA_BUFFER_SIZE;
+
+    /* Play the PCM samples using a buffer queue */
+    res = (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PLAYING);
+    CheckErr(res);
+
+    /* Wait until the PCM data is done playing, the buffer queue callback
+       will continue to queue buffers until the entire PCM data has been
+       played. This is indicated by waiting for the count member of the
+       SLBufferQueueState to go to zero.
+     */
+    res = (*bufferQueueItf)->GetState(bufferQueueItf, &state);
+    CheckErr(res);
+
+    while (state.count) {
+        usleep(5 * 1000 /* usec */); // ANDROID: avoid busy waiting
+        (*bufferQueueItf)->GetState(bufferQueueItf, &state);
+    }
+
+    /* Make sure player is stopped */
+    res = (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_STOPPED);
+    CheckErr(res);
+
+    /* Destroy the player */
+    (*player)->Destroy(player);
+
+    /* Destroy Output Mix object */
+    (*OutputMix)->Destroy(OutputMix);
+}
+
+extern "C" void Java_android_media_cts_AudioNativeTest_nativeAppendixBBufferQueue(
+        JNIEnv * /* env */, jclass /* clazz */)
+{
+    SLObjectItf engineObject = android::OpenSLEngine();
+    LOG_ALWAYS_FATAL_IF(engineObject == NULL, "cannot open OpenSL ES engine");
+
+    TestPlayMusicBufferQueue(engineObject);
+    android::CloseSLEngine(engineObject);
+}
diff --git a/tests/tests/media/libaudiojni/appendix-b-1-2-recording.cpp b/tests/tests/media/libaudiojni/appendix-b-1-2-recording.cpp
new file mode 100644
index 0000000..5f6f3aa
--- /dev/null
+++ b/tests/tests/media/libaudiojni/appendix-b-1-2-recording.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OpenSL-ES-Test-B-1-2-Recording"
+
+#include "sl-utils.h"
+
+/*
+ * See https://www.khronos.org/registry/sles/specs/OpenSL_ES_Specification_1.0.1.pdf
+ * Appendix B.1.2 sample code.
+ *
+ * Minor edits made to conform to Android coding style.
+ *
+ * Correction to code: SL_IID_AUDIOIODEVICECAPABILITIES is not supported.
+ * Detection of microphone should be made in Java layer.
+ */
+
+#define MAX_NUMBER_INTERFACES 5
+#define MAX_NUMBER_INPUT_DEVICES 3
+#define POSITION_UPDATE_PERIOD 1000 /* 1 sec */
+
+static void RecordEventCallback(SLRecordItf caller __unused,
+        void *pContext __unused,
+        SLuint32 recordevent __unused)
+{
+    /* Callback code goes here */
+}
+
+/*
+ * Test recording of audio from a microphone into a specified file
+ */
+static void TestAudioRecording(SLObjectItf sl)
+{
+    SLObjectItf recorder;
+    SLRecordItf recordItf;
+    SLEngineItf EngineItf;
+    SLAudioIODeviceCapabilitiesItf AudioIODeviceCapabilitiesItf;
+    SLAudioInputDescriptor AudioInputDescriptor;
+    SLresult res;
+
+    SLDataSource audioSource;
+    SLDataLocator_IODevice locator_mic;
+    SLDeviceVolumeItf devicevolumeItf;
+    SLDataSink audioSink;
+    SLDataLocator_URI uri;
+    SLDataFormat_MIME mime;
+
+    int i;
+    SLboolean required[MAX_NUMBER_INTERFACES];
+    SLInterfaceID iidArray[MAX_NUMBER_INTERFACES];
+
+    SLuint32 InputDeviceIDs[MAX_NUMBER_INPUT_DEVICES];
+    SLint32 numInputs = 0;
+    SLboolean mic_available = SL_BOOLEAN_FALSE;
+    SLuint32 mic_deviceID = 0;
+
+    /* Get the SL Engine Interface which is implicit */
+    res = (*sl)->GetInterface(sl, SL_IID_ENGINE, (void *)&EngineItf);
+    CheckErr(res);
+
+    AudioIODeviceCapabilitiesItf = NULL;
+    /* Get the Audio IO DEVICE CAPABILITIES interface, which is also
+       implicit */
+    res = (*sl)->GetInterface(sl, SL_IID_AUDIOIODEVICECAPABILITIES,
+            (void *)&AudioIODeviceCapabilitiesItf);
+    // ANDROID: obtaining SL_IID_AUDIOIODEVICECAPABILITIES may fail
+    if (AudioIODeviceCapabilitiesItf != NULL) {
+        numInputs = MAX_NUMBER_INPUT_DEVICES;
+        res = (*AudioIODeviceCapabilitiesItf)->GetAvailableAudioInputs(
+                AudioIODeviceCapabilitiesItf, &numInputs, InputDeviceIDs);
+        CheckErr(res);
+        /* Search for either earpiece microphone or headset microphone input
+           device - with a preference for the latter */
+        for (i = 0; i < numInputs; i++) {
+            res = (*AudioIODeviceCapabilitiesItf)->QueryAudioInputCapabilities(
+                    AudioIODeviceCapabilitiesItf, InputDeviceIDs[i], &AudioInputDescriptor);
+            CheckErr(res);
+            if ((AudioInputDescriptor.deviceConnection == SL_DEVCONNECTION_ATTACHED_WIRED)
+                    && (AudioInputDescriptor.deviceScope == SL_DEVSCOPE_USER)
+                    && (AudioInputDescriptor.deviceLocation == SL_DEVLOCATION_HEADSET)) {
+                mic_deviceID = InputDeviceIDs[i];
+                mic_available = SL_BOOLEAN_TRUE;
+                break;
+            }
+            else if ((AudioInputDescriptor.deviceConnection == SL_DEVCONNECTION_INTEGRATED)
+                    && (AudioInputDescriptor.deviceScope == SL_DEVSCOPE_USER)
+                    && (AudioInputDescriptor.deviceLocation == SL_DEVLOCATION_HANDSET)) {
+                mic_deviceID = InputDeviceIDs[i];
+                mic_available = SL_BOOLEAN_TRUE;
+                break;
+            }
+        }
+    } else {
+        mic_deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
+        mic_available = SL_BOOLEAN_TRUE;
+    }
+
+    /* If neither of the preferred input audio devices is available, no
+       point in continuing */
+    if (!mic_available) {
+        /* Appropriate error message here */
+        ALOGW("No microphone available");
+        return;
+    }
+
+    /* Initialize arrays required[] and iidArray[] */
+    for (i = 0; i < MAX_NUMBER_INTERFACES; i++) {
+        required[i] = SL_BOOLEAN_FALSE;
+        iidArray[i] = SL_IID_NULL;
+    }
+
+    // ANDROID: the following may fail for volume
+    devicevolumeItf = NULL;
+    /* Get the optional DEVICE VOLUME interface from the engine */
+    res = (*sl)->GetInterface(sl, SL_IID_DEVICEVOLUME,
+            (void *)&devicevolumeItf);
+
+    /* Set recording volume of the microphone to -3 dB */
+    if (devicevolumeItf != NULL) { // ANDROID: Volume may not be supported
+        res = (*devicevolumeItf)->SetVolume(devicevolumeItf, mic_deviceID, -300);
+        CheckErr(res);
+    }
+
+    /* Setup the data source structure */
+    locator_mic.locatorType = SL_DATALOCATOR_IODEVICE;
+    locator_mic.deviceType = SL_IODEVICE_AUDIOINPUT;
+    locator_mic.deviceID = mic_deviceID;
+    locator_mic.device = NULL;
+
+    audioSource.pLocator = (void *)&locator_mic;
+    audioSource.pFormat = NULL;
+
+#if 0
+    /* Setup the data sink structure */
+    uri.locatorType = SL_DATALOCATOR_URI;
+    uri.URI = (SLchar *) "file:///recordsample.wav";
+    mime.formatType = SL_DATAFORMAT_MIME;
+    mime.mimeType = (SLchar *) "audio/x-wav";
+    mime.containerType = SL_CONTAINERTYPE_WAV;
+    audioSink.pLocator = (void *)&uri;
+    audioSink.pFormat = (void *)&mime;
+#else
+    // FIXME: Android requires SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE
+    // because the recorder makes the distinction from SL_DATALOCATOR_BUFFERQUEUE
+    // which the player does not.
+    SLDataLocator_AndroidSimpleBufferQueue loc_bq = {
+            SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2
+    };
+    SLDataFormat_PCM format_pcm = {
+            SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_16,
+            SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
+            SL_SPEAKER_FRONT_LEFT, SL_BYTEORDER_LITTLEENDIAN
+    };
+    audioSink = { &loc_bq, &format_pcm };
+#endif
+
+    /* Create audio recorder */
+    res = (*EngineItf)->CreateAudioRecorder(EngineItf, &recorder,
+            &audioSource, &audioSink, 0, iidArray, required);
+    CheckErr(res);
+
+    /* Realizing the recorder in synchronous mode. */
+    res = (*recorder)->Realize(recorder, SL_BOOLEAN_FALSE);
+    CheckErr(res);
+
+    /* Get the RECORD interface - it is an implicit interface */
+    res = (*recorder)->GetInterface(recorder, SL_IID_RECORD, (void *)&recordItf);
+    CheckErr(res);
+
+    // ANDROID: Should register the SL_IID_ANDROIDSIMPLEBUFFERQUEUE interface for callbacks,
+    // but does the original SL_DATALOCATOR_BUFFERQUEUE variant work just as well?
+
+    /* Setup to receive position event callbacks */
+    res = (*recordItf)->RegisterCallback(recordItf, RecordEventCallback, NULL);
+    CheckErr(res);
+
+    /* Set notifications to occur after every second - may be useful in
+       updating a recording progress bar */
+    res = (*recordItf)->SetPositionUpdatePeriod(recordItf, POSITION_UPDATE_PERIOD);
+    CheckErr(res);
+    res = (*recordItf)->SetCallbackEventsMask(recordItf, SL_RECORDEVENT_HEADATNEWPOS);
+    CheckErr(res);
+
+    /* Set the duration of the recording - 30 seconds (30,000
+       milliseconds) */
+    res = (*recordItf)->SetDurationLimit(recordItf, 30000);
+    CheckErr(res);
+
+    /* Record the audio */
+    res = (*recordItf)->SetRecordState(recordItf, SL_RECORDSTATE_RECORDING);
+    CheckErr(res);
+
+    // ANDROID: BUG - we don't wait for anything to record!
+
+    /* Destroy the recorder object */
+    (*recorder)->Destroy(recorder);
+}
+
+extern "C" void Java_android_media_cts_AudioNativeTest_nativeAppendixBRecording(
+        JNIEnv * /* env */, jclass /* clazz */)
+{
+    SLObjectItf engineObject = android::OpenSLEngine();
+    LOG_ALWAYS_FATAL_IF(engineObject == NULL, "cannot open OpenSL ES engine");
+
+    TestAudioRecording(engineObject);
+    android::CloseSLEngine(engineObject);
+}
diff --git a/tests/tests/media/libaudiojni/audio-record-native.cpp b/tests/tests/media/libaudiojni/audio-record-native.cpp
new file mode 100644
index 0000000..9103cdc
--- /dev/null
+++ b/tests/tests/media/libaudiojni/audio-record-native.cpp
@@ -0,0 +1,631 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio-record-native"
+
+#include "Blob.h"
+#include "Gate.h"
+#include "sl-utils.h"
+
+#include <deque>
+#include <utils/Errors.h>
+
+// Select whether to use STL shared pointer or to use Android strong pointer.
+// We do not actually share this object during its lifetime, but using a shared
+// pointer allows the handle's value to be swapped on the fly if desired.
+#define USE_SHARED_POINTER
+
+#ifdef USE_SHARED_POINTER
+#include <memory>
+template <typename T> using shared_pointer = std::shared_ptr<T>;
+#else
+#include <utils/RefBase.h>
+template <typename T> using shared_pointer = android::sp<T>;
+#endif
+
+using namespace android;
+
+// Must be kept in sync with Java android.media.cts.AudioRecordNative.ReadFlags
+enum {
+    READ_FLAG_BLOCKING = (1 << 0),
+};
+
+// buffer queue buffers on the OpenSL ES side.
+// The choice can be >= 1.  There is also internal buffering by AudioRecord.
+
+static const size_t BUFFER_SIZE_MSEC = 20;
+
+// TODO: Add a single buffer blocking read mode which does not require additional memory.
+// TODO: Add internal buffer memory (e.g. use circular buffer, right now mallocs on heap).
+
+class AudioRecordNative
+#ifndef USE_SHARED_POINTER
+        : public RefBase // android strong pointers require RefBase
+#endif
+{
+public:
+    AudioRecordNative() :
+        mEngineObj(NULL),
+        mEngine(NULL),
+        mRecordObj(NULL),
+        mRecord(NULL),
+        mBufferQueue(NULL),
+        mRecordState(SL_RECORDSTATE_STOPPED),
+        mBufferSize(0),
+        mNumBuffers(0)
+    { }
+
+    ~AudioRecordNative() {
+        close();
+    }
+
+    typedef std::lock_guard<std::recursive_mutex> auto_lock;
+
+    status_t open(uint32_t numChannels, uint32_t sampleRate, bool useFloat, uint32_t numBuffers) {
+        close();
+        auto_lock l(mLock);
+        mEngineObj = OpenSLEngine();
+        if (mEngineObj == NULL) {
+            ALOGW("cannot create OpenSL ES engine");
+            return INVALID_OPERATION;
+        }
+
+        SLresult res;
+        for (;;) {
+            /* Get the SL Engine Interface which is implicit */
+            res = (*mEngineObj)->GetInterface(mEngineObj, SL_IID_ENGINE, (void *)&mEngine);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            SLDataLocator_IODevice locator_mic;
+            /* Setup the data source structure */
+            locator_mic.locatorType = SL_DATALOCATOR_IODEVICE;
+            locator_mic.deviceType = SL_IODEVICE_AUDIOINPUT;
+            locator_mic.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
+            locator_mic.device = NULL;
+            SLDataSource audioSource;
+            audioSource.pLocator = (void *)&locator_mic;
+            audioSource.pFormat = NULL;
+
+            // FIXME: Android requires SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE
+            // because the recorder makes the distinction from SL_DATALOCATOR_BUFFERQUEUE
+            // which the player does not.
+            SLDataLocator_AndroidSimpleBufferQueue loc_bq = {
+                    SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, numBuffers
+            };
+#if 0
+            SLDataFormat_PCM pcm = {
+                    SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_16,
+                    SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
+                    SL_SPEAKER_FRONT_LEFT, SL_BYTEORDER_LITTLEENDIAN
+            };
+#else
+            SLAndroidDataFormat_PCM_EX pcm;
+            pcm.formatType = useFloat ? SL_ANDROID_DATAFORMAT_PCM_EX : SL_DATAFORMAT_PCM;
+            pcm.numChannels = numChannels;
+            pcm.sampleRate = sampleRate * 1000;
+            pcm.bitsPerSample = useFloat ?
+                    SL_PCMSAMPLEFORMAT_FIXED_32 : SL_PCMSAMPLEFORMAT_FIXED_16;
+            pcm.containerSize = pcm.bitsPerSample;
+            pcm.channelMask = channelCountToMask(numChannels);
+            pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+            // additional
+            pcm.representation = useFloat ? SL_ANDROID_PCM_REPRESENTATION_FLOAT
+                                    : SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
+#endif
+            SLDataSink audioSink;
+            audioSink = { &loc_bq, &pcm };
+
+            SLboolean required[2];
+            SLInterfaceID iidArray[2];
+            /* Request the AndroidSimpleBufferQueue and AndroidConfiguration interfaces */
+            required[0] = SL_BOOLEAN_TRUE;
+            iidArray[0] = SL_IID_ANDROIDSIMPLEBUFFERQUEUE;
+            required[1] = SL_BOOLEAN_TRUE;
+            iidArray[1] = SL_IID_ANDROIDCONFIGURATION;
+
+            ALOGV("creating recorder");
+            /* Create audio recorder */
+            res = (*mEngine)->CreateAudioRecorder(mEngine, &mRecordObj,
+                    &audioSource, &audioSink, 2, iidArray, required);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            ALOGV("realizing recorder");
+            /* Realizing the recorder in synchronous mode. */
+            res = (*mRecordObj)->Realize(mRecordObj, SL_BOOLEAN_FALSE /* async */);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            ALOGV("geting record interface");
+            /* Get the RECORD interface - it is an implicit interface */
+            res = (*mRecordObj)->GetInterface(mRecordObj, SL_IID_RECORD, (void *)&mRecord);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            ALOGV("geting buffer queue interface");
+            /* Get the buffer queue interface which was explicitly requested */
+            res = (*mRecordObj)->GetInterface(mRecordObj, SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+                    (void *)&mBufferQueue);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            ALOGV("registering buffer queue interface");
+            /* Setup to receive buffer queue event callbacks */
+            res = (*mBufferQueue)->RegisterCallback(mBufferQueue, BufferQueueCallback, this);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            mBufferSize = (BUFFER_SIZE_MSEC * sampleRate / 1000)
+                    * numChannels * (useFloat ? sizeof(float) : sizeof(int16_t));
+            mNumBuffers = numBuffers;
+            // success
+            break;
+        }
+        if (res != SL_RESULT_SUCCESS) {
+            close(); // should be safe to close even with lock held
+            ALOGW("open error %s", android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        return OK;
+    }
+
+    void close() {
+        SLObjectItf engineObj;
+        SLObjectItf recordObj;
+        {
+            auto_lock l(mLock);
+            (void)stop();
+            // once stopped, we can unregister the callback
+            if (mBufferQueue != NULL) {
+                (void)(*mBufferQueue)->RegisterCallback(
+                        mBufferQueue, NULL /* callback */, NULL /* *pContext */);
+            }
+            (void)flush();
+            engineObj = mEngineObj;
+            recordObj = mRecordObj;
+            // clear out interfaces and objects
+            mRecord = NULL;
+            mBufferQueue = NULL;
+            mEngine = NULL;
+            mRecordObj = NULL;
+            mEngineObj = NULL;
+            mRecordState = SL_RECORDSTATE_STOPPED;
+            mBufferSize = 0;
+            mNumBuffers = 0;
+        }
+        // destroy without lock
+        if (recordObj != NULL) {
+            (*recordObj)->Destroy(recordObj);
+        }
+        if (engineObj) {
+            CloseSLEngine(engineObj);
+        }
+    }
+
+    status_t setRecordState(SLuint32 recordState) {
+        auto_lock l(mLock);
+        if (mRecord == NULL) {
+            return INVALID_OPERATION;
+        }
+        if (recordState == SL_RECORDSTATE_RECORDING) {
+            queueBuffers();
+        }
+        SLresult res = (*mRecord)->SetRecordState(mRecord, recordState);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("setRecordState %d error %s", recordState, android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        mRecordState = recordState;
+        return OK;
+    }
+
+    SLuint32 getRecordState() {
+        auto_lock l(mLock);
+        if (mRecord == NULL) {
+            return SL_RECORDSTATE_STOPPED;
+        }
+        SLuint32 recordState;
+        SLresult res = (*mRecord)->GetRecordState(mRecord, &recordState);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("getRecordState error %s", android::getSLErrStr(res));
+            return SL_RECORDSTATE_STOPPED;
+        }
+        return recordState;
+    }
+
+    status_t getPositionInMsec(int64_t *position) {
+        auto_lock l(mLock);
+        if (mRecord == NULL) {
+            return INVALID_OPERATION;
+        }
+        if (position == NULL) {
+            return BAD_VALUE;
+        }
+        SLuint32 pos;
+        SLresult res = (*mRecord)->GetPosition(mRecord, &pos);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("getPosition error %s", android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        // only lower 32 bits valid
+        *position = pos;
+        return OK;
+    }
+
+    status_t start() {
+        return setRecordState(SL_RECORDSTATE_RECORDING);
+    }
+
+    status_t pause() {
+        return setRecordState(SL_RECORDSTATE_PAUSED);
+    }
+
+    status_t stop() {
+        return setRecordState(SL_RECORDSTATE_STOPPED);
+    }
+
+    status_t flush() {
+        auto_lock l(mLock);
+        status_t result = OK;
+        if (mBufferQueue != NULL) {
+            SLresult res = (*mBufferQueue)->Clear(mBufferQueue);
+            if (res != SL_RESULT_SUCCESS) {
+                return INVALID_OPERATION;
+            }
+        }
+        mReadyQueue.clear();
+        // possible race if the engine is in the callback
+        // safety is only achieved if the recorder is paused or stopped.
+        mDeliveredQueue.clear();
+        mReadBlob = NULL;
+        mReadReady.terminate();
+        return result;
+    }
+
+    ssize_t read(void *buffer, size_t size, bool blocking = false) {
+        std::lock_guard<std::mutex> rl(mReadLock);
+        // not needed if we assume that a single thread is doing the reading
+        // or we always operate in non-blocking mode.
+
+        ALOGV("reading:%p  %zu", buffer, size);
+        size_t copied;
+        std::shared_ptr<Blob> blob;
+        {
+            auto_lock l(mLock);
+            if (mEngine == NULL) {
+                return INVALID_OPERATION;
+            }
+            size_t osize = size;
+            while (!mReadyQueue.empty() && size > 0) {
+                auto b = mReadyQueue.front();
+                size_t tocopy = min(size, b->mSize - b->mOffset);
+                // ALOGD("buffer:%p  size:%zu  b->mSize:%zu  b->mOffset:%zu tocopy:%zu ",
+                //        buffer, size, b->mSize, b->mOffset, tocopy);
+                memcpy(buffer, (char *)b->mData + b->mOffset, tocopy);
+                buffer = (char *)buffer + tocopy;
+                size -= tocopy;
+                b->mOffset += tocopy;
+                if (b->mOffset == b->mSize) {
+                    mReadyQueue.pop_front();
+                }
+            }
+            copied = osize - size;
+            if (!blocking || size == 0 || mReadBlob.get() != NULL) {
+                return copied;
+            }
+            blob = std::make_shared<Blob>(buffer, size);
+            mReadBlob = blob;
+            mReadReady.closeGate(); // the callback will open gate when read is completed.
+        }
+        if (mReadReady.wait()) {
+            // success then the blob is ours with valid data otherwise a flush has occurred
+            // and we return a short count.
+            copied += blob->mOffset;
+        }
+        return copied;
+    }
+
+    void logBufferState() {
+        auto_lock l(mLock);
+        SLBufferQueueState state;
+        SLresult res = (*mBufferQueue)->GetState(mBufferQueue, &state);
+        CheckErr(res);
+        ALOGD("logBufferState state.count:%d  state.playIndex:%d", state.count, state.playIndex);
+    }
+
+    size_t getBuffersPending() {
+        auto_lock l(mLock);
+        return mReadyQueue.size();
+    }
+
+private:
+    status_t queueBuffers() {
+        if (mBufferQueue == NULL) {
+            return INVALID_OPERATION;
+        }
+        if (mReadyQueue.size() + mDeliveredQueue.size() < mNumBuffers) {
+            // add new empty buffer
+            auto b = std::make_shared<Blob>(mBufferSize);
+            mDeliveredQueue.emplace_back(b);
+            (*mBufferQueue)->Enqueue(mBufferQueue, b->mData, b->mSize);
+        }
+        return OK;
+    }
+
+    void bufferQueueCallback(SLBufferQueueItf queueItf) {
+        auto_lock l(mLock);
+        if (queueItf != mBufferQueue) {
+            ALOGW("invalid buffer queue interface, ignoring");
+            return;
+        }
+        // logBufferState();
+
+        // remove from delivered queue
+        if (mDeliveredQueue.size()) {
+            auto b = mDeliveredQueue.front();
+            mDeliveredQueue.pop_front();
+            if (mReadBlob.get() != NULL) {
+                size_t tocopy = min(mReadBlob->mSize - mReadBlob->mOffset, b->mSize - b->mOffset);
+                memcpy((char *)mReadBlob->mData + mReadBlob->mOffset,
+                        (char *)b->mData + b->mOffset, tocopy);
+                b->mOffset += tocopy;
+                mReadBlob->mOffset += tocopy;
+                if (mReadBlob->mOffset == mReadBlob->mSize) {
+                    mReadBlob = NULL;      // we're done, clear our reference.
+                    mReadReady.openGate(); // allow read to continue.
+                }
+                if (b->mOffset == b->mSize) {
+                    b = NULL;
+                }
+            }
+            if (b.get() != NULL) {
+                if (mReadyQueue.size() + mDeliveredQueue.size() < mNumBuffers) {
+                    mReadyQueue.emplace_back(b); // save onto ready queue for future reads
+                } else {
+                    ALOGW("dropping data");
+                }
+            }
+        } else {
+            ALOGW("no delivered data!");
+        }
+        queueBuffers();
+    }
+
+    static void BufferQueueCallback(SLBufferQueueItf queueItf, void *pContext) {
+        // the callback context is the raw native record object (not the shared
+        // pointer wrapper), so no reference counting is involved here.
+        AudioRecordNative *record = (AudioRecordNative *)pContext;
+        record->bufferQueueCallback(queueItf);
+    }
+
+    SLObjectItf           mEngineObj;
+    SLEngineItf           mEngine;
+    SLObjectItf           mRecordObj;
+    SLRecordItf           mRecord;
+    SLBufferQueueItf      mBufferQueue;
+    SLuint32              mRecordState;
+    size_t                mBufferSize;
+    size_t                mNumBuffers;
+    std::recursive_mutex  mLock;          // monitor lock - locks public API methods and callback.
+                                          // recursive since it may call itself through API.
+    std::mutex            mReadLock;      // read lock - for blocking mode, prevents multiple
+                                          // reader threads from overlapping reads.  this is
+                                          // generally unnecessary as reads occur from
+                                          // one thread only.  acquire this before mLock.
+    std::shared_ptr<Blob> mReadBlob;
+    Gate                  mReadReady;
+    std::deque<std::shared_ptr<Blob>> mReadyQueue;     // ready for read.
+    std::deque<std::shared_ptr<Blob>> mDeliveredQueue; // delivered to BufferQueue
+};
+
+/* Java static methods.
+ *
+ * These are not directly exposed to the user, so we can assume a valid "jrecord" handle
+ * to be passed in.
+ */
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeTest(
+    JNIEnv * /* env */, jclass /* clazz */,
+    jint numChannels, jint sampleRate, jboolean useFloat,
+    jint msecPerBuffer, jint numBuffers)
+{
+    AudioRecordNative record;
+    const size_t frameSize = numChannels * (useFloat ? sizeof(float) : sizeof(int16_t));
+    const size_t framesPerBuffer = msecPerBuffer * sampleRate / 1000;
+
+    status_t res;
+    void *buffer = calloc(framesPerBuffer * numBuffers, frameSize);
+    for (;;) {
+        res = record.open(numChannels, sampleRate, useFloat, numBuffers);
+        if (res != OK) break;
+
+        record.logBufferState();
+        res = record.start();
+        if (res != OK) break;
+
+        size_t size = framesPerBuffer * numBuffers * frameSize;
+        for (size_t offset = 0; size - offset > 0; ) {
+            ssize_t amount = record.read((char *)buffer + offset, size - offset);
+            // ALOGD("read amount: %zd", amount);
+            if (amount < 0) break;
+            offset += amount;
+            usleep(5 * 1000 /* usec */);
+        }
+
+        res = record.stop();
+        break;
+    }
+    record.close();
+    free(buffer);
+    return res;
+}
+
+extern "C" jlong Java_android_media_cts_AudioRecordNative_nativeCreateRecord(
+    JNIEnv * /* env */, jclass /* clazz */)
+{
+    return (jlong)(new shared_pointer<AudioRecordNative>(new AudioRecordNative()));
+}
+
+extern "C" void Java_android_media_cts_AudioRecordNative_nativeDestroyRecord(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    delete (shared_pointer<AudioRecordNative> *)jrecord;
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeOpen(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord,
+    jint numChannels, jint sampleRate, jboolean useFloat, jint numBuffers)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)record->open(numChannels, sampleRate, useFloat == JNI_TRUE,
+            numBuffers);
+}
+
+extern "C" void Java_android_media_cts_AudioRecordNative_nativeClose(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() != NULL) {
+        record->close();
+    }
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeStart(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)record->start();
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeStop(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)record->stop();
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativePause(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)record->pause();
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeFlush(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)record->flush();
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeGetPositionInMsec(
+    JNIEnv *env, jclass /* clazz */, jlong jrecord, jlongArray jPosition)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    int64_t pos;
+    status_t res = record->getPositionInMsec(&pos);
+    if (res != OK) {
+        return res;
+    }
+    jlong *nPosition = (jlong *) env->GetPrimitiveArrayCritical(jPosition, NULL /* isCopy */);
+    if (nPosition == NULL) {
+        ALOGE("Unable to get array for nativeGetPositionInMsec()");
+        return BAD_VALUE;
+    }
+    nPosition[0] = (jlong)pos;
+    env->ReleasePrimitiveArrayCritical(jPosition, nPosition, 0 /* mode */);
+    return OK;
+}
+
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeGetBuffersPending(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jrecord)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)0;
+    }
+    return (jint)record->getBuffersPending();
+}
+
+template <typename T>
+static inline jint readFromRecord(jlong jrecord, T *data,
+    jint offsetInSamples, jint sizeInSamples, jint readFlags)
+{
+    auto record = *(shared_pointer<AudioRecordNative> *)jrecord;
+    if (record.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+
+    const bool isBlocking = readFlags & READ_FLAG_BLOCKING;
+    const size_t sizeInBytes = sizeInSamples * sizeof(T);
+    ssize_t ret = record->read(data + offsetInSamples, sizeInBytes, isBlocking == JNI_TRUE);
+    return (jint)(ret > 0 ? ret / sizeof(T) : ret);
+}
+
+template <typename T>
+static inline jint readArray(JNIEnv *env, jclass /* clazz */, jlong jrecord,
+        T javaAudioData, jint offsetInSamples, jint sizeInSamples, jint readFlags)
+{
+    if (javaAudioData == NULL) {
+        return (jint)BAD_VALUE;
+    }
+
+    auto cAudioData = envGetArrayElements(env, javaAudioData, NULL /* isCopy */);
+    if (cAudioData == NULL) {
+        ALOGE("Error retrieving destination of audio data to record");
+        return (jint)BAD_VALUE;
+    }
+
+    jint ret = readFromRecord(jrecord, cAudioData, offsetInSamples, sizeInSamples, readFlags);
+    envReleaseArrayElements(env, javaAudioData, cAudioData, 0 /* mode */);
+    return ret;
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeReadByteArray(
+    JNIEnv *env, jclass clazz, jlong jrecord,
+    jbyteArray byteArray, jint offsetInSamples, jint sizeInSamples, jint readFlags)
+{
+    return readArray(env, clazz, jrecord, byteArray, offsetInSamples, sizeInSamples, readFlags);
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeReadShortArray(
+    JNIEnv *env, jclass clazz, jlong jrecord,
+    jshortArray shortArray, jint offsetInSamples, jint sizeInSamples, jint readFlags)
+{
+    return readArray(env, clazz, jrecord, shortArray, offsetInSamples, sizeInSamples, readFlags);
+}
+
+extern "C" jint Java_android_media_cts_AudioRecordNative_nativeReadFloatArray(
+    JNIEnv *env, jclass clazz, jlong jrecord,
+    jfloatArray floatArray, jint offsetInSamples, jint sizeInSamples, jint readFlags)
+{
+    return readArray(env, clazz, jrecord, floatArray, offsetInSamples, sizeInSamples, readFlags);
+}
diff --git a/tests/tests/media/libaudiojni/audio-track-native.cpp b/tests/tests/media/libaudiojni/audio-track-native.cpp
new file mode 100644
index 0000000..d51a751
--- /dev/null
+++ b/tests/tests/media/libaudiojni/audio-track-native.cpp
@@ -0,0 +1,579 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio-track-native"
+
+#include "Blob.h"
+#include "Gate.h"
+#include "sl-utils.h"
+
+#include <deque>
+#include <utils/Errors.h>
+
+// Select whether to use STL shared pointer or to use Android strong pointer.
+// We do not actually share this object during its lifetime, but using a shared
+// pointer allows the handle's value to be swapped on the fly if desired.
+#define USE_SHARED_POINTER
+
+#ifdef USE_SHARED_POINTER
+#include <memory>
+template <typename T> using shared_pointer = std::shared_ptr<T>;
+#else
+#include <utils/RefBase.h>
+template <typename T> using shared_pointer = android::sp<T>;
+#endif
+
+using namespace android;
+
+// Must be kept in sync with Java android.media.cts.AudioTrackNative.WriteFlags
+enum {
+    WRITE_FLAG_BLOCKING = (1 << 0),
+};
+
+// TODO: Add a single buffer blocking write mode which does not require additional memory.
+// TODO: Add internal buffer memory (e.g. use circular buffer, right now mallocs on heap).
+
+class AudioTrackNative
+#ifndef USE_SHARED_POINTER
+        : public RefBase // android strong pointers require RefBase
+#endif
+{
+public:
+    AudioTrackNative() :
+        mEngineObj(NULL),
+        mEngine(NULL),
+        mOutputMixObj(NULL),
+        mPlayerObj(NULL),
+        mPlay(NULL),
+        mBufferQueue(NULL),
+        mPlayState(SL_PLAYSTATE_STOPPED),
+        mNumBuffers(0)
+    { }
+
+    ~AudioTrackNative() {
+        close();
+    }
+
+    typedef std::lock_guard<std::recursive_mutex> auto_lock;
+
+    status_t open(uint32_t numChannels, uint32_t sampleRate, bool useFloat,
+            uint32_t numBuffers) {
+        close();
+        auto_lock l(mLock);
+        mEngineObj = OpenSLEngine();
+        if (mEngineObj == NULL) {
+            ALOGW("cannot create OpenSL ES engine");
+            return INVALID_OPERATION;
+        }
+
+        SLresult res;
+        for (;;) {
+            /* Get the SL Engine Interface which is implicit */
+            res = (*mEngineObj)->GetInterface(mEngineObj, SL_IID_ENGINE, (void *)&mEngine);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            // Create Output Mix object to be used by player
+            res = (*mEngine)->CreateOutputMix(
+                    mEngine, &mOutputMixObj, 0 /* numInterfaces */,
+                    NULL /* pInterfaceIds */, NULL /* pInterfaceRequired */);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            // Realizing the Output Mix object in synchronous mode.
+            res = (*mOutputMixObj)->Realize(mOutputMixObj, SL_BOOLEAN_FALSE /* async */);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            /* Setup the data source structure for the buffer queue */
+            SLDataLocator_BufferQueue bufferQueue;
+            bufferQueue.locatorType = SL_DATALOCATOR_BUFFERQUEUE;
+            bufferQueue.numBuffers = numBuffers;
+            mNumBuffers = numBuffers;
+
+            /* Setup the format of the content in the buffer queue */
+
+            SLAndroidDataFormat_PCM_EX pcm;
+            pcm.formatType = useFloat ? SL_ANDROID_DATAFORMAT_PCM_EX : SL_DATAFORMAT_PCM;
+            pcm.numChannels = numChannels;
+            pcm.sampleRate = sampleRate * 1000;
+            pcm.bitsPerSample = useFloat ?
+                    SL_PCMSAMPLEFORMAT_FIXED_32 : SL_PCMSAMPLEFORMAT_FIXED_16;
+            pcm.containerSize = pcm.bitsPerSample;
+            pcm.channelMask = channelCountToMask(numChannels);
+            pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+            // additional
+            pcm.representation = useFloat ? SL_ANDROID_PCM_REPRESENTATION_FLOAT
+                                    : SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
+            SLDataSource audioSource;
+            audioSource.pFormat = (void *)&pcm;
+            audioSource.pLocator = (void *)&bufferQueue;
+
+            /* Setup the data sink structure */
+            SLDataLocator_OutputMix locator_outputmix;
+            locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+            locator_outputmix.outputMix = mOutputMixObj;
+
+            SLDataSink audioSink;
+            audioSink.pLocator = (void *)&locator_outputmix;
+            audioSink.pFormat = NULL;
+
+            SLboolean required[1];
+            SLInterfaceID iidArray[1];
+            required[0] = SL_BOOLEAN_TRUE;
+            iidArray[0] = SL_IID_BUFFERQUEUE;
+
+            res = (*mEngine)->CreateAudioPlayer(mEngine, &mPlayerObj,
+                    &audioSource, &audioSink, 1 /* numInterfaces */, iidArray, required);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            res = (*mPlayerObj)->Realize(mPlayerObj, SL_BOOLEAN_FALSE /* async */);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            res = (*mPlayerObj)->GetInterface(mPlayerObj, SL_IID_PLAY, (void*)&mPlay);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            res = (*mPlayerObj)->GetInterface(
+                    mPlayerObj, SL_IID_BUFFERQUEUE, (void*)&mBufferQueue);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            /* Setup to receive buffer queue event callbacks */
+            res = (*mBufferQueue)->RegisterCallback(mBufferQueue, BufferQueueCallback, this);
+            if (res != SL_RESULT_SUCCESS) break;
+
+            // success
+            break;
+        }
+        if (res != SL_RESULT_SUCCESS) {
+            close(); // should be safe to close even with lock held
+            ALOGW("open error %s", android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        return OK;
+    }
+
+    void close() {
+        SLObjectItf engineObj;
+        SLObjectItf outputMixObj;
+        SLObjectItf playerObj;
+        {
+            auto_lock l(mLock);
+            if (mPlay != NULL && mPlayState != SL_PLAYSTATE_STOPPED) {
+                (void)stop();
+            }
+            // once stopped, we can unregister the callback
+            if (mBufferQueue != NULL) {
+                (void)(*mBufferQueue)->RegisterCallback(
+                        mBufferQueue, NULL /* callback */, NULL /* *pContext */);
+            }
+            (void)flush();
+            engineObj = mEngineObj;
+            outputMixObj = mOutputMixObj;
+            playerObj = mPlayerObj;
+            // clear out interfaces and objects
+            mPlay = NULL;
+            mBufferQueue = NULL;
+            mEngine = NULL;
+            mPlayerObj = NULL;
+            mOutputMixObj = NULL;
+            mEngineObj = NULL;
+            mPlayState = SL_PLAYSTATE_STOPPED;
+        }
+        // destroy without lock
+        if (playerObj != NULL) {
+            (*playerObj)->Destroy(playerObj);
+        }
+        if (outputMixObj != NULL) {
+            (*outputMixObj)->Destroy(outputMixObj);
+        }
+        if (engineObj != NULL) {
+            CloseSLEngine(engineObj);
+        }
+    }
+
+    status_t setPlayState(SLuint32 playState) {
+        auto_lock l(mLock);
+        if (mPlay == NULL) {
+            return INVALID_OPERATION;
+        }
+        SLresult res = (*mPlay)->SetPlayState(mPlay, playState);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("setPlayState %d error %s", playState, android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        mPlayState = playState;
+        return OK;
+    }
+
+    SLuint32 getPlayState() {
+        auto_lock l(mLock);
+        if (mPlay == NULL) {
+            return SL_PLAYSTATE_STOPPED;
+        }
+        SLuint32 playState;
+        SLresult res = (*mPlay)->GetPlayState(mPlay, &playState);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("getPlayState error %s", android::getSLErrStr(res));
+            return SL_PLAYSTATE_STOPPED;
+        }
+        return playState;
+    }
+
+    status_t getPositionInMsec(int64_t *position) {
+        auto_lock l(mLock);
+        if (mPlay == NULL) {
+            return INVALID_OPERATION;
+        }
+        if (position == NULL) {
+            return BAD_VALUE;
+        }
+        SLuint32 pos;
+        SLresult res = (*mPlay)->GetPosition(mPlay, &pos);
+        if (res != SL_RESULT_SUCCESS) {
+            ALOGW("getPosition error %s", android::getSLErrStr(res));
+            return INVALID_OPERATION;
+        }
+        // only lower 32 bits valid
+        *position = pos;
+        return OK;
+    }
+
+    status_t start() {
+        return setPlayState(SL_PLAYSTATE_PLAYING);
+    }
+
+    status_t pause() {
+        return setPlayState(SL_PLAYSTATE_PAUSED);
+    }
+
+    status_t stop() {
+        return setPlayState(SL_PLAYSTATE_STOPPED);
+    }
+
+    status_t flush() {
+        auto_lock l(mLock);
+        status_t result = OK;
+        if (mBufferQueue != NULL) {
+            SLresult res = (*mBufferQueue)->Clear(mBufferQueue);
+            if (res != SL_RESULT_SUCCESS) {
+                return INVALID_OPERATION;
+            }
+        }
+
+        // Possible race if the engine is currently in the buffer queue callback;
+        // clearing is only safe if the player is paused or stopped.
+        mDeliveredQueue.clear();
+        return result;
+    }
+
+    ssize_t write(const void *buffer, size_t size, bool isBlocking = false) {
+        std::lock_guard<std::mutex> rl(mWriteLock);
+        // mWriteLock is not needed if we assume that a single thread is doing the writing
+        // or we always operate in non-blocking mode.
+
+        {
+            auto_lock l(mLock);
+            if (mBufferQueue == NULL) {
+                return INVALID_OPERATION;
+            }
+            if (mDeliveredQueue.size() < mNumBuffers) {
+                auto b = std::make_shared<BlobReadOnly>(buffer, size, false /* byReference */);
+                mDeliveredQueue.emplace_back(b);
+                (*mBufferQueue)->Enqueue(mBufferQueue, b->mData, b->mSize);
+                return size;
+            }
+            if (!isBlocking) {
+                return 0;
+            }
+            mWriteReady.closeGate(); // we're full.
+        }
+        if (mWriteReady.wait()) {
+            auto_lock l(mLock);
+            if (mDeliveredQueue.size() < mNumBuffers) {
+                auto b = std::make_shared<BlobReadOnly>(buffer, size, false /* byReference */);
+                mDeliveredQueue.emplace_back(b);
+                (*mBufferQueue)->Enqueue(mBufferQueue, b->mData, b->mSize);
+                return size;
+            }
+        }
+        ALOGW("unable to deliver write");
+        return 0;
+    }
+
+    void logBufferState() {
+        auto_lock l(mLock);
+        SLBufferQueueState state;
+        SLresult res = (*mBufferQueue)->GetState(mBufferQueue, &state);
+        CheckErr(res);
+        ALOGD("logBufferState state.count:%d  state.playIndex:%d", state.count, state.playIndex);
+    }
+
+    size_t getBuffersPending() {
+        auto_lock l(mLock);
+        return mDeliveredQueue.size();
+    }
+
+private:
+    void bufferQueueCallback(SLBufferQueueItf queueItf) {
+        auto_lock l(mLock);
+        if (queueItf != mBufferQueue) {
+            ALOGW("invalid buffer queue interface, ignoring");
+            return;
+        }
+        // logBufferState();
+
+        // remove from delivered queue
+        if (mDeliveredQueue.size()) {
+            mDeliveredQueue.pop_front();
+        } else {
+            ALOGW("no delivered data!");
+        }
+        if (!mWriteReady.isOpen()) {
+            mWriteReady.openGate();
+        }
+    }
+
+    static void BufferQueueCallback(SLBufferQueueItf queueItf, void *pContext) {
+        // pContext is the "naked" native AudioTrackNative registered with
+        // RegisterCallback(); forward to the member callback.
+        AudioTrackNative *track = (AudioTrackNative *)pContext;
+        track->bufferQueueCallback(queueItf);
+    }
+
+    SLObjectItf          mEngineObj;
+    SLEngineItf          mEngine;
+    SLObjectItf          mOutputMixObj;
+    SLObjectItf          mPlayerObj;
+    SLPlayItf            mPlay;
+    SLBufferQueueItf     mBufferQueue;
+    SLuint32             mPlayState;
+    SLuint32             mNumBuffers;
+    std::recursive_mutex mLock;           // monitor lock - locks public API methods and callback.
+                                          // recursive since it may call itself through API.
+    std::mutex           mWriteLock;      // write lock - for blocking mode, prevents multiple
+                                          // writer threads from overlapping writes.  this is
+                                          // generally unnecessary as writes occur from
+                                          // one thread only.  acquire this before mLock.
+    Gate                 mWriteReady;
+    std::deque<std::shared_ptr<BlobReadOnly>> mDeliveredQueue; // delivered to mBufferQueue
+};
+
+/* Java static methods.
+ *
+ * These are not directly exposed to the user, so we can assume a valid "jtrack" handle
+ * to be passed in.
+ */
+
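+// Rough sketch of the intended handle lifecycle as driven from the Java side
+// (method names mirror AudioTrackNative.java; the parameter values are illustrative only):
+//
+//   long handle = nativeCreateTrack();     // wraps a shared_pointer<AudioTrackNative>
+//   nativeOpen(handle, 2 /* numChannels */, 48000 /* sampleRate */,
+//           false /* useFloat */, 4 /* numBuffers */);
+//   nativeStart(handle);
+//   ... nativeWriteShortArray(handle, data, 0, samples, WRITE_FLAG_BLOCKING) ...
+//   nativeStop(handle);
+//   nativeClose(handle);
+//   nativeDestroyTrack(handle);            // deletes the shared_pointer wrapper
+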
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeTest(
+    JNIEnv * /* env */, jclass /* clazz */,
+    jint numChannels, jint sampleRate, jboolean useFloat,
+    jint msecPerBuffer, jint numBuffers)
+{
+    AudioTrackNative track;
+    const size_t frameSize = numChannels * (useFloat ? sizeof(float) : sizeof(int16_t));
+    const size_t framesPerBuffer = msecPerBuffer * sampleRate / 1000;
+
+    status_t res;
+    void *buffer = calloc(framesPerBuffer * numBuffers, frameSize);
+    for (;;) {
+        res = track.open(numChannels, sampleRate, useFloat, numBuffers);
+        if (res != OK) break;
+
+        for (int i = 0; i < numBuffers; ++i) {
+            track.write((char *)buffer + i * (framesPerBuffer * frameSize),
+                    framesPerBuffer * frameSize);
+        }
+
+        track.logBufferState();
+        res = track.start();
+        if (res != OK) break;
+
+        size_t buffers;
+        while ((buffers = track.getBuffersPending()) > 0) {
+            // ALOGD("outstanding buffers: %zu", buffers);
+            usleep(5 * 1000 /* usec */);
+        }
+        res = track.stop();
+        break;
+    }
+    track.close();
+    free(buffer);
+    return res;
+}
+
+extern "C" jlong Java_android_media_cts_AudioTrackNative_nativeCreateTrack(
+    JNIEnv * /* env */, jclass /* clazz */)
+{
+    return (jlong)(new shared_pointer<AudioTrackNative>(new AudioTrackNative()));
+}
+
+extern "C" void Java_android_media_cts_AudioTrackNative_nativeDestroyTrack(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    delete (shared_pointer<AudioTrackNative> *)jtrack;
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeOpen(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack,
+    jint numChannels, jint sampleRate, jboolean useFloat, jint numBuffers)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)track->open(numChannels, sampleRate, useFloat == JNI_TRUE,
+            numBuffers);
+}
+
+extern "C" void Java_android_media_cts_AudioTrackNative_nativeClose(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() != NULL) {
+        track->close();
+    }
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeStart(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)track->start();
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeStop(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)track->stop();
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativePause(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)track->pause();
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeFlush(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    return (jint)track->flush();
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeGetPositionInMsec(
+    JNIEnv *env, jclass /* clazz */, jlong jtrack, jlongArray jPosition)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+    int64_t pos;
+    status_t res = track->getPositionInMsec(&pos);
+    if (res != OK) {
+        return res;
+    }
+    jlong *nPosition = (jlong *) env->GetPrimitiveArrayCritical(jPosition, NULL /* isCopy */);
+    if (nPosition == NULL) {
+        ALOGE("Unable to get array for nativeGetPositionInMsec()");
+        return BAD_VALUE;
+    }
+    nPosition[0] = (jlong)pos;
+    env->ReleasePrimitiveArrayCritical(jPosition, nPosition, 0 /* mode */);
+    return OK;
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeGetBuffersPending(
+    JNIEnv * /* env */, jclass /* clazz */, jlong jtrack)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)0;
+    }
+    return (jint)track->getBuffersPending();
+}
+
+template <typename T>
+static inline jint writeToTrack(jlong jtrack, const T *data,
+    jint offsetInSamples, jint sizeInSamples, jint writeFlags)
+{
+    auto track = *(shared_pointer<AudioTrackNative> *)jtrack;
+    if (track.get() == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+
+    const bool isBlocking = writeFlags & WRITE_FLAG_BLOCKING;
+    const size_t sizeInBytes = sizeInSamples * sizeof(T);
+    ssize_t ret = track->write(data + offsetInSamples, sizeInBytes, isBlocking);
+    return (jint)(ret > 0 ? ret / sizeof(T) : ret);
+}
+
+template <typename T>
+static inline jint writeArray(JNIEnv *env, jclass /* clazz */, jlong jtrack,
+        T javaAudioData, jint offsetInSamples, jint sizeInSamples, jint writeFlags)
+{
+    if (javaAudioData == NULL) {
+        return (jint)INVALID_OPERATION;
+    }
+
+    auto cAudioData = envGetArrayElements(env, javaAudioData, NULL /* isCopy */);
+    if (cAudioData == NULL) {
+        ALOGE("Error retrieving source of audio data to play");
+        return (jint)BAD_VALUE;
+    }
+
+    jint ret = writeToTrack(jtrack, cAudioData, offsetInSamples, sizeInSamples, writeFlags);
+    envReleaseArrayElements(env, javaAudioData, cAudioData, 0 /* mode */);
+    return ret;
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeWriteByteArray(
+    JNIEnv *env, jclass clazz, jlong jtrack,
+    jbyteArray byteArray, jint offsetInSamples, jint sizeInSamples, jint writeFlags)
+{
+    ALOGV("nativeWriteByteArray(%p, %d, %d, %d)",
+            byteArray, offsetInSamples, sizeInSamples, writeFlags);
+    return writeArray(env, clazz, jtrack, byteArray, offsetInSamples, sizeInSamples, writeFlags);
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeWriteShortArray(
+    JNIEnv *env, jclass clazz, jlong jtrack,
+    jshortArray shortArray, jint offsetInSamples, jint sizeInSamples, jint writeFlags)
+{
+    ALOGV("nativeWriteShortArray(%p, %d, %d, %d)",
+            shortArray, offsetInSamples, sizeInSamples, writeFlags);
+    return writeArray(env, clazz, jtrack, shortArray, offsetInSamples, sizeInSamples, writeFlags);
+}
+
+extern "C" jint Java_android_media_cts_AudioTrackNative_nativeWriteFloatArray(
+    JNIEnv *env, jclass clazz, jlong jtrack,
+    jfloatArray floatArray, jint offsetInSamples, jint sizeInSamples, jint writeFlags)
+{
+    ALOGV("nativeWriteFloatArray(%p, %d, %d, %d)",
+            floatArray, offsetInSamples, sizeInSamples, writeFlags);
+    return writeArray(env, clazz, jtrack, floatArray, offsetInSamples, sizeInSamples, writeFlags);
+}
diff --git a/tests/tests/media/libaudiojni/sl-utils.cpp b/tests/tests/media/libaudiojni/sl-utils.cpp
new file mode 100644
index 0000000..1aa89ba
--- /dev/null
+++ b/tests/tests/media/libaudiojni/sl-utils.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SL-Utils"
+
+#include "sl-utils.h"
+#include <utils/Mutex.h>
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+// These will wind up in <SLES/OpenSLES_Android.h>
+#define SL_ANDROID_SPEAKER_QUAD (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT \
+ | SL_SPEAKER_BACK_LEFT | SL_SPEAKER_BACK_RIGHT)
+
+#define SL_ANDROID_SPEAKER_5DOT1 (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT \
+ | SL_SPEAKER_FRONT_CENTER | SL_SPEAKER_LOW_FREQUENCY | SL_SPEAKER_BACK_LEFT \
+ | SL_SPEAKER_BACK_RIGHT)
+
+#define SL_ANDROID_SPEAKER_7DOT1 (SL_ANDROID_SPEAKER_5DOT1 | SL_SPEAKER_SIDE_LEFT \
+ | SL_SPEAKER_SIDE_RIGHT)
+
+namespace android {
+
+static Mutex gLock;
+static SLObjectItf gEngineObject;
+static unsigned gRefCount;
+
+static const char *gErrorStrings[] = {
+    "SL_RESULT_SUCCESS",                // 0
+    "SL_RESULT_PRECONDITIONS_VIOLATE",  // 1
+    "SL_RESULT_PARAMETER_INVALID",      // 2
+    "SL_RESULT_MEMORY_FAILURE",         // 3
+    "SL_RESULT_RESOURCE_ERROR",         // 4
+    "SL_RESULT_RESOURCE_LOST",          // 5
+    "SL_RESULT_IO_ERROR",               // 6
+    "SL_RESULT_BUFFER_INSUFFICIENT",    // 7
+    "SL_RESULT_CONTENT_CORRUPTED",      // 8
+    "SL_RESULT_CONTENT_UNSUPPORTED",    // 9
+    "SL_RESULT_CONTENT_NOT_FOUND",      // 10
+    "SL_RESULT_PERMISSION_DENIED",      // 11
+    "SL_RESULT_FEATURE_UNSUPPORTED",    // 12
+    "SL_RESULT_INTERNAL_ERROR",         // 13
+    "SL_RESULT_UNKNOWN_ERROR",          // 14
+    "SL_RESULT_OPERATION_ABORTED",      // 15
+    "SL_RESULT_CONTROL_LOST",           // 16
+};
+
+const char *getSLErrStr(int code) {
+    if ((size_t)code >= ARRAY_SIZE(gErrorStrings)) {
+        return "SL_RESULT_UNKNOWN";
+    }
+    return gErrorStrings[code];
+}
+
+SLuint32 channelCountToMask(unsigned channelCount) {
+    switch (channelCount) {
+    case 1:
+        return SL_SPEAKER_FRONT_LEFT; // we prefer left over center
+    case 2:
+        return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
+    case 3:
+        return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT | SL_SPEAKER_FRONT_CENTER;
+    case 4:
+        return SL_ANDROID_SPEAKER_QUAD;
+    case 5:
+        return SL_ANDROID_SPEAKER_QUAD | SL_SPEAKER_FRONT_CENTER;
+    case 6:
+        return SL_ANDROID_SPEAKER_5DOT1;
+    case 7:
+        return SL_ANDROID_SPEAKER_5DOT1 | SL_SPEAKER_BACK_CENTER;
+    case 8:
+        return SL_ANDROID_SPEAKER_7DOT1;
+    default:
+        return 0;
+    }
+}
+
+static SLObjectItf createEngine() {
+    static SLEngineOption EngineOption[] = {
+            (SLuint32) SL_ENGINEOPTION_THREADSAFE,
+            (SLuint32) SL_BOOLEAN_TRUE
+    };
+    // create engine in thread-safe mode
+    SLObjectItf engine;
+    SLresult result = slCreateEngine(&engine,
+            1 /* numOptions */, EngineOption /* pEngineOptions */,
+            0 /* numInterfaces */, NULL /* pInterfaceIds */, NULL /* pInterfaceRequired */);
+    if (result != SL_RESULT_SUCCESS) {
+        ALOGE("slCreateEngine() failed: %s", getSLErrStr(result));
+        return NULL;
+    }
+    // realize the engine
+    result = (*engine)->Realize(engine, SL_BOOLEAN_FALSE /* async */);
+    if (result != SL_RESULT_SUCCESS) {
+        ALOGE("Realize() failed: %s", getSLErrStr(result));
+        (*engine)->Destroy(engine);
+        return NULL;
+    }
+    return engine;
+}
+
+SLObjectItf OpenSLEngine(bool global) {
+
+    if (!global) {
+        return createEngine();
+    }
+    Mutex::Autolock l(gLock);
+    if (gRefCount == 0) {
+        gEngineObject = createEngine();
+    }
+    gRefCount++;
+    return gEngineObject;
+}
+
+void CloseSLEngine(SLObjectItf engine) {
+    Mutex::Autolock l(gLock);
+    if (engine == gEngineObject) {
+        if (gRefCount == 0) {
+            ALOGE("CloseSLEngine(%p): refcount already 0", engine);
+            return;
+        }
+        if (--gRefCount != 0) {
+            return;
+        }
+        gEngineObject = NULL;
+    }
+    (*engine)->Destroy(engine);
+}
+
+} // namespace android
+
diff --git a/tests/tests/media/libaudiojni/sl-utils.h b/tests/tests/media/libaudiojni/sl-utils.h
new file mode 100644
index 0000000..8582648
--- /dev/null
+++ b/tests/tests/media/libaudiojni/sl-utils.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SL_UTILS_H
+#define ANDROID_SL_UTILS_H
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+
+#include <jni.h>
+#include <mutex>
+#include <utils/Log.h>
+
+#define CheckErr(res) LOG_ALWAYS_FATAL_IF( \
+        (res) != SL_RESULT_SUCCESS, "result error %s", android::getSLErrStr(res));
+
+namespace android {
+
+// FIXME: Move to common file.
+template <typename T>
+static inline
+const T &min(const T &a, const T &b) {
+    return a < b ? a : b;
+}
+
+/* Returns the error string for the OpenSL ES error code
+ */
+const char *getSLErrStr(int code);
+
+/* Returns the OpenSL ES equivalent standard channel mask
+ * for a given channel count, 0 if no such mask is available.
+ */
+SLuint32 channelCountToMask(unsigned channelCount);
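+// For example, with the implementation in sl-utils.cpp,
+// channelCountToMask(2) returns SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT.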
+
+/* Returns an OpenSL ES Engine object interface.
+ * The engine created will be thread safe [3.2]
+ * The underlying implementation may not support more than one engine. [4.1.1]
+ *
+ * @param global if true, open and return the shared global engine instance;
+ *   if false, create a new local engine instance.
+ * @return the Engine SLObjectItf, or NULL if unsuccessful.
+ */
+SLObjectItf OpenSLEngine(bool global = true);
+
+/* Closes an OpenSL ES Engine object returned by OpenSLEngine().
+ */
+void CloseSLEngine(SLObjectItf engine);
+
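+// Typical pairing (a sketch; assumes the caller checks for NULL):
+//
+//   SLObjectItf engine = OpenSLEngine();  // shared global engine instance, reference counted
+//   if (engine != NULL) {
+//       // ... create players / recorders from the engine ...
+//       CloseSLEngine(engine);            // drops the reference (destroys when it hits zero)
+//   }
+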
+// overloaded JNI array helper functions (same as in android_media_AudioRecord)
+inline
+jbyte *envGetArrayElements(JNIEnv *env, jbyteArray array, jboolean *isCopy) {
+    return env->GetByteArrayElements(array, isCopy);
+}
+
+inline
+void envReleaseArrayElements(JNIEnv *env, jbyteArray array, jbyte *elems, jint mode) {
+    env->ReleaseByteArrayElements(array, elems, mode);
+}
+
+inline
+jshort *envGetArrayElements(JNIEnv *env, jshortArray array, jboolean *isCopy) {
+    return env->GetShortArrayElements(array, isCopy);
+}
+
+inline
+void envReleaseArrayElements(JNIEnv *env, jshortArray array, jshort *elems, jint mode) {
+    env->ReleaseShortArrayElements(array, elems, mode);
+}
+
+inline
+jfloat *envGetArrayElements(JNIEnv *env, jfloatArray array, jboolean *isCopy) {
+    return env->GetFloatArrayElements(array, isCopy);
+}
+
+inline
+void envReleaseArrayElements(JNIEnv *env, jfloatArray array, jfloat *elems, jint mode) {
+    env->ReleaseFloatArrayElements(array, elems, mode);
+}
+
+} // namespace android
+
+#endif // ANDROID_SL_UTILS_H
diff --git a/tests/tests/media/src/android/media/cts/AudioHelper.java b/tests/tests/media/src/android/media/cts/AudioHelper.java
index efee024..6707ea6 100644
--- a/tests/tests/media/src/android/media/cts/AudioHelper.java
+++ b/tests/tests/media/src/android/media/cts/AudioHelper.java
@@ -211,13 +211,13 @@
      * This affects AudioRecord timing.
      */
     public static class AudioRecordAudit extends AudioRecord {
-        AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
+        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                 int format, int bufferSize, boolean isChannelIndex) {
             this(audioSource, sampleRate, channelMask, format, bufferSize, isChannelIndex,
                     AudioManager.STREAM_MUSIC, 500 /*delayMs*/);
         }
 
-        AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
+        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                 int format, int bufferSize,
                 boolean isChannelIndex, int auditStreamType, int delayMs) {
             // without channel index masks, one could call:
@@ -408,4 +408,84 @@
         private int mPosition;
         private long mFinishAtMs;
     }
+
+    /* AudioRecordAuditNative extends AudioRecordNative to allow concurrent playback
+     * of read content to an AudioTrackNative.  This is for testing only.
+     * For general applications, it is NOT recommended to extend AudioRecordNative.
+     * This affects AudioRecordNative timing.
+     */
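+    // (See AudioNativeTest#testRecordAudit for an example of this class in use.)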
+    public static class AudioRecordAuditNative extends AudioRecordNative {
+        public AudioRecordAuditNative() {
+            super();
+            // Caution: delayMs too large results in buffer sizes that cannot be created.
+            mTrack = new AudioTrackNative();
+        }
+
+        @Override
+        public boolean open(int numChannels, int sampleRate, boolean useFloat, int numBuffers) {
+            if (super.open(numChannels, sampleRate, useFloat, numBuffers)) {
+                if (!mTrack.open(numChannels, sampleRate, useFloat, 2 /* numBuffers */)) {
+                    mTrack = null; // remove track
+                }
+                return true;
+            }
+            return false;
+        }
+
+        @Override
+        public void close() {
+            super.close();
+            if (mTrack != null) {
+                mTrack.close();
+            }
+        }
+
+        @Override
+        public boolean start() {
+            if (super.start()) {
+                if (mTrack != null) {
+                    mTrack.start();
+                }
+                return true;
+            }
+            return false;
+        }
+
+        @Override
+        public boolean stop() {
+            if (super.stop()) {
+                if (mTrack != null) {
+                    mTrack.stop(); // doesn't allow remaining data to play out
+                }
+                return true;
+            }
+            return false;
+        }
+
+        @Override
+        public int read(short[] audioData, int offsetInShorts, int sizeInShorts, int readFlags) {
+            int samples = super.read(audioData, offsetInShorts, sizeInShorts, readFlags);
+            if (mTrack != null) {
+                Assert.assertEquals(samples, mTrack.write(audioData, offsetInShorts, samples,
+                        AudioTrackNative.WRITE_FLAG_BLOCKING));
+                mPosition += samples / mTrack.getChannelCount();
+            }
+            return samples;
+        }
+
+        @Override
+        public int read(float[] audioData, int offsetInFloats, int sizeInFloats, int readFlags) {
+            int samples = super.read(audioData, offsetInFloats, sizeInFloats, readFlags);
+            if (mTrack != null) {
+                Assert.assertEquals(samples, mTrack.write(audioData, offsetInFloats, samples,
+                        AudioTrackNative.WRITE_FLAG_BLOCKING));
+                mPosition += samples / mTrack.getChannelCount();
+            }
+            return samples;
+        }
+
+        public AudioTrackNative mTrack;
+        private final static String TAG = "AudioRecordAuditNative";
+        private int mPosition;
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/AudioNativeTest.java b/tests/tests/media/src/android/media/cts/AudioNativeTest.java
new file mode 100644
index 0000000..b10da0c
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/AudioNativeTest.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.content.pm.PackageManager;
+import android.cts.util.CtsAndroidTestCase;
+import android.util.Log;
+
+public class AudioNativeTest extends CtsAndroidTestCase {
+    public void testAppendixBBufferQueue() {
+        nativeAppendixBBufferQueue();
+    }
+
+    public void testAppendixBRecording() {
+        // Skip the recording test when the device has no microphone.
+        if (!hasMicrophone()) {
+            return;
+        }
+        nativeAppendixBRecording();
+    }
+
+    public void testStereo16Playback() {
+        assertTrue(AudioTrackNative.test(
+                2 /* numChannels */, 48000 /* sampleRate */, false /* useFloat */,
+                20 /* msecPerBuffer */, 8 /* numBuffers */));
+    }
+
+    public void testStereo16Record() {
+        assertTrue(AudioRecordNative.test(
+                2 /* numChannels */, 48000 /* sampleRate */, false /* useFloat */,
+                20 /* msecPerBuffer */, 8 /* numBuffers */));
+    }
+
+    public void testPlayStreamData() throws Exception {
+        final String TEST_NAME = "testPlayStreamData";
+        final boolean TEST_FLOAT_ARRAY[] = {
+                false,
+                true,
+        };
+        // due to downmixer algorithmic latency, source channels greater than 2 may
+        // sound shorter in duration at 4kHz sampling rate.
+        final int TEST_SR_ARRAY[] = {
+                /* 4000, */ // below limit of OpenSL ES
+                12345, // irregular sampling rate
+                44100,
+                48000,
+                96000,
+                192000,
+        };
+        // OpenSL ES Bug: MNC does not support channel counts of 3, 5, 7.
+        final int TEST_CHANNELS_ARRAY[] = {
+                1,
+                2,
+                // 3,
+                4,
+                // 5,
+                6,
+                // 7,
+                // 8  // can fail due to memory issues
+        };
+        final float TEST_SWEEP = 0; // sine wave only
+        final int TEST_TIME_IN_MSEC = 300;
+        final int TOLERANCE_MSEC = 20;
+
+        for (boolean TEST_FLOAT : TEST_FLOAT_ARRAY) {
+            double frequency = 400; // frequency changes for each test
+            for (int TEST_SR : TEST_SR_ARRAY) {
+                for (int TEST_CHANNELS : TEST_CHANNELS_ARRAY) {
+                    // OpenSL ES BUG: we run out of AudioTrack memory for this config on MNC
+                    // Log.d(TEST_NAME, "open channels:" + TEST_CHANNELS + " sr:" + TEST_SR);
+                    if (TEST_FLOAT && TEST_CHANNELS >= 6 && TEST_SR >= 192000) {
+                        continue;
+                    }
+                    AudioTrackNative track = new AudioTrackNative();
+                    assertTrue(TEST_NAME,
+                            track.open(TEST_CHANNELS, TEST_SR, TEST_FLOAT, 1 /* numBuffers */));
+                    assertTrue(TEST_NAME, track.start());
+
+                    final int sourceSamples =
+                            (int)((long)TEST_SR * TEST_TIME_IN_MSEC * TEST_CHANNELS / 1000);
+                    final double testFrequency = frequency / TEST_CHANNELS;
+                    if (TEST_FLOAT) {
+                        float data[] = AudioHelper.createSoundDataInFloatArray(
+                                sourceSamples, TEST_SR,
+                                testFrequency, TEST_SWEEP);
+                        assertEquals(sourceSamples,
+                                track.write(data, 0 /* offset */, sourceSamples,
+                                        AudioTrackNative.WRITE_FLAG_BLOCKING));
+                    } else {
+                        short data[] = AudioHelper.createSoundDataInShortArray(
+                                sourceSamples, TEST_SR,
+                                testFrequency, TEST_SWEEP);
+                        assertEquals(sourceSamples,
+                                track.write(data, 0 /* offset */, sourceSamples,
+                                        AudioTrackNative.WRITE_FLAG_BLOCKING));
+                    }
+
+                    while (true) {
+                        // OpenSL ES BUG: getPositionInMsec returns 0 after a data underrun.
+
+                        long position = track.getPositionInMsec();
+                        // Log.d(TEST_NAME, "position: " + position);
+                        if (position >= (long)(TEST_TIME_IN_MSEC - TOLERANCE_MSEC)) {
+                            break;
+                        }
+
+                        // It is safer to use a buffer count of 0 to determine termination
+                        if (track.getBuffersPending() == 0) {
+                            break;
+                        }
+                        Thread.sleep(5 /* millis */);
+                    }
+                    track.stop();
+                    track.close();
+                    Thread.sleep(40 /* millis */);  // put a gap in the tone sequence
+                    frequency += 50; // increment test tone frequency
+                }
+            }
+        }
+    }
+
+    public void testRecordStreamData() throws Exception {
+        final String TEST_NAME = "testRecordStreamData";
+        final boolean TEST_FLOAT_ARRAY[] = {
+                false,
+                true,
+        };
+        final int TEST_SR_ARRAY[] = {
+                //4000, // below limit of OpenSL ES
+                12345, // irregular sampling rate
+                44100,
+                48000,
+                96000,
+                192000,
+        };
+        final int TEST_CHANNELS_ARRAY[] = {
+                1,
+                2,
+                // 3,
+                4,
+                // 5,
+                6,
+                // 7,
+                8,
+        };
+        final int SEGMENT_DURATION_IN_MSEC = 20;
+        final int NUMBER_SEGMENTS = 10;
+
+        for (boolean TEST_FLOAT : TEST_FLOAT_ARRAY) {
+            for (int TEST_SR : TEST_SR_ARRAY) {
+                for (int TEST_CHANNELS : TEST_CHANNELS_ARRAY) {
+                    // OpenSL ES BUG: we run out of AudioTrack memory for this config on MNC
+                    if (TEST_FLOAT && TEST_CHANNELS >= 8 && TEST_SR >= 192000) {
+                        continue;
+                    }
+                    AudioRecordNative record = new AudioRecordNative();
+                    doRecordTest(record, TEST_CHANNELS, TEST_SR, TEST_FLOAT,
+                            SEGMENT_DURATION_IN_MSEC, NUMBER_SEGMENTS);
+                }
+            }
+        }
+    }
+
+    public void testRecordAudit() throws Exception {
+        AudioRecordNative record = new AudioHelper.AudioRecordAuditNative();
+        doRecordTest(record, 4 /* numChannels */, 44100 /* sampleRate */, false /* useFloat */,
+                1000 /* segmentDurationMs */, 10 /* numSegments */);
+    }
+
+    static {
+        System.loadLibrary("audio_jni");
+    }
+
+    private static final String TAG = "AudioNativeTest";
+
+    private void doRecordTest(AudioRecordNative record,
+            int numChannels, int sampleRate, boolean useFloat,
+            int segmentDurationMs, int numSegments) {
+        final String TEST_NAME = "doRecordTest";
+        try {
+            // Log.d(TEST_NAME, "open numChannels:" + numChannels + " sampleRate:" + sampleRate);
+            assertTrue(TEST_NAME, record.open(numChannels, sampleRate, useFloat,
+                    numSegments /* numBuffers */));
+            assertTrue(TEST_NAME, record.start());
+
+            final int sourceSamples =
+                    (int)((long)sampleRate * segmentDurationMs * numChannels / 1000);
+
+            if (useFloat) {
+                float data[] = new float[sourceSamples];
+                for (int i = 0; i < numSegments; ++i) {
+                    assertEquals(sourceSamples,
+                            record.read(data, 0 /* offset */, sourceSamples,
+                                    AudioRecordNative.READ_FLAG_BLOCKING));
+                }
+            } else {
+                short data[] = new short[sourceSamples];
+                for (int i = 0; i < numSegments; ++i) {
+                    assertEquals(sourceSamples,
+                            record.read(data, 0 /* offset */, sourceSamples,
+                                    AudioRecordNative.READ_FLAG_BLOCKING));
+                }
+            }
+            assertTrue(TEST_NAME, record.stop());
+        } finally {
+            record.close();
+        }
+    }
+
+    private boolean hasMicrophone() {
+        return getContext().getPackageManager().hasSystemFeature(
+                PackageManager.FEATURE_MICROPHONE);
+    }
+
+    private static native void nativeAppendixBBufferQueue();
+    private static native void nativeAppendixBRecording();
+}
diff --git a/tests/tests/media/src/android/media/cts/AudioRecordNative.java b/tests/tests/media/src/android/media/cts/AudioRecordNative.java
new file mode 100644
index 0000000..18df8ee
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/AudioRecordNative.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.cts.util.CtsAndroidTestCase;
+import android.util.Log;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+public class AudioRecordNative {
+    // Must be kept in sync with C++ JNI audio-record-native (AudioRecordNative) READ_FLAG_*
+    public static final int READ_FLAG_BLOCKING = 1 << 0;
+    /** @hide */
+    @IntDef(flag = true,
+            value = {
+                    READ_FLAG_BLOCKING,
+            })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface ReadFlags { }
+
+    public AudioRecordNative() {
+        mNativeRecordInJavaObj = nativeCreateRecord();
+    }
+
+    public boolean open(int numChannels, int sampleRate, boolean useFloat, int numBuffers) {
+        if (nativeOpen(mNativeRecordInJavaObj, numChannels, sampleRate, useFloat, numBuffers)
+                == STATUS_OK) {
+            mChannelCount = numChannels;
+            return true;
+        }
+        return false;
+    }
+
+    public void close() {
+        nativeClose(mNativeRecordInJavaObj);
+    }
+
+    public boolean start() {
+        return nativeStart(mNativeRecordInJavaObj) == STATUS_OK;
+    }
+
+    public boolean stop() {
+        return nativeStop(mNativeRecordInJavaObj) == STATUS_OK;
+    }
+
+    public boolean pause() {
+        return nativePause(mNativeRecordInJavaObj) == STATUS_OK;
+    }
+
+    public boolean flush() {
+        return nativeFlush(mNativeRecordInJavaObj) == STATUS_OK;
+    }
+
+    public long getPositionInMsec() {
+        long[] position = new long[1];
+        if (nativeGetPositionInMsec(mNativeRecordInJavaObj, position) != STATUS_OK) {
+            throw new IllegalStateException();
+        }
+        return position[0];
+    }
+
+    public int getBuffersPending() {
+        return nativeGetBuffersPending(mNativeRecordInJavaObj);
+    }
+
+    public int read(@NonNull byte[] byteArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags) {
+        return nativeReadByteArray(
+                mNativeRecordInJavaObj, byteArray, offsetInSamples, sizeInSamples, readFlags);
+    }
+
+    public int read(@NonNull short[] shortArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags) {
+        return nativeReadShortArray(
+                mNativeRecordInJavaObj, shortArray, offsetInSamples, sizeInSamples, readFlags);
+    }
+
+    public int read(@NonNull float[] floatArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags) {
+        return nativeReadFloatArray(
+                mNativeRecordInJavaObj, floatArray, offsetInSamples, sizeInSamples, readFlags);
+    }
+
+    public int getChannelCount() {
+        return mChannelCount;
+    }
+
+    public static boolean test(int numChannels, int sampleRate, boolean useFloat,
+            int msecPerBuffer, int numBuffers) {
+        return nativeTest(numChannels, sampleRate, useFloat, msecPerBuffer, numBuffers)
+                == STATUS_OK;
+    }
+
+    @Override
+    protected void finalize() {
+        nativeClose(mNativeRecordInJavaObj);
+        nativeDestroyRecord(mNativeRecordInJavaObj);
+    }
+
+    static {
+        System.loadLibrary("audio_jni");
+    }
+
+    private static final String TAG = "AudioRecordNative";
+    private int mChannelCount;
+    private final long mNativeRecordInJavaObj;
+    private static final int STATUS_OK = 0;
+
+    // static native API.
+    // The native API uses a long "record handle" created by nativeCreateRecord.
+    // The handle must be destroyed after use by nativeDestroyRecord.
+    //
+    // Return codes from the native layer are status_t.
+    // Converted to Java booleans or exceptions at the public API layer.
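+    //
+    // A minimal usage sketch through the public wrappers above (illustrative only;
+    // the channel count, sample rate and buffer count are arbitrary):
+    //
+    //   AudioRecordNative record = new AudioRecordNative();
+    //   if (record.open(2 /* numChannels */, 48000 /* sampleRate */,
+    //           false /* useFloat */, 4 /* numBuffers */) && record.start()) {
+    //       short[] data = new short[2 * 48000 / 10]; // ~100 ms of stereo samples
+    //       record.read(data, 0 /* offset */, data.length, READ_FLAG_BLOCKING);
+    //       record.stop();
+    //   }
+    //   record.close();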
+    private static native long nativeCreateRecord();
+    private static native void nativeDestroyRecord(long record);
+    private static native int nativeOpen(
+            long record, int numChannels, int sampleRate, boolean useFloat, int numBuffers);
+    private static native void nativeClose(long record);
+    private static native int nativeStart(long record);
+    private static native int nativeStop(long record);
+    private static native int nativePause(long record);
+    private static native int nativeFlush(long record);
+    private static native int nativeGetPositionInMsec(long record, @NonNull long[] position);
+    private static native int nativeGetBuffersPending(long record);
+    private static native int nativeReadByteArray(long record, @NonNull byte[] byteArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags);
+    private static native int nativeReadShortArray(long record, @NonNull short[] shortArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags);
+    private static native int nativeReadFloatArray(long record, @NonNull float[] floatArray,
+            int offsetInSamples, int sizeInSamples, @ReadFlags int readFlags);
+
+    // native interface for all-in-one testing, no record handle required.
+    private static native int nativeTest(
+            int numChannels, int sampleRate, boolean useFloat, int msecPerBuffer, int numBuffers);
+}
diff --git a/tests/tests/media/src/android/media/cts/AudioTrackNative.java b/tests/tests/media/src/android/media/cts/AudioTrackNative.java
new file mode 100644
index 0000000..1ce44ef
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/AudioTrackNative.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.cts.util.CtsAndroidTestCase;
+import android.util.Log;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+public class AudioTrackNative {
+    // Must be kept in sync with C++ JNI audio-track-native (AudioTrackNative) WRITE_FLAG_*
+    public static final int WRITE_FLAG_BLOCKING = 1 << 0;
+    /** @hide */
+    @IntDef(flag = true,
+            value = {
+                    WRITE_FLAG_BLOCKING,
+            })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface WriteFlags { }
+
+    public AudioTrackNative() {
+        mNativeTrackInJavaObj = nativeCreateTrack();
+    }
+
+    // TODO: eventually accept AudioFormat
+    // numBuffers is the number of internal buffers used before the data reaches OpenSL ES.
+    // A value of 0 means that all writes are blocking (see the usage sketch after open()).
+    public boolean open(int numChannels, int sampleRate, boolean useFloat, int numBuffers) {
+        if (nativeOpen(mNativeTrackInJavaObj, numChannels, sampleRate, useFloat, numBuffers)
+                == STATUS_OK) {
+            mChannelCount = numChannels;
+            return true;
+        }
+        return false;
+    }
+
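+    // Usage sketch (illustrative only): with numBuffers == 2, up to two writes can be
+    // queued before the native layer consumes data; a further write with
+    // WRITE_FLAG_BLOCKING waits for a buffer to drain, while a non-blocking write
+    // returns 0 instead.
+    //
+    //   AudioTrackNative track = new AudioTrackNative();
+    //   if (track.open(2 /* numChannels */, 48000 /* sampleRate */,
+    //           false /* useFloat */, 2 /* numBuffers */) && track.start()) {
+    //       short[] data = new short[2 * 48000 / 10]; // ~100 ms of stereo samples
+    //       track.write(data, 0 /* offset */, data.length, WRITE_FLAG_BLOCKING);
+    //       track.stop();
+    //   }
+    //   track.close();
+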
+    public void close() {
+        nativeClose(mNativeTrackInJavaObj);
+    }
+
+    public boolean start() {
+        return nativeStart(mNativeTrackInJavaObj) == STATUS_OK;
+    }
+
+    public boolean stop() {
+        return nativeStop(mNativeTrackInJavaObj) == STATUS_OK;
+    }
+
+    public boolean pause() {
+        return nativePause(mNativeTrackInJavaObj) == STATUS_OK;
+    }
+
+    public boolean flush() {
+        return nativeFlush(mNativeTrackInJavaObj) == STATUS_OK;
+    }
+
+    public long getPositionInMsec() {
+        long[] position = new long[1];
+        if (nativeGetPositionInMsec(mNativeTrackInJavaObj, position) != STATUS_OK) {
+            throw new IllegalStateException();
+        }
+        return position[0];
+    }
+
+    public int getBuffersPending() {
+        return nativeGetBuffersPending(mNativeTrackInJavaObj);
+    }
+
+    /* Returns the number of samples written.
+     * 0 may be returned if !isBlocking.
+     * A negative value indicates an error.
+     */
+    public int write(@NonNull byte[] byteArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags) {
+        return nativeWriteByteArray(
+                mNativeTrackInJavaObj, byteArray, offsetInSamples, sizeInSamples, writeFlags);
+    }
+
+    public int write(@NonNull short[] shortArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags) {
+        return nativeWriteShortArray(
+                mNativeTrackInJavaObj, shortArray, offsetInSamples, sizeInSamples, writeFlags);
+    }
+
+    public int write(@NonNull float[] floatArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags) {
+        return nativeWriteFloatArray(
+                mNativeTrackInJavaObj, floatArray, offsetInSamples, sizeInSamples, writeFlags);
+    }
+
+    public int getChannelCount() {
+        return mChannelCount;
+    }
+
+    public static boolean test(int numChannels, int sampleRate, boolean useFloat,
+            int msecPerBuffer, int numBuffers) {
+        return nativeTest(numChannels, sampleRate, useFloat, msecPerBuffer, numBuffers)
+                == STATUS_OK;
+    }
+
+    @Override
+    protected void finalize() {
+        nativeClose(mNativeTrackInJavaObj);
+        nativeDestroyTrack(mNativeTrackInJavaObj);
+    }
+
+    static {
+        System.loadLibrary("audio_jni");
+    }
+
+    private static final String TAG = "AudioTrackNative";
+    private int mChannelCount;
+    private final long mNativeTrackInJavaObj;
+    private static final int STATUS_OK = 0;
+
+    // static native API.
+    // The native API uses a long "track handle" created by nativeCreateTrack.
+    // The handle must be destroyed after use by nativeDestroyTrack.
+    //
+    // Return codes from the native layer are status_t.
+    // Converted to Java booleans or exceptions at the public API layer.
+    private static native long nativeCreateTrack();
+    private static native void nativeDestroyTrack(long track);
+    private static native int nativeOpen(
+            long track, int numChannels, int sampleRate, boolean useFloat, int numBuffers);
+    private static native void nativeClose(long track);
+    private static native int nativeStart(long track);
+    private static native int nativeStop(long track);
+    private static native int nativePause(long track);
+    private static native int nativeFlush(long track);
+    private static native int nativeGetPositionInMsec(long track, @NonNull long[] position);
+    private static native int nativeGetBuffersPending(long track);
+    private static native int nativeWriteByteArray(long track, @NonNull byte[] byteArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags);
+    private static native int nativeWriteShortArray(long track, @NonNull short[] shortArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags);
+    private static native int nativeWriteFloatArray(long track, @NonNull float[] floatArray,
+            int offsetInSamples, int sizeInSamples, @WriteFlags int writeFlags);
+
+    // native interface for all-in-one testing, no track handle required.
+    private static native int nativeTest(
+            int numChannels, int sampleRate, boolean useFloat, int msecPerBuffer, int numBuffers);
+}
diff --git a/tests/tests/telephony/src/android/telephony/cts/CellInfoTest.java b/tests/tests/telephony/src/android/telephony/cts/CellInfoTest.java
index 5b88525..9a93a60 100644
--- a/tests/tests/telephony/src/android/telephony/cts/CellInfoTest.java
+++ b/tests/tests/telephony/src/android/telephony/cts/CellInfoTest.java
@@ -18,7 +18,9 @@
 import android.content.Context;
 import android.net.ConnectivityManager;
 import android.telephony.CellInfo;
-import android.telephony.PhoneStateListener;
+import android.telephony.CellInfoGsm;
+import android.telephony.CellInfoLte;
+import android.telephony.CellInfoWcdma;
 import android.telephony.TelephonyManager;
 import android.test.AndroidTestCase;
 import android.util.Log;
@@ -36,6 +38,9 @@
     private TelephonyManager mTelephonyManager;
     private static ConnectivityManager mCm;
     private static final String TAG = "android.telephony.cts.CellInfoTest";
+    // Maximum and minimum possible RSSI values (in dBm).
+    private static final int MAX_RSSI = -10;
+    private static final int MIN_RSSI = -150;
 
     @Override
     protected void setUp() throws Exception {
@@ -57,5 +62,66 @@
         assertNotNull("TelephonyManager.getAllCellInfo() returned NULL!", allCellInfo);
         assertTrue("TelephonyManager.getAllCellInfo() returned zero-length list!",
             allCellInfo.size() > 0);
+
+        int numRegisteredCells = 0;
+        for (CellInfo cellInfo : allCellInfo) {
+            if (cellInfo.isRegistered()) {
+                ++numRegisteredCells;
+            }
+            if (cellInfo instanceof CellInfoLte) {
+                verifyLteInfo((CellInfoLte) cellInfo);
+            } else if (cellInfo instanceof CellInfoWcdma) {
+                verifyWcdmaInfo((CellInfoWcdma) cellInfo);
+            } else if (cellInfo instanceof CellInfoGsm) {
+                verifyGsmInfo((CellInfoGsm) cellInfo);
+            }
+        }
+        // At least one and at most two cells should be registered.
+        assertTrue("None or too many registered cells : " + numRegisteredCells,
+                numRegisteredCells > 0 && numRegisteredCells <= 2);
+    }
+
+    // Verify lte cell information is within correct range.
+    private void verifyLteInfo(CellInfoLte lte) {
+        verifyRssiDbm(lte.getCellSignalStrength().getDbm());
+        // Verify LTE neighbor information.
+        if (!lte.isRegistered()) {
+            // Only physical cell id is available for LTE neighbor.
+            int pci = lte.getCellIdentity().getPci();
+            // Physical cell id should be within [0, 503].
+            assertTrue("getPci() out of range [0, 503]", pci >= 0 && pci <= 503);
+        }
+    }
+
+    // Verify wcdma cell information is within correct range.
+    private void verifyWcdmaInfo(CellInfoWcdma wcdma) {
+        verifyRssiDbm(wcdma.getCellSignalStrength().getDbm());
+        // Verify wcdma neighbor.
+        if (!wcdma.isRegistered()) {
+            // For wcdma neighbor, only primary scrambling code is available.
+            // Primary scrambling code should be within [0, 511].
+            int psc = wcdma.getCellIdentity().getPsc();
+            assertTrue("getPsc() out of range [0, 511]", psc >= 0 && psc <= 511);
+        }
+    }
+
+    // Verify gsm cell information is within correct range.
+    private void verifyGsmInfo(CellInfoGsm gsm) {
+        verifyRssiDbm(gsm.getCellSignalStrength().getDbm());
+        // Verify gsm neighbor.
+        if (!gsm.isRegistered()) {
+            // lac and cid are available in GSM neighbor information.
+            // Location area code and cell id should be within [0, 65535].
+            int lac = gsm.getCellIdentity().getLac();
+            assertTrue("getLac() out of range [0, 65535]", lac >= 0 && lac <= 65535);
+            int cid = gsm.getCellIdentity().getCid();
+            assertTrue("getCid() out range [0, 65535]", cid >= 0 && cid <= 65535);
+        }
+    }
+
+    // RSSI (in dBm) should be within [MIN_RSSI, MAX_RSSI].
+    private void verifyRssiDbm(int dbm) {
+        assertTrue("getCellSignalStrength().getDbm() out of range",
+                dbm >= MIN_RSSI && dbm <= MAX_RSSI);
     }
 }
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiClass.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiClass.java
index 31e1f8d..f5abd5e5 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiClass.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiClass.java
@@ -34,10 +34,25 @@
 
     private final List<ApiMethod> mApiMethods = new ArrayList<ApiMethod>();
 
-    ApiClass(String name, boolean deprecated, boolean classAbstract) {
+    private final String mSuperClassName;
+
+    private ApiClass mSuperClass;
+
+    /**
+     * @param name The name of the class
+     * @param deprecated true iff the class is marked as deprecated
+     * @param classAbstract true iff the class is abstract
+     * @param superClassName The fully qualified name of the super class
+     */
+    ApiClass(
+            String name,
+            boolean deprecated,
+            boolean classAbstract,
+            String superClassName) {
         mName = name;
         mDeprecated = deprecated;
         mAbstract = classAbstract;
+        mSuperClassName = superClassName;
     }
 
     @Override
@@ -54,22 +69,20 @@
         return mDeprecated;
     }
 
+    public String getSuperClassName() {
+        return mSuperClassName;
+    }
+
     public boolean isAbstract() {
         return mAbstract;
     }
 
+    public void setSuperClass(ApiClass superClass) { mSuperClass = superClass; }
+
     public void addConstructor(ApiConstructor constructor) {
         mApiConstructors.add(constructor);
     }
 
-    public ApiConstructor getConstructor(List<String> parameterTypes) {
-        for (ApiConstructor constructor : mApiConstructors) {
-            if (parameterTypes.equals(constructor.getParameterTypes())) {
-                return constructor;
-            }
-        }
-        return null;
-    }
 
     public Collection<ApiConstructor> getConstructors() {
         return Collections.unmodifiableList(mApiConstructors);
@@ -79,15 +92,29 @@
         mApiMethods.add(method);
     }
 
-    public ApiMethod getMethod(String name, List<String> parameterTypes, String returnType) {
-        for (ApiMethod method : mApiMethods) {
-            if (name.equals(method.getName())
-                    && parameterTypes.equals(method.getParameterTypes())
-                    && returnType.equals(method.getReturnType())) {
-                return method;
-            }
+    /** Look for a matching constructor and mark it as covered */
+    public void markConstructorCovered(List<String> parameterTypes) {
+        if (mSuperClass != null) {
+            // Mark matching constructors in the superclass
+            mSuperClass.markConstructorCovered(parameterTypes);
         }
-        return null;
+        ApiConstructor apiConstructor = getConstructor(parameterTypes);
+        if (apiConstructor != null) {
+            apiConstructor.setCovered(true);
+        }
+
+    }
+
+    /** Look for a matching method and, if found, mark it as covered. */
+    public void markMethodCovered(String name, List<String> parameterTypes, String returnType) {
+        if (mSuperClass != null) {
+            // Mark matching methods in the superclass
+            mSuperClass.markMethodCovered(name, parameterTypes, returnType);
+        }
+        ApiMethod apiMethod = getMethod(name, parameterTypes, returnType);
+        if (apiMethod != null) {
+            apiMethod.setCovered(true);
+        }
     }
 
     public Collection<ApiMethod> getMethods() {
@@ -126,4 +153,24 @@
     public int getMemberSize() {
         return getTotalMethods();
     }
+
+    private ApiMethod getMethod(String name, List<String> parameterTypes, String returnType) {
+        for (ApiMethod method : mApiMethods) {
+            if (name.equals(method.getName())
+                    && parameterTypes.equals(method.getParameterTypes())
+                    && returnType.equals(method.getReturnType())) {
+                return method;
+            }
+        }
+        return null;
+    }
+
+    private ApiConstructor getConstructor(List<String> parameterTypes) {
+        for (ApiConstructor constructor : mApiConstructors) {
+            if (parameterTypes.equals(constructor.getParameterTypes())) {
+                return constructor;
+            }
+        }
+        return null;
+    }
 }
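
To make the new coverage propagation concrete, a small sketch (assumed to live in the same package as ApiClass, since the constructors are package-private; the class and method names are illustrative): marking a method as covered on a subclass also marks the matching declaration on its resolved superclass.

    // Sketch: coverage propagates up the resolved superclass chain.
    ApiClass base = new ApiClass("android.view.View", false, false, "java.lang.Object");
    ApiClass derived = new ApiClass("android.widget.TextView", false, false, "android.view.View");
    derived.setSuperClass(base);

    ApiMethod inherited = new ApiMethod("onDraw", java.util.Arrays.asList("android.graphics.Canvas"),
            "void", false, "protected", false, false, false);
    base.addMethod(inherited);

    // A hit recorded against the subclass also covers the inherited declaration,
    // even though the subclass has no matching ApiMethod of its own.
    derived.markMethodCovered("onDraw", java.util.Arrays.asList("android.graphics.Canvas"), "void");
    // inherited.isCovered() is now true.
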
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiCoverage.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiCoverage.java
index adf2ea9..953aab3 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiCoverage.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiCoverage.java
@@ -39,10 +39,11 @@
         return Collections.unmodifiableCollection(mPackages.values());
     }
 
-    public void removeEmptyAbstractClasses() {
+    /** Iterate through all packages and update each class to reference its superclass. */
+    public void resolveSuperClasses() {
         for (Map.Entry<String, ApiPackage> entry : mPackages.entrySet()) {
             ApiPackage pkg = entry.getValue();
-            pkg.removeEmptyAbstractClasses();
+            pkg.resolveSuperClasses(mPackages);
         }
     }
 }
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiMethod.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiMethod.java
index 053cd12..582c2b6 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiMethod.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiMethod.java
@@ -29,15 +29,35 @@
 
     private final String mReturnType;
 
-    private boolean mDeprecated;
+    private final boolean mDeprecated;
+
+    private final String mVisibility;
+
+    private final boolean mStaticMethod;
+
+    private final boolean mFinalMethod;
+
+    private final boolean mAbstractMethod;
 
     private boolean mIsCovered;
 
-    ApiMethod(String name, List<String> parameterTypes, String returnType, boolean deprecated) {
+    ApiMethod(
+            String name,
+            List<String> parameterTypes,
+            String returnType,
+            boolean deprecated,
+            String visibility,
+            boolean staticMethod,
+            boolean finalMethod,
+            boolean abstractMethod) {
         mName = name;
         mParameterTypes = new ArrayList<String>(parameterTypes);
         mReturnType = returnType;
         mDeprecated = deprecated;
+        mVisibility = visibility;
+        mStaticMethod = staticMethod;
+        mFinalMethod = finalMethod;
+        mAbstractMethod = abstractMethod;
     }
 
     @Override
@@ -65,6 +85,14 @@
         return mIsCovered;
     }
 
+    public String getVisibility() { return mVisibility; }
+
+    public boolean isAbstractMethod() { return mAbstractMethod; }
+
+    public boolean isStaticMethod() { return mStaticMethod; }
+
+    public boolean isFinalMethod() { return mFinalMethod; }
+
     public void setCovered(boolean covered) {
         mIsCovered = covered;
     }
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiPackage.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiPackage.java
index e0bf73f..7be7e3c 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiPackage.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/ApiPackage.java
@@ -77,14 +77,26 @@
         return getTotalMethods();
     }
 
-    public void removeEmptyAbstractClasses() {
+    /** Iterate through all classes and resolve each class's superclass. */
+    public void resolveSuperClasses(Map<String, ApiPackage> packageMap) {
         Iterator<Entry<String, ApiClass>> it = mApiClassMap.entrySet().iterator();
         while (it.hasNext()) {
             Map.Entry<String, ApiClass> entry = it.next();
-            ApiClass cls = entry.getValue();
-            if (cls.isAbstract() && (cls.getTotalMethods() == 0)) {
-                // this is essentially interface
-                it.remove();
+            ApiClass apiClass = entry.getValue();
+            if (apiClass.getSuperClassName() != null) {
+                String superClassName = apiClass.getSuperClassName();
+                // Split the fully qualified class name into package and class name.
+                String packageName = superClassName.substring(0, superClassName.lastIndexOf('.'));
+                String className = superClassName.substring(
+                        superClassName.lastIndexOf('.') + 1, superClassName.length());
+                if (packageMap.containsKey(packageName)) {
+                    ApiPackage apiPackage = packageMap.get(packageName);
+                    ApiClass superClass = apiPackage.getClass(className);
+                    if (superClass != null) {
+                        // Add the superclass
+                        apiClass.setSuperClass(superClass);
+                    }
+                }
             }
         }
     }
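
The package/class split above assumes the superclass name is always fully qualified. A short illustrative sketch of the same split, with a guard for a name that has no package separator (not part of the patch):

    // Sketch: "android.view.View" -> packageName "android.view", className "View".
    String superClassName = "android.view.View";
    int lastDot = superClassName.lastIndexOf('.');
    if (lastDot >= 0) {
        String packageName = superClassName.substring(0, lastDot);
        String className = superClassName.substring(lastDot + 1);
        // Look the superclass up via packageMap.get(packageName), as resolveSuperClasses() does.
    }
    // A name without '.' would make lastIndexOf return -1, so substring(0, -1) would throw.
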
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/CtsApiCoverage.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/CtsApiCoverage.java
index 05cb4e19..3f2f353 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/CtsApiCoverage.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/CtsApiCoverage.java
@@ -117,7 +117,8 @@
          */
 
         ApiCoverage apiCoverage = getEmptyApiCoverage(apiXmlPath);
-        apiCoverage.removeEmptyAbstractClasses();
+        // Add superclass information to the API coverage data.
+        apiCoverage.resolveSuperClasses();
         for (File testApk : testApks) {
             addApiCoverage(apiCoverage, testApk, dexDeps);
         }
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/CurrentXmlHandler.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/CurrentXmlHandler.java
index b9f9e9c..de9f5d5 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/CurrentXmlHandler.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/CurrentXmlHandler.java
@@ -40,6 +40,12 @@
 
     private boolean mCurrentMethodIsAbstract;
 
+    private String mCurrentMethodVisibility;
+
+    private boolean mCurrentMethodStaticMethod;
+
+    private boolean mCurrentMethodFinalMethod;
+
     private boolean mDeprecated;
 
 
@@ -69,7 +75,9 @@
             mIgnoreCurrentClass = false;
             mCurrentClassName = getValue(attributes, "name");
             mDeprecated = isDeprecated(attributes);
-            ApiClass apiClass = new ApiClass(mCurrentClassName, mDeprecated, isAbstract(attributes));
+            String superClass = attributes.getValue("extends");
+            ApiClass apiClass = new ApiClass(
+                    mCurrentClassName, mDeprecated, is(attributes, "abstract"), superClass);
             ApiPackage apiPackage = mApiCoverage.getPackage(mCurrentPackageName);
             apiPackage.addClass(apiClass);
         } else if ("interface".equalsIgnoreCase(localName)) {
@@ -82,7 +90,10 @@
             mDeprecated = isDeprecated(attributes);
             mCurrentMethodName = getValue(attributes, "name");
             mCurrentMethodReturnType = getValue(attributes, "return");
-            mCurrentMethodIsAbstract = isAbstract(attributes);
+            mCurrentMethodIsAbstract = is(attributes, "abstract");
+            mCurrentMethodVisibility = getValue(attributes, "visibility");
+            mCurrentMethodStaticMethod = is(attributes, "static");
+            mCurrentMethodFinalMethod = is(attributes, "final");
             mCurrentParameterTypes.clear();
         } else if ("parameter".equalsIgnoreCase(localName)) {
             mCurrentParameterTypes.add(getValue(attributes, "type"));
@@ -107,11 +118,15 @@
             ApiClass apiClass = apiPackage.getClass(mCurrentClassName);
             apiClass.addConstructor(apiConstructor);
         }  else if ("method".equalsIgnoreCase(localName)) {
-            if (mCurrentMethodIsAbstract) { // do not add abstract method
-                return;
-            }
-            ApiMethod apiMethod = new ApiMethod(mCurrentMethodName, mCurrentParameterTypes,
-                    mCurrentMethodReturnType, mDeprecated);
+            ApiMethod apiMethod = new ApiMethod(
+                    mCurrentMethodName,
+                    mCurrentParameterTypes,
+                    mCurrentMethodReturnType,
+                    mDeprecated,
+                    mCurrentMethodVisibility,
+                    mCurrentMethodStaticMethod,
+                    mCurrentMethodFinalMethod,
+                    mCurrentMethodIsAbstract);
             ApiPackage apiPackage = mApiCoverage.getPackage(mCurrentPackageName);
             ApiClass apiClass = apiPackage.getClass(mCurrentClassName);
             apiClass.addMethod(apiMethod);
@@ -129,8 +144,8 @@
         return "deprecated".equals(attributes.getValue("deprecated"));
     }
 
-    private boolean isAbstract(Attributes attributes) {
-        return "true".equals(attributes.getValue("abstract"));
+    private static boolean is(Attributes attributes, String valueName) {
+        return "true".equals(attributes.getValue(valueName));
     }
 
     private boolean isEnum(Attributes attributes) {
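
For orientation, a standalone SAX handler sketch that reads the same modifier attributes the generalized is() helper now checks; the element and attribute names mirror those used above, while the handler class itself is illustrative only.

    import org.xml.sax.Attributes;
    import org.xml.sax.helpers.DefaultHandler;

    // Sketch: mirrors how CurrentXmlHandler now records method modifiers.
    class MethodModifierHandler extends DefaultHandler {
        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) {
            if ("method".equalsIgnoreCase(localName)) {
                String visibility = attributes.getValue("visibility");
                boolean isStatic = "true".equals(attributes.getValue("static"));
                boolean isFinal = "true".equals(attributes.getValue("final"));
                boolean isAbstract = "true".equals(attributes.getValue("abstract"));
                // These values feed the four new ApiMethod constructor arguments.
            }
        }
    }
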
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/DexDepsXmlHandler.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/DexDepsXmlHandler.java
index 0a90bdd..3df532e 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/DexDepsXmlHandler.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/DexDepsXmlHandler.java
@@ -73,10 +73,7 @@
             if (apiPackage != null) {
                 ApiClass apiClass = apiPackage.getClass(mCurrentClassName);
                 if (apiClass != null) {
-                    ApiConstructor apiConstructor = apiClass.getConstructor(mCurrentParameterTypes);
-                    if (apiConstructor != null) {
-                        apiConstructor.setCovered(true);
-                    }
+                    apiClass.markConstructorCovered(mCurrentParameterTypes);
                 }
             }
         }  else if ("method".equalsIgnoreCase(localName)) {
@@ -84,11 +81,8 @@
             if (apiPackage != null) {
                 ApiClass apiClass = apiPackage.getClass(mCurrentClassName);
                 if (apiClass != null) {
-                    ApiMethod apiMethod = apiClass.getMethod(mCurrentMethodName,
-                            mCurrentParameterTypes, mCurrentMethodReturnType);
-                    if (apiMethod != null) {
-                        apiMethod.setCovered(true);
-                    }
+                    apiClass.markMethodCovered(
+                            mCurrentMethodName, mCurrentParameterTypes, mCurrentMethodReturnType);
                 }
             }
         }
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/TextReport.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/TextReport.java
index e3e2e7c..3adc020 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/TextReport.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/TextReport.java
@@ -103,7 +103,18 @@
     private static void printMethod(ApiMethod method, PrintStream out) {
         StringBuilder builder = new StringBuilder("    [")
                 .append(method.isCovered() ? "X" : " ")
-                .append("] ").append(method.getReturnType()).append(" ")
+                .append("] ")
+                .append(method.getVisibility()).append(" ");
+        if (method.isAbstractMethod()) {
+            builder.append("abstract ");
+        }
+        if (method.isStaticMethod()) {
+            builder.append("static ");
+        }
+        if (method.isFinalMethod()) {
+            builder.append("final ");
+        }
+        builder.append(method.getReturnType()).append(" ")
                 .append(method.getName()).append("(");
         List<String> parameterTypes = method.getParameterTypes();
         int numParameterTypes = parameterTypes.size();
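
With the modifier flags added, a covered method line in the text report reads roughly as sketched below. The call is illustrative (printMethod() is private, so same-class usage is assumed), and the exact tail depends on the parameter-printing code that follows this hunk.

    // Sketch: approximate printMethod() output for a covered public static final method.
    ApiMethod method = new ApiMethod("valueOf", java.util.Arrays.asList("java.lang.String"),
            "int", false, "public", true, true, false);
    method.setCovered(true);
    // printMethod(method, System.out) would emit something like:
    //     [X] public static final int valueOf(java.lang.String)
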
diff --git a/tools/cts-api-coverage/src/com/android/cts/apicoverage/XmlReport.java b/tools/cts-api-coverage/src/com/android/cts/apicoverage/XmlReport.java
index 570b316..4310d20 100644
--- a/tools/cts-api-coverage/src/com/android/cts/apicoverage/XmlReport.java
+++ b/tools/cts-api-coverage/src/com/android/cts/apicoverage/XmlReport.java
@@ -66,7 +66,7 @@
                         + "\" numCovered=\"" + pkgTotalCovered
                         + "\" numTotal=\"" + pkgTotal
                         + "\" coveragePercentage=\""
-                            + Math.round(pkg.getCoveragePercentage())
+                        + Math.round(pkg.getCoveragePercentage())
                         + "\">");
 
                 List<ApiClass> classes = new ArrayList<ApiClass>(pkg.getClasses());
@@ -103,6 +103,10 @@
                             out.println("<method name=\"" + method.getName()
                                     + "\" returnType=\"" + method.getReturnType()
                                     + "\" deprecated=\"" + method.isDeprecated()
+                                    + "\" static=\"" + method.isStaticMethod()
+                                    + "\" final=\"" + method.isFinalMethod()
+                                    + "\" visibility=\"" + method.getVisibility()
+                                    + "\" abstract=\"" + method.isAbstractMethod()
                                     + "\" covered=\"" + method.isCovered() + "\">");
                             if (method.isDeprecated()) {
                                 if (method.isCovered()) {
diff --git a/tools/cts-api-coverage/src/res/api-coverage.xsl b/tools/cts-api-coverage/src/res/api-coverage.xsl
index b11a8c4..1a56eb0 100644
--- a/tools/cts-api-coverage/src/res/api-coverage.xsl
+++ b/tools/cts-api-coverage/src/res/api-coverage.xsl
@@ -101,14 +101,17 @@
                     <xsl:for-each select="api-coverage/api/package">
                         <xsl:call-template name="packageOrClassListItem">
                             <xsl:with-param name="bulletClass" select="'package'" />
+                            <xsl:with-param name="toggleId" select="@name" />
                         </xsl:call-template>
                         <div class="packageDetails" id="{@name}" style="display: none">
                             <ul>
                                 <xsl:for-each select="class">
+                                    <xsl:variable name="packageClassId" select="concat(../@name, '.', @name)"/>
                                     <xsl:call-template name="packageOrClassListItem">
                                         <xsl:with-param name="bulletClass" select="'class'" />
+                                        <xsl:with-param name="toggleId" select="$packageClassId" />
                                     </xsl:call-template>
-                                    <div class="classDetails" id="{@name}" style="display: none">
+                                    <div class="classDetails" id="{$packageClassId}" style="display: none">
                                         <xsl:for-each select="constructor">
                                             <xsl:call-template name="methodListItem" />
                                         </xsl:for-each>
@@ -124,9 +127,10 @@
             </body>
         </html>
     </xsl:template>
-    
+
     <xsl:template name="packageOrClassListItem">
         <xsl:param name="bulletClass" />
+        <xsl:param name="toggleId"/>
 
         <xsl:variable name="colorClass">
             <xsl:choose>
@@ -135,7 +139,7 @@
                 <xsl:otherwise>green</xsl:otherwise>
             </xsl:choose>
         </xsl:variable>
-        
+
         <xsl:variable name="deprecatedClass">
             <xsl:choose>
                 <xsl:when test="@deprecated = 'true'">deprecated</xsl:when>
@@ -143,15 +147,15 @@
             </xsl:choose>
         </xsl:variable>
 
-        <li class="{$bulletClass}" onclick="toggleVisibility('{@name}')">
+        <li class="{$bulletClass}" onclick="toggleVisibility('{$toggleId}')">
             <span class="{$colorClass} {$deprecatedClass}">
                 <b><xsl:value-of select="@name" /></b>
                 &nbsp;<xsl:value-of select="@coveragePercentage" />%
                 &nbsp;(<xsl:value-of select="@numCovered" />/<xsl:value-of select="@numTotal" />)
             </span>
-        </li>   
+        </li>
     </xsl:template>
-  
+
   <xsl:template name="methodListItem">
 
     <xsl:variable name="deprecatedClass">
@@ -166,6 +170,10 @@
         <xsl:when test="@covered = 'true'">[X]</xsl:when>
         <xsl:otherwise>[ ]</xsl:otherwise>
       </xsl:choose>
+      <xsl:if test="@visibility != ''">&nbsp;<xsl:value-of select="@visibility" /></xsl:if>
+      <xsl:if test="@abstract = 'true'">&nbsp;abstract</xsl:if>
+      <xsl:if test="@static = 'true'">&nbsp;static</xsl:if>
+      <xsl:if test="@final = 'true'">&nbsp;final</xsl:if>
       <xsl:if test="@returnType != ''">&nbsp;<xsl:value-of select="@returnType" /></xsl:if>
       <b>&nbsp;<xsl:value-of select="@name" /></b><xsl:call-template name="formatParameters" />
     </span>