Tunneled Video Playback CTS Tests

Add CTS tests for tunneled video playback to DecoderTest.java
(testTunneledVideoPlayback, testTunneledVideoFlush). Introduce a
MediaCodecTunneledPlayer helper and a MediaTimeProvider interface, add
tunneled-mode support to CodecState, and give NonBlockingAudioTrack an
HW A/V-sync AudioTrack path.
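
The new helper is driven by the tests roughly as follows (sketch only; see
the DecoderTest changes below for the exact flow):

    AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
    MediaCodecTunneledPlayer player = new MediaCodecTunneledPlayer(
            getActivity().getSurfaceHolder(), true /* tunneled */, am.generateAudioSessionId());
    player.setAudioDataSource(AUDIO_URL, null);
    player.setVideoDataSource(VIDEO_URL, null);
    player.start();        // IDLE -> PREPARING
    player.prepare();      // extract tracks, create codecs, -> PAUSED
    player.startThread();  // -> PLAYING, starts the playback thread
    ...
    player.pause();
    player.flush();
    player.reset();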

Bug: 17883772

Change-Id: I31e30bf3f3808e742996bb370e9522f97cebb5ba
diff --git a/tests/tests/media/src/android/media/cts/CodecState.java b/tests/tests/media/src/android/media/cts/CodecState.java
index cd6b68f..8f62227 100644
--- a/tests/tests/media/src/android/media/cts/CodecState.java
+++ b/tests/tests/media/src/android/media/cts/CodecState.java
@@ -33,6 +33,9 @@
 
     private boolean mSawInputEOS, mSawOutputEOS;
     private boolean mLimitQueueDepth;
+    private boolean mTunneled;
+    private boolean mIsAudio;
+    private int mAudioSessionId;
     private ByteBuffer[] mCodecInputBuffers;
     private ByteBuffer[] mCodecOutputBuffers;
     private int mTrackIndex;
@@ -40,8 +43,9 @@
     private LinkedList<Integer> mAvailableOutputBufferIndices;
     private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;
     private long mPresentationTimeUs;
+    private long mSampleBaseTimeUs;
     private MediaCodec mCodec;
-    private MediaCodecCencPlayer mMediaCodecPlayer;
+    private MediaTimeProvider mMediaTimeProvider;
     private MediaExtractor mExtractor;
     private MediaFormat mFormat;
     private MediaFormat mOutputFormat;
@@ -51,19 +55,23 @@
      * Manages audio and video playback using MediaCodec and AudioTrack.
      */
     public CodecState(
-            MediaCodecCencPlayer mediaCodecPlayer,
+            MediaTimeProvider mediaTimeProvider,
             MediaExtractor extractor,
             int trackIndex,
             MediaFormat format,
             MediaCodec codec,
-            boolean limitQueueDepth) {
-
-        mMediaCodecPlayer = mediaCodecPlayer;
+            boolean limitQueueDepth,
+            boolean tunneled,
+            int audioSessionId) {
+        mMediaTimeProvider = mediaTimeProvider;
         mExtractor = extractor;
         mTrackIndex = trackIndex;
         mFormat = format;
         mSawInputEOS = mSawOutputEOS = false;
         mLimitQueueDepth = limitQueueDepth;
+        mTunneled = tunneled;
+        mAudioSessionId = audioSessionId;
+        mSampleBaseTimeUs = -1;
 
         mCodec = codec;
 
@@ -72,6 +80,10 @@
         mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
 
         mPresentationTimeUs = 0;
+
+        String mime = mFormat.getString(MediaFormat.KEY_MIME);
+        Log.d(TAG, "CodecState::onOutputFormatChanged " + mime);
+        mIsAudio = mime.startsWith("audio/");
     }
 
     public void release() {
@@ -100,7 +112,9 @@
     public void start() {
         mCodec.start();
         mCodecInputBuffers = mCodec.getInputBuffers();
-        mCodecOutputBuffers = mCodec.getOutputBuffers();
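+        // In tunneled mode the video codec renders directly to the surface, so there
+        // are no client-side output buffers to manage.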
+        if (!mTunneled || mIsAudio) {
+            mCodecOutputBuffers = mCodec.getOutputBuffers();
+        }
 
         if (mAudioTrack != null) {
             mAudioTrack.play();
@@ -119,15 +133,17 @@
 
     public void flush() {
         mAvailableInputBufferIndices.clear();
-        mAvailableOutputBufferIndices.clear();
-        mAvailableOutputBufferInfos.clear();
+        if (!mTunneled || mIsAudio) {
+            mAvailableOutputBufferIndices.clear();
+            mAvailableOutputBufferInfos.clear();
+        }
 
         mSawInputEOS = false;
         mSawOutputEOS = false;
 
         if (mAudioTrack != null
-                && mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED) {
-            mAudioTrack.play();
+                && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+            mAudioTrack.flush();
         }
 
         mCodec.flush();
@@ -153,20 +169,22 @@
         while (feedInputBuffer()) {
         }
 
-        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
-        int indexOutput = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
+        if (mIsAudio || !mTunneled) {
+            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+            int indexOutput = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
 
-        if (indexOutput == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
-            mOutputFormat = mCodec.getOutputFormat();
-            onOutputFormatChanged();
-        } else if (indexOutput == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
-            mCodecOutputBuffers = mCodec.getOutputBuffers();
-        } else if (indexOutput != MediaCodec.INFO_TRY_AGAIN_LATER) {
-            mAvailableOutputBufferIndices.add(indexOutput);
-            mAvailableOutputBufferInfos.add(info);
-        }
+            if (indexOutput == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                mOutputFormat = mCodec.getOutputFormat();
+                onOutputFormatChanged();
+            } else if (indexOutput == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+                mCodecOutputBuffers = mCodec.getOutputBuffers();
+            } else if (indexOutput != MediaCodec.INFO_TRY_AGAIN_LATER) {
+                mAvailableOutputBufferIndices.add(indexOutput);
+                mAvailableOutputBufferInfos.add(info);
+            }
 
-        while (drainOutputBuffer()) {
+            while (drainOutputBuffer()) {
+            }
         }
     }
 
@@ -200,9 +218,24 @@
                 Log.d(TAG, "sampleSize: " + sampleSize + " trackIndex:" + trackIndex +
                         " sampleTime:" + sampleTime + " sampleFlags:" + sampleFlags);
                 mSawInputEOS = true;
+                // FIXME: in tunneled mode we currently use input EOS as the output EOS
+                // indicator; we should use the stream duration instead.
+                if (mTunneled && !mIsAudio) {
+                    mSawOutputEOS = true;
+                }
                 return false;
             }
 
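+            // Tunneled video: rebase timestamps so the first queued sample starts at zero.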
+            if (mTunneled && !mIsAudio) {
+                if (mSampleBaseTimeUs == -1) {
+                    mSampleBaseTimeUs = sampleTime;
+                }
+                sampleTime -= mSampleBaseTimeUs;
+                // FIXME: in tunneled mode we currently use the input buffer time as the
+                // video presentation time. This is not accurate and should be fixed.
+                mPresentationTimeUs = sampleTime;
+            }
+
             if ((sampleFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
                 MediaCodec.CryptoInfo info = new MediaCodec.CryptoInfo();
                 mExtractor.getSampleCryptoInfo(info);
@@ -255,7 +288,8 @@
                     sampleRate < 8000 || sampleRate > 128000) {
                 return;
             }
-            mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount);
+            mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount,
+                                    mTunneled, mAudioSessionId);
             mAudioTrack.play();
         }
 
@@ -285,9 +319,9 @@
         }
 
         long realTimeUs =
-            mMediaCodecPlayer.getRealTimeUsForMediaTime(info.presentationTimeUs);
+            mMediaTimeProvider.getRealTimeUsForMediaTime(info.presentationTimeUs);
 
-        long nowUs = mMediaCodecPlayer.getNowUs();
+        long nowUs = mMediaTimeProvider.getNowUs();
 
         long lateUs = nowUs - realTimeUs;
 
diff --git a/tests/tests/media/src/android/media/cts/DecoderTest.java b/tests/tests/media/src/android/media/cts/DecoderTest.java
index d79baf2..c0d1beb 100644
--- a/tests/tests/media/src/android/media/cts/DecoderTest.java
+++ b/tests/tests/media/src/android/media/cts/DecoderTest.java
@@ -25,12 +25,15 @@
 import android.cts.util.MediaUtils;
 import android.graphics.ImageFormat;
 import android.media.Image;
+import android.media.AudioManager;
 import android.media.MediaCodec;
+import android.media.MediaCodecList;
 import android.media.MediaCodecInfo;
 import android.media.MediaExtractor;
 import android.media.MediaFormat;
 import android.util.Log;
 import android.view.Surface;
+import android.net.Uri;
 
 import java.io.BufferedInputStream;
 import java.io.IOException;
@@ -40,6 +43,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.zip.CRC32;
+import java.util.concurrent.TimeUnit;
 
 public class DecoderTest extends MediaPlayerTestBase {
     private static final String TAG = "DecoderTest";
@@ -57,6 +61,24 @@
     private Resources mResources;
     short[] mMasterBuffer;
 
+    private MediaCodecTunneledPlayer mMediaCodecPlayer;
+    private static final int SLEEP_TIME_MS = 1000;
+    private static final long PLAY_TIME_MS = TimeUnit.MILLISECONDS.convert(1, TimeUnit.MINUTES);
+    private static final Uri AUDIO_URL = Uri.parse(
+            "http://redirector.c.youtube.com/videoplayback?id=c80658495af60617"
+                + "&itag=18&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
+                + "&sparams=ip,ipbits,expire,id,itag,source"
+                + "&signature=A11D8BA0AA67A27F1409BE0C0B96B756625DB88B."
+                + "9BF4C93A130583ADBDF2B953AD5A8A58F518B012"
+                + "&key=test_key1&user=android-device-test");  // H.264 Base + AAC
+    private static final Uri VIDEO_URL = Uri.parse(
+            "http://redirector.c.youtube.com/videoplayback?id=c80658495af60617"
+                + "&itag=18&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000"
+                + "&sparams=ip,ipbits,expire,id,itag,source"
+                + "&signature=A11D8BA0AA67A27F1409BE0C0B96B756625DB88B."
+                + "9BF4C93A130583ADBDF2B953AD5A8A58F518B012"
+                + "&key=test_key1&user=android-device-test");  // H.264 Base + AAC
+
     @Override
     protected void setUp() throws Exception {
         super.setUp();
@@ -1850,5 +1872,81 @@
         return maxvalue;
     }
 
+    /* Returns true if the given video feature is supported for the given MIME type. */
+    private boolean isVideoFeatureSupported(String mimeType, String feature) {
+        MediaFormat format = MediaFormat.createVideoFormat(mimeType, 1920, 1080);
+        format.setFeatureEnabled(feature, true);
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        String codecName = mcl.findDecoderForFormat(format);
+        return codecName != null;
+    }
+
+
+    /**
+     * Test tunneled video playback mode if supported
+     */
+    public void testTunneledVideoPlayback() throws Exception {
+        if (!isVideoFeatureSupported(MediaFormat.MIMETYPE_VIDEO_AVC,
+                MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback)) {
+            MediaUtils.skipTest(TAG, "No tunneled video playback codec found!");
+            return;
+        }
+
+        AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
+        mMediaCodecPlayer = new MediaCodecTunneledPlayer(
+                getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
+
+        mMediaCodecPlayer.setAudioDataSource(AUDIO_URL, null);
+        mMediaCodecPlayer.setVideoDataSource(VIDEO_URL, null);
+        assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
+        assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+
+        // starts video playback
+        mMediaCodecPlayer.startThread();
+
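+        // Poll until playback ends, allowing at most 4x the nominal play time.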
+        long timeOut = System.currentTimeMillis() + 4 * PLAY_TIME_MS;
+        while (timeOut > System.currentTimeMillis() && !mMediaCodecPlayer.isEnded()) {
+            Thread.sleep(SLEEP_TIME_MS);
+            if (mMediaCodecPlayer.getCurrentPosition() >= mMediaCodecPlayer.getDuration()) {
+                Log.d(TAG, "testTunneledVideoPlayback -- current pos = " +
+                        mMediaCodecPlayer.getCurrentPosition() +
+                        " >= duration = " + mMediaCodecPlayer.getDuration());
+                break;
+            }
+        }
+        assertTrue("Tunneled video playback timeout exceeded!",
+                timeOut > System.currentTimeMillis());
+
+        Log.d(TAG, "playVideo player.reset()");
+        mMediaCodecPlayer.reset();
+    }
+
+    /**
+     * Test tunneled video playback flush if supported
+     */
+    public void testTunneledVideoFlush() throws Exception {
+        if (!isVideoFeatureSupported(MediaFormat.MIMETYPE_VIDEO_AVC,
+                MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback)) {
+            MediaUtils.skipTest(TAG, "No tunneled video playback codec found!");
+            return;
+        }
+
+        AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
+        mMediaCodecPlayer = new MediaCodecTunneledPlayer(
+                getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
+
+        mMediaCodecPlayer.setAudioDataSource(AUDIO_URL, null);
+        mMediaCodecPlayer.setVideoDataSource(VIDEO_URL, null);
+        assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
+        assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+
+        // starts video playback
+        mMediaCodecPlayer.startThread();
+        Thread.sleep(SLEEP_TIME_MS);
+        mMediaCodecPlayer.pause();
+        mMediaCodecPlayer.flush();
+        Thread.sleep(SLEEP_TIME_MS);
+        mMediaCodecPlayer.reset();
+    }
 }
 
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecCencPlayer.java b/tests/tests/media/src/android/media/cts/MediaCodecCencPlayer.java
index 90696ff..b96d38c 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecCencPlayer.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecCencPlayer.java
@@ -15,6 +15,7 @@
  */
 package android.media.cts;
 
+import android.media.AudioManager;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
@@ -40,7 +41,7 @@
  * {@link MediaDrm} can be used to obtain keys for decrypting protected media streams,
  * in conjunction with MediaCrypto.
  */
-public class MediaCodecCencPlayer {
+public class MediaCodecCencPlayer implements MediaTimeProvider {
     private static final String TAG = MediaCodecCencPlayer.class.getSimpleName();
 
     private static final int STATE_IDLE = 1;
@@ -300,10 +301,14 @@
 
         CodecState state;
         if (isVideo) {
-            state = new CodecState(this, mVideoExtractor, trackIndex, format, codec, true);
+            state = new CodecState((MediaTimeProvider)this, mVideoExtractor,
+                            trackIndex, format, codec, true, false, 
+                            AudioManager.AUDIO_SESSION_ID_GENERATE);
             mVideoCodecStates.put(Integer.valueOf(trackIndex), state);
         } else {
-            state = new CodecState(this, mAudioExtractor, trackIndex, format, codec, true);
+            state = new CodecState((MediaTimeProvider)this, mAudioExtractor,
+                            trackIndex, format, codec, true, false,
+                            AudioManager.AUDIO_SESSION_ID_GENERATE);
             mAudioCodecStates.put(Integer.valueOf(trackIndex), state);
         }
 
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecTest.java b/tests/tests/media/src/android/media/cts/MediaCodecTest.java
index 0aff8ed..494e823 100644
--- a/tests/tests/media/src/android/media/cts/MediaCodecTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaCodecTest.java
@@ -36,7 +36,6 @@
 import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-
 /**
  * General MediaCodec tests.
  *
diff --git a/tests/tests/media/src/android/media/cts/MediaCodecTunneledPlayer.java b/tests/tests/media/src/android/media/cts/MediaCodecTunneledPlayer.java
new file mode 100644
index 0000000..411cd14
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/MediaCodecTunneledPlayer.java
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.cts;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.net.Uri;
+import android.util.Log;
+import android.view.SurfaceHolder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * Android L (API 21) introduces the {@link MediaCodec} tunneled-mode API. It allows apps
+ * to use MediaCodec to delegate their audio/video rendering to a vendor-provided
+ * codec component.
+ */
+public class MediaCodecTunneledPlayer implements MediaTimeProvider {
+    private static final String TAG = MediaCodecTunneledPlayer.class.getSimpleName();
+
+    private static final int STATE_IDLE = 1;
+    private static final int STATE_PREPARING = 2;
+    private static final int STATE_PLAYING = 3;
+    private static final int STATE_PAUSED = 4;
+
+    private Boolean mThreadStarted = false;
+    private byte[] mSessionId;
+    private CodecState mAudioTrackState;
+    private int mMediaFormatHeight;
+    private int mMediaFormatWidth;
+    private Integer mState;
+    private long mDeltaTimeUs;
+    private long mDurationUs;
+    private Map<Integer, CodecState> mAudioCodecStates;
+    private Map<Integer, CodecState> mVideoCodecStates;
+    private Map<String, String> mAudioHeaders;
+    private Map<String, String> mVideoHeaders;
+    private MediaExtractor mAudioExtractor;
+    private MediaExtractor mVideoExtractor;
+    private SurfaceHolder mSurfaceHolder;
+    private Thread mThread;
+    private Uri mAudioUri;
+    private Uri mVideoUri;
+    private boolean mTunneled;
+    private int mAudioSessionId;
+
+    /*
+     * Media player class that plays back video using a tunneled MediaCodec.
+     */
+    public MediaCodecTunneledPlayer(SurfaceHolder holder, boolean tunneled, int audioSessionId) {
+        mSurfaceHolder = holder;
+        mTunneled = tunneled;
+        mAudioTrackState = null;
+        mState = STATE_IDLE;
+        mAudioSessionId = audioSessionId;
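+        // Playback thread: while mThreadStarted is set, pump the codecs whenever the
+        // player is in STATE_PLAYING.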
+        mThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                while (true) {
+                    synchronized (mThreadStarted) {
+                        if (mThreadStarted == false) {
+                            break;
+                        }
+                    }
+                    synchronized (mState) {
+                        if (mState == STATE_PLAYING) {
+                            doSomeWork();
+                            if (mAudioTrackState != null) {
+                                mAudioTrackState.process();
+                            }
+                        }
+                    }
+                    try {
+                        Thread.sleep(5);
+                    } catch (InterruptedException ex) {
+                        Log.d(TAG, "Thread interrupted");
+                    }
+                }
+            }
+        });
+    }
+
+    public void setAudioDataSource(Uri uri, Map<String, String> headers) {
+        mAudioUri = uri;
+        mAudioHeaders = headers;
+    }
+
+    public void setVideoDataSource(Uri uri, Map<String, String> headers) {
+        mVideoUri = uri;
+        mVideoHeaders = headers;
+    }
+
+    public final int getMediaFormatHeight() {
+        return mMediaFormatHeight;
+    }
+
+    public final int getMediaFormatWidth() {
+        return mMediaFormatWidth;
+    }
+
+    private boolean prepareAudio() throws IOException {
+        for (int i = mAudioExtractor.getTrackCount(); i-- > 0;) {
+            MediaFormat format = mAudioExtractor.getTrackFormat(i);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+
+            if (!mime.startsWith("audio/")) {
+                continue;
+            }
+
+            Log.d(TAG, "audio track #" + i + " " + format + " " + mime +
+                  " Is ADTS:" + getMediaFormatInteger(format, MediaFormat.KEY_IS_ADTS) +
+                  " Sample rate:" + getMediaFormatInteger(format, MediaFormat.KEY_SAMPLE_RATE) +
+                  " Channel count:" +
+                  getMediaFormatInteger(format, MediaFormat.KEY_CHANNEL_COUNT));
+
+            mAudioExtractor.selectTrack(i);
+            if (!addTrack(i, format)) {
+                Log.e(TAG, "prepareAudio - addTrack() failed!");
+                return false;
+            }
+
+            if (format.containsKey(MediaFormat.KEY_DURATION)) {
+                long durationUs = format.getLong(MediaFormat.KEY_DURATION);
+
+                if (durationUs > mDurationUs) {
+                    mDurationUs = durationUs;
+                }
+                Log.d(TAG, "audio track format #" + i +
+                        " Duration:" + mDurationUs + " microseconds");
+            }
+        }
+        return true;
+    }
+
+    private boolean prepareVideo() throws IOException {
+        for (int i = mVideoExtractor.getTrackCount(); i-- > 0;) {
+            MediaFormat format = mVideoExtractor.getTrackFormat(i);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+
+            if (!mime.startsWith("video/")) {
+                continue;
+            }
+
+            mMediaFormatHeight = getMediaFormatInteger(format, MediaFormat.KEY_HEIGHT);
+            mMediaFormatWidth = getMediaFormatInteger(format, MediaFormat.KEY_WIDTH);
+            Log.d(TAG, "video track #" + i + " " + format + " " + mime +
+                  " Width:" + mMediaFormatWidth + ", Height:" + mMediaFormatHeight);
+
+            mVideoExtractor.selectTrack(i);
+            if (!addTrack(i, format)) {
+                Log.e(TAG, "prepareVideo - addTrack() failed!");
+                return false;
+            }
+
+            if (format.containsKey(MediaFormat.KEY_DURATION)) {
+                long durationUs = format.getLong(MediaFormat.KEY_DURATION);
+
+                if (durationUs > mDurationUs) {
+                    mDurationUs = durationUs;
+                }
+                Log.d(TAG, "track format #" + i + " Duration:" +
+                        mDurationUs + " microseconds");
+            }
+        }
+        return true;
+    }
+
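+    // Creates extractors for the audio and video URIs, selects their tracks and codecs,
+    // and moves the player to STATE_PAUSED.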
+    public boolean prepare() throws IOException {
+        if (null == mAudioExtractor) {
+            mAudioExtractor = new MediaExtractor();
+            if (null == mAudioExtractor) {
+                Log.e(TAG, "prepare - Cannot create Audio extractor.");
+                return false;
+            }
+        }
+
+        if (null == mVideoExtractor){
+            mVideoExtractor = new MediaExtractor();
+            if (null == mVideoExtractor) {
+                Log.e(TAG, "prepare - Cannot create Video extractor.");
+                return false;
+            }
+        }
+
+        mAudioExtractor.setDataSource(mAudioUri.toString(), mAudioHeaders);
+        mVideoExtractor.setDataSource(mVideoUri.toString(), mVideoHeaders);
+
+        if (null == mVideoCodecStates) {
+            mVideoCodecStates = new HashMap<Integer, CodecState>();
+        } else {
+            mVideoCodecStates.clear();
+        }
+
+        if (null == mAudioCodecStates) {
+            mAudioCodecStates = new HashMap<Integer, CodecState>();
+        } else {
+            mAudioCodecStates.clear();
+        }
+
+        if (!prepareAudio()) {
+            Log.e(TAG,"prepare - prepareAudio() failed!");
+            return false;
+        }
+        if (!prepareVideo()) {
+            Log.e(TAG,"prepare - prepareVideo() failed!");
+            return false;
+        }
+
+        synchronized (mState) {
+            mState = STATE_PAUSED;
+        }
+        return true;
+    }
+
+    private boolean addTrack(int trackIndex, MediaFormat format) throws IOException {
+        String mime = format.getString(MediaFormat.KEY_MIME);
+        boolean isVideo = mime.startsWith("video/");
+        boolean isAudio = mime.startsWith("audio/");
+        MediaCodec codec;
+
+        // setup tunneled video codec if needed
+        if (isVideo && mTunneled) {
+            format.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback,
+                        true);
+            MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+            String codecName = mcl.findDecoderForFormat(format);
+            if (codecName == null) {
+                Log.e(TAG,"addTrack - Could not find Tunneled playback codec for "+mime+
+                        " format!");
+                return false;
+            }
+
+            codec = MediaCodec.createByCodecName(codecName);
+            if (codec == null) {
+                Log.e(TAG, "addTrack - Could not create Tunneled playback codec "+
+                        codecName+"!");
+                return false;
+            }
+
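+            // Tie the tunneled video codec to the audio session so audio and video are
+            // synchronized by the hardware A/V sync engine.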
+            if (mAudioTrackState != null) {
+                format.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, mAudioSessionId);
+            }
+        } else {
+            codec = MediaCodec.createDecoderByType(mime);
+            if (codec == null) {
+                Log.e(TAG, "addTrack - Could not create regular playback codec for mime "+
+                        mime+"!");
+                return false;
+            }
+        }
+        codec.configure(
+                format,
+                isVideo ? mSurfaceHolder.getSurface() : null, null, 0);
+
+        CodecState state;
+        if (isVideo) {
+            state = new CodecState((MediaTimeProvider)this, mVideoExtractor,
+                            trackIndex, format, codec, true, mTunneled, mAudioSessionId);
+            mVideoCodecStates.put(Integer.valueOf(trackIndex), state);
+        } else {
+            state = new CodecState((MediaTimeProvider)this, mAudioExtractor,
+                            trackIndex, format, codec, true, mTunneled, mAudioSessionId);
+            mAudioCodecStates.put(Integer.valueOf(trackIndex), state);
+        }
+
+        if (isAudio) {
+            mAudioTrackState = state;
+        }
+
+        return true;
+    }
+
+    protected int getMediaFormatInteger(MediaFormat format, String key) {
+        return format.containsKey(key) ? format.getInteger(key) : 0;
+    }
+
+    public boolean start() {
+        Log.d(TAG, "start");
+
+        synchronized (mState) {
+            if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
+                return true;
+            } else if (mState == STATE_IDLE) {
+                mState = STATE_PREPARING;
+                return true;
+            } else if (mState != STATE_PAUSED) {
+                throw new IllegalStateException();
+            }
+
+            for (CodecState state : mVideoCodecStates.values()) {
+                state.start();
+            }
+
+            for (CodecState state : mAudioCodecStates.values()) {
+                state.start();
+            }
+
+            mDeltaTimeUs = -1;
+            mState = STATE_PLAYING;
+        }
+        return true;
+    }
+
+    public void startWork() throws Exception {
+        try {
+            // Just change state from STATE_IDLE to STATE_PREPARING.
+            start();
+            // Extract media information from uri asset, and change state to STATE_PAUSED.
+            prepare();
+            // Start CodecState, and change from STATE_PAUSED to STATE_PLAYING.
+            start();
+        } catch (IOException e) {
+            throw e;
+        }
+
+        synchronized (mThreadStarted) {
+            mThreadStarted = true;
+            mThread.start();
+        }
+    }
+
+    public void startThread() {
+        start();
+        synchronized (mThreadStarted) {
+            mThreadStarted = true;
+            mThread.start();
+        }
+    }
+
+    public void pause() {
+        Log.d(TAG, "pause");
+
+        synchronized (mState) {
+            if (mState == STATE_PAUSED) {
+                return;
+            } else if (mState != STATE_PLAYING) {
+                throw new IllegalStateException();
+            }
+
+            for (CodecState state : mVideoCodecStates.values()) {
+                state.pause();
+            }
+
+            for (CodecState state : mAudioCodecStates.values()) {
+                state.pause();
+            }
+
+            mState = STATE_PAUSED;
+        }
+    }
+
+    public void flush() {
+        Log.d(TAG, "flush");
+
+        synchronized (mState) {
+            if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
+                return;
+            }
+
+            for (CodecState state : mAudioCodecStates.values()) {
+                state.flush();
+            }
+
+            for (CodecState state : mVideoCodecStates.values()) {
+                state.flush();
+            }
+        }
+    }
+
+    public void reset() {
+        synchronized (mState) {
+            if (mState == STATE_PLAYING) {
+                pause();
+            }
+            if (mVideoCodecStates != null) {
+                for (CodecState state : mVideoCodecStates.values()) {
+                    state.release();
+                }
+                mVideoCodecStates = null;
+            }
+
+            if (mAudioCodecStates != null) {
+                for (CodecState state : mAudioCodecStates.values()) {
+                    state.release();
+                }
+                mAudioCodecStates = null;
+            }
+
+            if (mAudioExtractor != null) {
+                mAudioExtractor.release();
+                mAudioExtractor = null;
+            }
+
+            if (mVideoExtractor != null) {
+                mVideoExtractor.release();
+                mVideoExtractor = null;
+            }
+
+            mDurationUs = -1;
+            mState = STATE_IDLE;
+        }
+        synchronized (mThreadStarted) {
+            mThreadStarted = false;
+        }
+        try {
+            mThread.join();
+        } catch (InterruptedException ex) {
+            Log.d(TAG, "mThread.join " + ex);
+        }
+    }
+
+    public boolean isEnded() {
+        for (CodecState state : mVideoCodecStates.values()) {
+            if (!state.isEnded()) {
+                return false;
+            }
+        }
+
+        for (CodecState state : mAudioCodecStates.values()) {
+            if (!state.isEnded()) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    private void doSomeWork() {
+        try {
+            for (CodecState state : mVideoCodecStates.values()) {
+                state.doSomeWork();
+            }
+        } catch (IllegalStateException e) {
+            throw new Error("Video CodecState.doSomeWork" + e);
+        }
+
+        try {
+            for (CodecState state : mAudioCodecStates.values()) {
+                state.doSomeWork();
+            }
+        } catch (IllegalStateException e) {
+            throw new Error("Audio CodecState.doSomeWork" + e);
+        }
+
+    }
+
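+    // Media clock in microseconds: the audio track's playback position when audio is
+    // present, wall-clock time otherwise.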
+    public long getNowUs() {
+        if (mAudioTrackState == null) {
+            return System.currentTimeMillis() * 1000;
+        }
+
+        return mAudioTrackState.getAudioTimeUs();
+    }
+
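+    // Maps a media timestamp onto the getNowUs() clock; the first call anchors the two timelines.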
+    public long getRealTimeUsForMediaTime(long mediaTimeUs) {
+        if (mDeltaTimeUs == -1) {
+            long nowUs = getNowUs();
+            mDeltaTimeUs = nowUs - mediaTimeUs;
+        }
+
+        return mDeltaTimeUs + mediaTimeUs;
+    }
+
+    public int getDuration() {
+        return (int)((mDurationUs + 500) / 1000);
+    }
+
+    public int getCurrentPosition() {
+        if (mVideoCodecStates == null) {
+            return 0;
+        }
+
+        long positionUs = 0;
+
+        for (CodecState state : mVideoCodecStates.values()) {
+            long trackPositionUs = state.getCurrentPositionUs();
+
+            if (trackPositionUs > positionUs) {
+                positionUs = trackPositionUs;
+            }
+        }
+        return (int)((positionUs + 500) / 1000);
+    }
+
+}
diff --git a/tests/tests/media/src/android/media/cts/MediaTimeProvider.java b/tests/tests/media/src/android/media/cts/MediaTimeProvider.java
new file mode 100644
index 0000000..4f6837e
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/MediaTimeProvider.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.cts;
+
+/**
+ * Interface used by CodecState to retrieve media timing information from the parent player.
+ */
+public interface MediaTimeProvider {
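+    /** Returns the current playback clock time, in microseconds. */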
+    public long getNowUs();
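+    /** Maps a media presentation timestamp (in microseconds) onto the playback clock. */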
+    public long getRealTimeUsForMediaTime(long mediaTimeUs);
+}
diff --git a/tests/tests/media/src/android/media/cts/NonBlockingAudioTrack.java b/tests/tests/media/src/android/media/cts/NonBlockingAudioTrack.java
index 3ba1ce8..20c1dff 100644
--- a/tests/tests/media/src/android/media/cts/NonBlockingAudioTrack.java
+++ b/tests/tests/media/src/android/media/cts/NonBlockingAudioTrack.java
@@ -18,6 +18,7 @@
 import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
+import android.media.AudioAttributes;
 import android.util.Log;
 
 import java.util.LinkedList;
@@ -46,7 +47,8 @@
     private int mNumBytesQueued = 0;
     private LinkedList<QueueElem> mQueue = new LinkedList<QueueElem>();
 
-    public NonBlockingAudioTrack(int sampleRate, int channelCount) {
+    public NonBlockingAudioTrack(int sampleRate, int channelCount, boolean hwAvSync,
+                    int audioSessionId) {
         int channelConfig;
         switch (channelCount) {
             case 1:
@@ -70,13 +72,29 @@
 
         int bufferSize = 2 * minBufferSize;
 
-        mAudioTrack = new AudioTrack(
-                AudioManager.STREAM_MUSIC,
-                sampleRate,
-                channelConfig,
-                AudioFormat.ENCODING_PCM_16BIT,
-                bufferSize,
-                AudioTrack.MODE_STREAM);
+        if (!hwAvSync) {
+            mAudioTrack = new AudioTrack(
+                    AudioManager.STREAM_MUSIC,
+                    sampleRate,
+                    channelConfig,
+                    AudioFormat.ENCODING_PCM_16BIT,
+                    bufferSize,
+                    AudioTrack.MODE_STREAM);
+        } else {
+            // build AudioTrack using Audio Attributes and FLAG_HW_AV_SYNC
+            AudioAttributes audioAttributes = (new AudioAttributes.Builder())
+                            .setLegacyStreamType(AudioManager.STREAM_MUSIC)
+                            .setFlags(AudioAttributes.FLAG_HW_AV_SYNC)
+                            .build();
+            AudioFormat audioFormat = (new AudioFormat.Builder())
+                            .setChannelMask(channelConfig)
+                            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+                            .setSampleRate(sampleRate)
+                            .build();
+            mAudioTrack = new AudioTrack(audioAttributes, audioFormat, bufferSize,
+                                    AudioTrack.MODE_STREAM, audioSessionId);
+        }
 
         mSampleRate = sampleRate;
         mFrameSize = 2 * channelCount;
@@ -113,6 +131,16 @@
         mAudioTrack.pause();
     }
 
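+    // AudioTrack.flush() has no effect while playing, so only flush queued data when
+    // the track is stopped or paused.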
+    public void flush() {
+        if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+            return;
+        }
+        mAudioTrack.flush();
+        mNumFramesSubmitted = 0;
+        mQueue.clear();
+        mNumBytesQueued = 0;
+    }
+
     public void release() {
         cancelWriteMore();