Merge "stagefright: TinyCacheSource to read continuously" into mnc-dev
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 61da4f2..5af6c10 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -429,7 +429,8 @@
 private:
 
      // Implements the IEffectClient interface
-    class EffectClient : public android::BnEffectClient,  public android::IBinder::DeathRecipient
+    class EffectClient :
+        public android::BnEffectClient, public android::IBinder::DeathRecipient
     {
     public:
 
@@ -437,24 +438,39 @@
 
         // IEffectClient
         virtual void controlStatusChanged(bool controlGranted) {
-            mEffect->controlStatusChanged(controlGranted);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->controlStatusChanged(controlGranted);
+            }
         }
         virtual void enableStatusChanged(bool enabled) {
-            mEffect->enableStatusChanged(enabled);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->enableStatusChanged(enabled);
+            }
         }
         virtual void commandExecuted(uint32_t cmdCode,
                                      uint32_t cmdSize,
                                      void *pCmdData,
                                      uint32_t replySize,
                                      void *pReplyData) {
-            mEffect->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->commandExecuted(
+                    cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+            }
         }
 
         // IBinder::DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who) {mEffect->binderDied();}
+        virtual void binderDied(const wp<IBinder>& who) {
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->binderDied();
+            }
+        }
 
     private:
-        AudioEffect *mEffect;
+        wp<AudioEffect> mEffect;
     };
 
     void binderDied();
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 7023453..3d29e4a 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -198,7 +198,7 @@
         EVENT,
         EMPTY_BUFFER_DONE,
         FILL_BUFFER_DONE,
-
+        FRAME_RENDERED,
     } type;
 
     IOMX::node_id node;
@@ -226,6 +226,11 @@
             OMX_TICKS timestamp;
         } extended_buffer_data;
 
+        // if type == FRAME_RENDERED
+        struct {
+            OMX_TICKS timestamp;
+            OMX_S64 nanoTime;
+        } render_data;
     } u;
 };
 
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index d4891a1..f9ea38e 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -24,6 +24,7 @@
 #include <media/IOMX.h>
 #include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <OMX_Audio.h>
 
@@ -162,6 +163,7 @@
         sp<ABuffer> mData;
         sp<GraphicBuffer> mGraphicBuffer;
         int mFenceFd;
+        FrameRenderTracker::Info *mRenderInfo;
 
         // The following field and 4 methods are used for debugging only
         bool mIsReadFence;
@@ -214,6 +216,7 @@
     sp<AMessage> mOutputFormat;
     sp<AMessage> mBaseOutputFormat;
 
+    FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
     Vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
@@ -375,6 +378,23 @@
     void deferMessage(const sp<AMessage> &msg);
     void processDeferredMessages();
 
+    void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+    // called when we have dequeued a buffer |buf| from the native window to track render info.
+    // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
+    // stored.
+    void updateRenderInfoForDequeuedBuffer(
+            ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
+
+    // Checks to see if any frames have rendered up until |until|, and notifies the client
+    // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
+    // unrendered frame. These frames are removed from the render queue.
+    // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
+    // queue, allowing all rendered frames up till then to be notified of.
+    // (This will effectively clear the render queue up-until (and including) |until|.)
+    // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
+    void notifyOfRenderedFrames(
+            bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
+
     void sendFormatChange(const sp<AMessage> &reply);
     status_t getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify);
 
diff --git a/include/media/stagefright/CodecBase.h b/include/media/stagefright/CodecBase.h
index 989df4f..bb36052 100644
--- a/include/media/stagefright/CodecBase.h
+++ b/include/media/stagefright/CodecBase.h
@@ -43,6 +43,7 @@
         kWhatInputSurfaceAccepted = 'isfa',
         kWhatSignaledInputEOS    = 'seos',
         kWhatBuffersAllocated    = 'allc',
+        kWhatOutputFramesRendered = 'outR',
     };
 
     virtual void setNotificationMessage(const sp<AMessage> &msg) = 0;
diff --git a/include/media/stagefright/FrameRenderTracker.h b/include/media/stagefright/FrameRenderTracker.h
new file mode 100644
index 0000000..3b0db5a
--- /dev/null
+++ b/include/media/stagefright/FrameRenderTracker.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_RENDER_TRACKER_H_
+
+#define FRAME_RENDER_TRACKER_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+#include <system/window.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <list>
+
+namespace android {
+
+class Fence;
+class GraphicBuffer;
+
+struct FrameRenderTracker : public RefBase {
+    // Tracks the render information about a frame. Frames go through several states while
+    // the render information is tracked:
+    //
+    // 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
+    // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+    // Key characteristics: mFence is not NULL and mIndex is negative.
+    //
+    // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+    // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTimeNs is still
+    // invalid.
+    //
+    // 3. rendered or dropped frame: mFence is cleared, mRenderTimeNs is set.
+    // Key characteristics: mFence is NULL.
+    //
+    struct Info {
+        // set by client during onFrameQueued or onFrameRendered
+        int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+
+        // -1 if frame is not yet rendered
+        nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+        // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+        ssize_t getIndex() const        { return mIndex; }
+
+        // creates information for a queued frame
+        Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence)
+            : mMediaTimeUs(mediaTimeUs),
+              mRenderTimeNs(-1),
+              mIndex(-1),
+              mGraphicBuffer(graphicBuffer),
+              mFence(fence) {
+        }
+
+        // creates information for a frame rendered on a tunneled surface
+        Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+            : mMediaTimeUs(mediaTimeUs),
+              mRenderTimeNs(renderTimeNs),
+              mIndex(-1),
+              mGraphicBuffer(NULL),
+              mFence(NULL) {
+        }
+
+    private:
+        int64_t mMediaTimeUs;
+        nsecs_t mRenderTimeNs;
+        ssize_t mIndex;         // to be used by client
+        sp<GraphicBuffer> mGraphicBuffer;
+        sp<Fence> mFence;
+
+        friend class FrameRenderTracker;
+    };
+
+    FrameRenderTracker();
+
+    void setComponentName(const AString &componentName);
+
+    // clears all tracked frames, and resets last render time
+    void clear(nsecs_t lastRenderTimeNs);
+
+    // called when |graphicBuffer| corresponding to |mediaTimeUs| is
+    // queued to the output surface using |fence|.
+    void onFrameQueued(
+            int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence);
+
+    // Called when we have dequeued a buffer |buf| from the native window to track render info.
+    // |fenceFd| is the dequeue fence, and |index| is a positive buffer ID to be usable by the
+    // client to track this render info among the dequeued buffers.
+    // Returns pointer to the tracked info, or NULL if buffer is not tracked or if |index|
+    // is negative.
+    Info *updateInfoForDequeuedBuffer(ANativeWindowBuffer *buf, int fenceFd, int index);
+
+    // called when tunneled codec signals frame rendered event
+    // returns BAD_VALUE if systemNano is not monotonic. Otherwise, returns OK.
+    status_t onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+    // Checks to see if any frames have rendered up until |until|. If |until| is NULL or not a
+    // tracked info, this method searches the entire render queue.
+    // Returns list of rendered frames up-until the frame pointed to by |until| or the first
+    // unrendered frame, as well as any dropped frames (those with invalid fence) up-until |until|.
+    // These frames are removed from the render queue.
+    // If |dropIncomplete| is true, unrendered frames up-until |until| will also be dropped from the
+    // queue, allowing all rendered frames up till then to be notified of.
+    // (This will effectively clear the render queue up-until (and including) |until|.)
+    std::list<Info> checkFencesAndGetRenderedFrames(const Info *until, bool dropIncomplete);
+
+    // Stop tracking a queued frame (e.g. if the frame has been discarded). If |info| is NULL or is
+    // not tracked, this method is a no-op.
+    void untrackFrame(const Info *info);
+
+    void dumpRenderQueue() const;
+
+    virtual ~FrameRenderTracker();
+
+private:
+
+    // Render information for buffers. Regular surface buffers are queued in the order of
+    // rendering. Tunneled buffers are queued in the order of receipt.
+    std::list<Info> mRenderQueue;
+    nsecs_t mLastRenderTimeNs;
+    AString mComponentName;
+
+    DISALLOW_EVIL_CONSTRUCTORS(FrameRenderTracker);
+};
+
+}  // namespace android
+
+#endif  // FRAME_RENDER_TRACKER_H_
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index d62b35d..09cbe8f 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -22,6 +22,7 @@
 #include <media/hardware/CryptoAPI.h>
 #include <media/MediaResource.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
 
 namespace android {
@@ -76,6 +77,8 @@
 
     status_t setCallback(const sp<AMessage> &callback);
 
+    status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
     status_t setInputSurface(const sp<PersistentSurface> &surface);
@@ -157,6 +160,12 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    // Create a MediaCodec notification message from a list of rendered or dropped render infos
+    // by adding rendered frame information to a base notification message. Returns the number
+    // of frames that were rendered.
+    static size_t CreateFramesRenderedMessage(
+            std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg);
+
 protected:
     virtual ~MediaCodec();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -212,6 +221,7 @@
         kWhatGetName                        = 'getN',
         kWhatSetParameters                  = 'setP',
         kWhatSetCallback                    = 'setC',
+        kWhatSetNotification                = 'setN',
     };
 
     enum {
@@ -275,9 +285,11 @@
     status_t mStickyError;
     sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
+
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
+    sp<AMessage> mOnFrameRenderedNotification;
     sp<MemoryDealer> mDealer;
 
     sp<IResourceManagerClient> mResourceManagerClient;
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index bbeb854..ff82544 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -134,12 +134,14 @@
 
     if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) {
         ALOGE("set(): AudioFlinger could not create effect, status: %d", mStatus);
+        if (iEffect == 0) {
+            mStatus = NO_INIT;
+        }
         return mStatus;
     }
 
     mEnabled = (volatile int32_t)enabled;
 
-    mIEffect = iEffect;
     cblk = iEffect->getCblk();
     if (cblk == 0) {
         mStatus = NO_INIT;
@@ -147,6 +149,7 @@
         return mStatus;
     }
 
+    mIEffect = iEffect;
     mCblkMemory = cblk;
     mCblk = static_cast<effect_param_cblk_t*>(cblk->pointer());
     int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
@@ -177,11 +180,11 @@
             mIEffect->disconnect();
             IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
         }
+        mIEffect.clear();
+        mCblkMemory.clear();
+        mIEffectClient.clear();
         IPCThreadState::self()->flushCommands();
     }
-    mIEffect.clear();
-    mIEffectClient.clear();
-    mCblkMemory.clear();
 }
 
 
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index ca33aed..e8d495b 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -70,12 +70,6 @@
         return STAGEFRIGHT_PLAYER;
     }
 
-    // TODO: remove this EXPERIMENTAL developer settings property
-    if (property_get("persist.sys.media.use-awesome", value, NULL)
-            && !strcasecmp("true", value)) {
-        return STAGEFRIGHT_PLAYER;
-    }
-
     return NU_PLAYER;
 }
 
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c0b35e8..ae869d6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1679,7 +1679,7 @@
     t->setVolume(mLeftVolume, mRightVolume);
 
     mSampleRateHz = sampleRate;
-    mFlags = flags;
+    mFlags = t->getFlags(); // we suggest the flags above, but new AudioTrack() may not grant them.
     mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
     uint32_t pos;
     if (t->getPosition(&pos) == OK) {
@@ -1688,7 +1688,9 @@
     mTrack = t;
 
     status_t res = NO_ERROR;
-    if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) {
+    // Note some output devices may give us a direct track even though we don't specify it.
+    // Example: Line application b/17459982.
+    if ((mFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
         res = t->setPlaybackRate(mPlaybackRate);
         if (res == NO_ERROR) {
             t->setAuxEffectSendLevel(mSendLevel);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 8760cbb..ef96a28 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1729,13 +1729,15 @@
     return renderer->getCurrentPosition(mediaUs);
 }
 
-void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) {
-    sp<DecoderBase> decoder = getDecoder(false /* audio */);
-    if (decoder != NULL) {
-        decoder->getStats(numFramesTotal, numFramesDropped);
-    } else {
-        *numFramesTotal = 0;
-        *numFramesDropped = 0;
+void NuPlayer::getStats(Vector<sp<AMessage> > *mTrackStats) {
+    CHECK(mTrackStats != NULL);
+
+    mTrackStats->clear();
+    if (mVideoDecoder != NULL) {
+        mTrackStats->push_back(mVideoDecoder->getStats());
+    }
+    if (mAudioDecoder != NULL) {
+        mTrackStats->push_back(mAudioDecoder->getStats());
     }
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index d7aa830..298ea6d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -77,7 +77,7 @@
     status_t getSelectedTrack(int32_t type, Parcel* reply) const;
     status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
     status_t getCurrentPosition(int64_t *mediaUs);
-    void getStats(int64_t *mNumFramesTotal, int64_t *mNumFramesDropped);
+    void getStats(Vector<sp<AMessage> > *mTrackStats);
 
     sp<MetaData> getFileMeta();
     float getFrameRate();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index c649c62..99a2a84 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -58,7 +58,10 @@
       mCCDecoder(ccDecoder),
       mSkipRenderingUntilMediaTimeUs(-1ll),
       mNumFramesTotal(0ll),
-      mNumFramesDropped(0ll),
+      mNumInputFramesDropped(0ll),
+      mNumOutputFramesDropped(0ll),
+      mVideoWidth(0),
+      mVideoHeight(0),
       mIsAudio(true),
       mIsVideoAVC(false),
       mIsSecure(false),
@@ -77,11 +80,11 @@
     releaseAndResetMediaBuffers();
 }
 
-void NuPlayer::Decoder::getStats(
-        int64_t *numFramesTotal,
-        int64_t *numFramesDropped) const {
-    *numFramesTotal = mNumFramesTotal;
-    *numFramesDropped = mNumFramesDropped;
+sp<AMessage> NuPlayer::Decoder::getStats() const {
+    mStats->setInt64("frames-total", mNumFramesTotal);
+    mStats->setInt64("frames-dropped-input", mNumInputFramesDropped);
+    mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped);
+    return mStats;
 }
 
 void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
@@ -237,6 +240,18 @@
     CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
     CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
 
+    mStats->setString("mime", mime.c_str());
+    mStats->setString("component-name", mComponentName.c_str());
+
+    if (!mIsAudio) {
+        int32_t width, height;
+        if (mOutputFormat->findInt32("width", &width)
+                && mOutputFormat->findInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
+    }
+
     sp<AMessage> reply = new AMessage(kWhatCodecNotify, this);
     mCodec->setCallback(reply);
 
@@ -520,6 +535,8 @@
         mSkipRenderingUntilMediaTimeUs = -1;
     }
 
+    mNumFramesTotal += !mIsAudio;
+
     // wait until 1st frame comes out to signal resume complete
     notifyResumeCompleteIfNecessary();
 
@@ -536,6 +553,12 @@
 
 void NuPlayer::Decoder::handleOutputFormatChange(const sp<AMessage> &format) {
     if (!mIsAudio) {
+        int32_t width, height;
+        if (format->findInt32("width", &width)
+                && format->findInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
         sp<AMessage> notify = mNotify->dup();
         notify->setInt32("what", kWhatVideoSizeChanged);
         notify->setMessage("format", format);
@@ -654,10 +677,6 @@
             return ERROR_END_OF_STREAM;
         }
 
-        if (!mIsAudio) {
-            ++mNumFramesTotal;
-        }
-
         dropAccessUnit = false;
         if (!mIsAudio
                 && !mIsSecure
@@ -665,7 +684,7 @@
                 && mIsVideoAVC
                 && !IsAVCReferenceFrame(accessUnit)) {
             dropAccessUnit = true;
-            ++mNumFramesDropped;
+            ++mNumInputFramesDropped;
         }
     } while (dropAccessUnit);
 
@@ -833,6 +852,7 @@
         CHECK(msg->findInt64("timestampNs", &timestampNs));
         err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
     } else {
+        mNumOutputFramesDropped += !mIsAudio;
         err = mCodec->releaseOutputBuffer(bufferIx);
     }
     if (err != OK) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 070d51a..ceccb7a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -30,9 +30,7 @@
             const sp<Surface> &surface = NULL,
             const sp<CCDecoder> &ccDecoder = NULL);
 
-    virtual void getStats(
-            int64_t *mNumFramesTotal,
-            int64_t *mNumFramesDropped) const;
+    virtual sp<AMessage> getStats() const;
 
 protected:
     virtual ~Decoder();
@@ -77,7 +75,10 @@
 
     int64_t mSkipRenderingUntilMediaTimeUs;
     int64_t mNumFramesTotal;
-    int64_t mNumFramesDropped;
+    int64_t mNumInputFramesDropped;
+    int64_t mNumOutputFramesDropped;
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
     bool mIsAudio;
     bool mIsVideoAVC;
     bool mIsSecure;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
index 9d509bf..7e76842 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
@@ -31,6 +31,7 @@
 NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> &notify)
     :  mNotify(notify),
        mBufferGeneration(0),
+       mStats(new AMessage),
        mRequestInputBuffersPending(false) {
     // Every decoder has its own looper because MediaCodec operations
     // are blocking, but NuPlayer needs asynchronous operations.
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
index b52e7f7..8f030f0 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
@@ -42,9 +42,9 @@
     void signalResume(bool notifyComplete);
     void initiateShutdown();
 
-    virtual void getStats(
-            int64_t *mNumFramesTotal,
-            int64_t *mNumFramesDropped) const = 0;
+    virtual sp<AMessage> getStats() const {
+        return mStats;
+    }
 
     enum {
         kWhatInputDiscontinuity  = 'inDi',
@@ -76,6 +76,7 @@
 
     sp<AMessage> mNotify;
     int32_t mBufferGeneration;
+    sp<AMessage> mStats;
 
 private:
     enum {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index d7b070e..30146c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -59,12 +59,6 @@
 NuPlayer::DecoderPassThrough::~DecoderPassThrough() {
 }
 
-void NuPlayer::DecoderPassThrough::getStats(
-        int64_t *numFramesTotal, int64_t *numFramesDropped) const {
-    *numFramesTotal = 0;
-    *numFramesDropped = 0;
-}
-
 void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
     ALOGV("[%s] onConfigure", mComponentName.c_str());
     mCachedBytes = 0;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
index 2f6df2c..db33e87 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -29,10 +29,6 @@
                        const sp<Source> &source,
                        const sp<Renderer> &renderer);
 
-    virtual void getStats(
-            int64_t *mNumFramesTotal,
-            int64_t *mNumFramesDropped) const;
-
 protected:
 
     virtual ~DecoderPassThrough();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 84ae25e..551e07a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "NuPlayerDriver"
 #include <inttypes.h>
 #include <utils/Log.h>
+#include <cutils/properties.h>
 
 #include "NuPlayerDriver.h"
 
@@ -484,6 +485,13 @@
         notifyListener_l(MEDIA_STOPPED);
     }
 
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("persist.debug.sf.stats", value, NULL) &&
+            (!strcmp("1", value) || !strcasecmp("true", value))) {
+        Vector<String16> args;
+        dump(-1, args);
+    }
+
     mState = STATE_RESET_IN_PROGRESS;
     mPlayer->resetAsync();
 
@@ -652,22 +660,59 @@
 
 status_t NuPlayerDriver::dump(
         int fd, const Vector<String16> & /* args */) const {
-    int64_t numFramesTotal;
-    int64_t numFramesDropped;
-    mPlayer->getStats(&numFramesTotal, &numFramesDropped);
 
-    FILE *out = fdopen(dup(fd), "w");
+    Vector<sp<AMessage> > trackStats;
+    mPlayer->getStats(&trackStats);
 
-    fprintf(out, " NuPlayer\n");
-    fprintf(out, "  numFramesTotal(%" PRId64 "), numFramesDropped(%" PRId64 "), "
-                 "percentageDropped(%.2f)\n",
-                 numFramesTotal,
-                 numFramesDropped,
-                 numFramesTotal == 0
-                    ? 0.0 : (double)numFramesDropped / numFramesTotal);
+    AString logString(" NuPlayer\n");
+    char buf[256] = {0};
 
-    fclose(out);
-    out = NULL;
+    for (size_t i = 0; i < trackStats.size(); ++i) {
+        const sp<AMessage> &stats = trackStats.itemAt(i);
+
+        AString mime;
+        if (stats->findString("mime", &mime)) {
+            snprintf(buf, sizeof(buf), "  mime(%s)\n", mime.c_str());
+            logString.append(buf);
+        }
+
+        AString name;
+        if (stats->findString("component-name", &name)) {
+            snprintf(buf, sizeof(buf), "    decoder(%s)\n", name.c_str());
+            logString.append(buf);
+        }
+
+        if (mime.startsWith("video/")) {
+            int32_t width, height;
+            if (stats->findInt32("width", &width)
+                    && stats->findInt32("height", &height)) {
+                snprintf(buf, sizeof(buf), "    resolution(%d x %d)\n", width, height);
+                logString.append(buf);
+            }
+
+            int64_t numFramesTotal = 0;
+            int64_t numFramesDropped = 0;
+
+            stats->findInt64("frames-total", &numFramesTotal);
+            stats->findInt64("frames-dropped-output", &numFramesDropped);
+            snprintf(buf, sizeof(buf), "    numFramesTotal(%lld), numFramesDropped(%lld), "
+                     "percentageDropped(%.2f%%)\n",
+                     (long long)numFramesTotal,
+                     (long long)numFramesDropped,
+                     numFramesTotal == 0
+                            ? 0.0 : (double)(numFramesDropped * 100) / numFramesTotal);
+            logString.append(buf);
+        }
+    }
+
+    ALOGI("%s", logString.c_str());
+
+    if (fd >= 0) {
+        FILE *out = fdopen(dup(fd), "w");
+        fprintf(out, "%s", logString.c_str());
+        fclose(out);
+        out = NULL;
+    }
 
     return OK;
 }
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index f7e3117..fb2e767 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -425,14 +425,14 @@
 
         case kWhatDrainAudioQueue:
         {
+            mDrainAudioQueuePending = false;
+
             int32_t generation;
             CHECK(msg->findInt32("drainGeneration", &generation));
             if (generation != getDrainGeneration(true /* audio */)) {
                 break;
             }
 
-            mDrainAudioQueuePending = false;
-
             if (onDrainAudioQueue()) {
                 uint32_t numFramesPlayed;
                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
@@ -841,7 +841,7 @@
         if (written < 0) {
             // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
             if (written == WOULD_BLOCK) {
-                ALOGW("AudioSink write would block when writing %zu bytes", copy);
+                ALOGV("AudioSink write would block when writing %zu bytes", copy);
             } else {
                 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
                 notifyAudioTearDown();
@@ -1684,8 +1684,10 @@
                 onDisableOffloadAudio();
                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                 ALOGV("openAudioSink: offload failed");
+            } else {
+                mUseAudioCallback = true;  // offload mode transfers data through callback
+                ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
             }
-            mUseAudioCallback = true;  // offload mode transfers data through callback
         }
     }
     if (!offloadOnly && !offloadingAudio()) {
@@ -1712,6 +1714,9 @@
         // Note: It is possible to set up the callback, but not use it to send audio data.
         // This requires a fix in AudioSink to explicitly specify the transfer mode.
         mUseAudioCallback = getUseAudioCallbackSetting();
+        if (mUseAudioCallback) {
+            ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
+        }
 
         // Compute the desired buffer size.
         // For callback mode, the amount of time before wakeup is about half the buffer size.
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 172e19c..cf37eba 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -37,6 +37,7 @@
 #include <media/stagefright/foundation/AUtils.h>
 
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/OMXClient.h>
@@ -109,6 +110,8 @@
 struct MessageList : public RefBase {
     MessageList() {
     }
+    virtual ~MessageList() {
+    }
     std::list<sp<AMessage> > &getList() { return mList; }
 private:
     std::list<sp<AMessage> > mList;
@@ -125,15 +128,19 @@
 
     // from IOMXObserver
     virtual void onMessages(const std::list<omx_message> &messages) {
-        sp<AMessage> notify;
+        if (messages.empty()) {
+            return;
+        }
+
+        sp<AMessage> notify = mNotify->dup();
         bool first = true;
         sp<MessageList> msgList = new MessageList();
         for (std::list<omx_message>::const_iterator it = messages.cbegin();
               it != messages.cend(); ++it) {
             const omx_message &omx_msg = *it;
             if (first) {
-                notify = mNotify->dup();
                 notify->setInt32("node", omx_msg.node);
+                first = false;
             }
 
             sp<AMessage> msg = new AMessage;
@@ -175,6 +182,15 @@
                     break;
                 }
 
+                case omx_message::FRAME_RENDERED:
+                {
+                    msg->setInt64(
+                            "media_time_us", omx_msg.u.render_data.timestamp);
+                    msg->setInt64(
+                            "system_nano", omx_msg.u.render_data.nanoTime);
+                    break;
+                }
+
                 default:
                     ALOGE("Unrecognized message type: %d", omx_msg.type);
                     break;
@@ -238,6 +254,8 @@
             int64_t timeUs,
             int fenceFd);
 
+    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
     void getMoreInputDataIfPossible();
 
     DISALLOW_EVIL_CONSTRUCTORS(BaseState);
@@ -354,6 +372,7 @@
     virtual void stateEntered();
 
     virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
 
 private:
     bool mActive;
@@ -372,6 +391,7 @@
     virtual void stateEntered();
 
     virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
 
 private:
     DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
@@ -763,10 +783,13 @@
 
             // If using gralloc or native source input metadata buffers, allocate largest
             // metadata size as we prefer to generate native source metadata, but component
-            // may require gralloc source.
+            // may require gralloc source. For camera source, allocate at least enough
+            // size for native metadata buffers.
             int32_t allottedSize = bufSize;
-            if (portIndex == kPortIndexInput && type > 0) {
+            if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
                 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
+            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
+                bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata));
             }
 
             ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",
@@ -786,6 +809,7 @@
                 BufferInfo info;
                 info.mStatus = BufferInfo::OWNED_BY_US;
                 info.mFenceFd = -1;
+                info.mRenderInfo = NULL;
 
                 uint32_t requiresAllocateBufferBit =
                     (portIndex == kPortIndexInput)
@@ -1002,6 +1026,7 @@
         info.mStatus = BufferInfo::OWNED_BY_US;
         info.mFenceFd = fenceFd;
         info.mIsReadFence = false;
+        info.mRenderInfo = NULL;
         info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
         info.mGraphicBuffer = graphicBuffer;
         mBuffers[kPortIndexOutput].push(info);
@@ -1073,6 +1098,7 @@
         BufferInfo info;
         info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
         info.mFenceFd = -1;
+        info.mRenderInfo = NULL;
         info.mGraphicBuffer = NULL;
         info.mDequeuedAt = mDequeueCounter;
 
@@ -1219,6 +1245,42 @@
     return err;
 }
 
+void ACodec::updateRenderInfoForDequeuedBuffer(
+        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
+
+    info->mRenderInfo =
+        mRenderTracker.updateInfoForDequeuedBuffer(
+                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
+
+    // check for any fences already signaled
+    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
+}
+
+void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
+        mRenderTracker.dumpRenderQueue();
+    }
+}
+
+void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
+    sp<AMessage> msg = mNotify->dup();
+    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
+    std::list<FrameRenderTracker::Info> done =
+        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
+
+    // unlink untracked frames
+    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
+            it != done.cend(); ++it) {
+        if (it->getIndex() >= 0) {
+            mBuffers[kPortIndexOutput].editItemAt(it->getIndex()).mRenderInfo = NULL;
+        }
+    }
+
+    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
+        msg->post();
+    }
+}
+
 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
     ANativeWindowBuffer *buf;
     CHECK(mNativeWindow.get() != NULL);
@@ -1242,7 +1304,7 @@
             BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
 
             if (info->mGraphicBuffer != NULL &&
-                info->mGraphicBuffer->handle == buf->handle) {
+                    info->mGraphicBuffer->handle == buf->handle) {
                 // Since consumers can attach buffers to BufferQueues, it is possible
                 // that a known yet stale buffer can return from a surface that we
                 // once used.  We can simply ignore this as we have already dequeued
@@ -1255,9 +1317,11 @@
                     stale = true;
                     break;
                 }
+
                 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                 info->mStatus = BufferInfo::OWNED_BY_US;
                 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
+                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                 return info;
             }
         }
@@ -1301,6 +1365,8 @@
     oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
     oldest->mStatus = BufferInfo::OWNED_BY_US;
     oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
+    mRenderTracker.untrackFrame(oldest->mRenderInfo);
+    oldest->mRenderInfo = NULL;
 
     mOMX->updateGraphicBufferInMeta(
             mNode, kPortIndexOutput, oldest->mGraphicBuffer,
@@ -1324,6 +1390,7 @@
                 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
     }
 
+    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
     return oldest;
 }
 
@@ -1398,6 +1465,9 @@
         ::close(info->mFenceFd);
     }
 
+    mRenderTracker.untrackFrame(info->mRenderInfo);
+    info->mRenderInfo = NULL;
+
     // remove buffer even if mOMX->freeBuffer fails
     mBuffers[portIndex].removeAt(i);
 
@@ -4518,9 +4588,20 @@
     CHECK(msg->findObject("messages", &obj));
     sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
 
+    bool receivedRenderedEvents = false;
     for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
           it != msgList->getList().cend(); ++it) {
         onOMXMessage(*it);
+        int32_t type;
+        CHECK((*it)->findInt32("type", &type));
+        if (type == omx_message::FRAME_RENDERED) {
+            receivedRenderedEvents = true;
+        }
+    }
+
+    if (receivedRenderedEvents) {
+        // NOTE: all buffers are rendered in this case
+        mCodec->notifyOfRenderedFrames();
     }
     return true;
 }
@@ -4587,12 +4668,29 @@
                     fenceFd);
         }
 
+        case omx_message::FRAME_RENDERED:
+        {
+            int64_t mediaTimeUs, systemNano;
+
+            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
+            CHECK(msg->findInt64("system_nano", &systemNano));
+
+            return onOMXFrameRendered(
+                    mediaTimeUs, systemNano);
+        }
+
         default:
             ALOGE("Unexpected message type: %d", type);
             return false;
     }
 }
 
+bool ACodec::BaseState::onOMXFrameRendered(
+        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
+    // ignore outside of Executing and PortSettingsChanged states
+    return true;
+}
+
 bool ACodec::BaseState::onOMXEvent(
         OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
     if (event != OMX_EventError) {
@@ -4949,6 +5047,15 @@
     info->mDequeuedAt = ++mCodec->mDequeueCounter;
     info->mStatus = BufferInfo::OWNED_BY_US;
 
+    if (info->mRenderInfo != NULL) {
+        // The fence for an emptied buffer must have signaled, but there still could be queued
+        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
+        // as we will soon requeue this buffer to the surface. While in theory we could still keep
+        // track of buffers that are requeued to the surface, it is better to add support to the
+        // buffer-queue to notify us of released buffers and their fences (in the future).
+        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
+    }
+
     // byte buffers cannot take fences, so wait for any fence now
     if (mCodec->mNativeWindow == NULL) {
         (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
@@ -5085,6 +5192,14 @@
         ATRACE_NAME("render");
         // The client wants this buffer to be rendered.
 
+        // save buffers sent to the surface so we can get render time when they return
+        int64_t mediaTimeUs = -1;
+        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+        if (mediaTimeUs >= 0) {
+            mCodec->mRenderTracker.onFrameQueued(
+                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
+        }
+
         int64_t timestampNs = 0;
         if (!msg->findInt64("timestampNs", &timestampNs)) {
             // TODO: it seems like we should use the timestamp
@@ -5367,6 +5482,7 @@
     observer->setNotificationMessage(notify);
 
     mCodec->mComponentName = componentName;
+    mCodec->mRenderTracker.setComponentName(componentName);
     mCodec->mFlags = 0;
 
     if (componentName.endsWith(".secure")) {
@@ -5969,6 +6085,7 @@
 void ACodec::ExecutingState::stateEntered() {
     ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
 
+    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
     mCodec->processDeferredMessages();
 }
 
@@ -6179,6 +6296,11 @@
     notify->post();
 }
 
+bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    return true;
+}
+
 bool ACodec::ExecutingState::onOMXEvent(
         OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
     switch (event) {
@@ -6266,6 +6388,12 @@
          mCodec->mComponentName.c_str());
 }
 
+bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
+        int64_t mediaTimeUs, nsecs_t systemNano) {
+    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    return true;
+}
+
 bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
         OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
     switch (event) {
@@ -6644,6 +6772,8 @@
         // the native window for rendering. Let's get those back as well.
         mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
 
+        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
         sp<AMessage> notify = mCodec->mNotify->dup();
         notify->setInt32("what", CodecBase::kWhatFlushCompleted);
         notify->post();
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 0dfa300..69128bd 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -23,6 +23,7 @@
         ESDS.cpp                          \
         FileSource.cpp                    \
         FLACExtractor.cpp                 \
+        FrameRenderTracker.cpp            \
         HTTPBase.cpp                      \
         JPEGSource.cpp                    \
         MP3Extractor.cpp                  \
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index df01e7c..4e6c2a6 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -113,11 +113,11 @@
         CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
 
         render((const uint8_t *)buffer->data() + buffer->range_offset(),
-               buffer->range_length(), timeUs * 1000);
+               buffer->range_length(), timeUs, timeUs * 1000);
     }
 
-    void render(const void *data, size_t size, int64_t timestampNs) {
-        mTarget->render(data, size, timestampNs, NULL, mFormat);
+    void render(const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs) {
+        (void)mTarget->render(data, size, mediaTimeUs, renderTimeNs, NULL, mFormat);
     }
 
 protected:
diff --git a/media/libstagefright/FrameRenderTracker.cpp b/media/libstagefright/FrameRenderTracker.cpp
new file mode 100644
index 0000000..ebd2197
--- /dev/null
+++ b/media/libstagefright/FrameRenderTracker.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameRenderTracker"
+
+#include <inttypes.h>
+#include <gui/Surface.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/FrameRenderTracker.h>
+
+namespace android {
+
+FrameRenderTracker::FrameRenderTracker()
+    : mLastRenderTimeNs(-1),
+      mComponentName("unknown component") {
+}
+
+FrameRenderTracker::~FrameRenderTracker() {
+}
+
+void FrameRenderTracker::setComponentName(const AString &componentName) {
+    mComponentName = componentName;
+}
+
+void FrameRenderTracker::clear(nsecs_t lastRenderTimeNs) {
+    mRenderQueue.clear();
+    mLastRenderTimeNs = lastRenderTimeNs;
+}
+
+void FrameRenderTracker::onFrameQueued(
+        int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence) {
+    mRenderQueue.emplace_back(mediaTimeUs, graphicBuffer, fence);
+}
+
+FrameRenderTracker::Info *FrameRenderTracker::updateInfoForDequeuedBuffer(
+        ANativeWindowBuffer *buf, int fenceFd, int index) {
+    if (index < 0) {
+        return NULL;
+    }
+
+    // see if this is a buffer that was to be rendered
+    std::list<Info>::iterator renderInfo = mRenderQueue.end();
+    for (std::list<Info>::iterator it = mRenderQueue.begin();
+            it != mRenderQueue.end(); ++it) {
+        if (it->mGraphicBuffer->handle == buf->handle) {
+            renderInfo = it;
+            break;
+        }
+    }
+    if (renderInfo == mRenderQueue.end()) {
+        // could have been canceled after fence has signaled
+        return NULL;
+    }
+
+    if (renderInfo->mIndex >= 0) {
+        // buffer has been dequeued before, so there is nothing to do
+        return NULL;
+    }
+
+    // Was this frame dropped? In theory we could infer a drop when the fence is invalid
+    // or is a dup of the queued fence, but there is no reliable way to check that here.
+    if (fenceFd < 0) {
+        // frame is new or was dropped
+        mRenderQueue.erase(renderInfo);
+        return NULL;
+    }
+
+    // store dequeue fence and buffer index
+    renderInfo->mFence = new Fence(::dup(fenceFd));
+    renderInfo->mIndex = index;
+    return &*renderInfo;
+}
+
+status_t FrameRenderTracker::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+    // ensure monotonic timestamps
+    if (mLastRenderTimeNs >= systemNano) {
+        ALOGW("[%s] Ignoring out of order/stale system nano %lld for media time %lld from codec.",
+                mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs);
+        return BAD_VALUE;
+    }
+
+    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (systemNano > now) {
+        ALOGW("[%s] Ignoring system nano %lld in the future for media time %lld from codec.",
+                mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs);
+        return OK;
+    }
+
+    mRenderQueue.emplace_back(mediaTimeUs, systemNano);
+    mLastRenderTimeNs = systemNano;
+    return OK;
+}
+
+std::list<FrameRenderTracker::Info> FrameRenderTracker::checkFencesAndGetRenderedFrames(
+        const FrameRenderTracker::Info *until, bool dropIncomplete) {
+    std::list<Info> done;
+
+    // complete any frames queued prior to this and drop any incomplete ones if requested
+    for (std::list<Info>::iterator it = mRenderQueue.begin();
+            it != mRenderQueue.end(); ) {
+        bool drop = false; // whether to drop each frame
+        if (it->mIndex < 0) {
+            // frame not yet dequeued (or already rendered on a tunneled surface)
+            drop = dropIncomplete;
+        } else if (it->mFence != NULL) {
+            // check if fence signaled
+            nsecs_t signalTime = it->mFence->getSignalTime();
+            if (signalTime < 0) { // invalid fence
+                drop = true;
+            } else if (signalTime == INT64_MAX) { // unsignaled fence
+                drop = dropIncomplete;
+            } else { // signaled
+                // save render time
+                it->mFence.clear();
+                it->mRenderTimeNs = signalTime;
+            }
+        }
+        bool foundFrame = (Info *)&*it == until;
+
+        // Return frames with signaled fences at the start of the queue, as they are
+        // in submit order, and we don't have to wait for any in-between frames.
+        // Also return any dropped frames.
+        if (drop || (it->mFence == NULL && it == mRenderQueue.begin())) {
+            // (unrendered) dropped frames have their mRenderTimeNs still set to -1
+            done.splice(done.end(), mRenderQueue, it++);
+        } else {
+            ++it;
+        }
+        if (foundFrame) {
+            break;
+        }
+    }
+
+    return done;
+}
+
+void FrameRenderTracker::untrackFrame(const FrameRenderTracker::Info *info) {
+    if (info != NULL) {
+        for (std::list<Info>::iterator it = mRenderQueue.begin();
+                it != mRenderQueue.end(); ++it) {
+            if (&*it == info) {
+                mRenderQueue.erase(it);
+                return;
+            }
+        }
+    }
+}
+
+void FrameRenderTracker::dumpRenderQueue() const {
+    ALOGI("[%s] Render Queue: (last render time: %lldns)",
+            mComponentName.c_str(), (long long)mLastRenderTimeNs);
+    for (std::list<Info>::const_iterator it = mRenderQueue.cbegin();
+            it != mRenderQueue.cend(); ++it) {
+        if (it->mFence == NULL) {
+            ALOGI("  RENDERED: handle: %p, media time: %lldus, index: %zd, render time: %lldns",
+                    it->mGraphicBuffer == NULL ? NULL : it->mGraphicBuffer->handle,
+                    (long long)it->mMediaTimeUs, it->mIndex, (long long)it->mRenderTimeNs);
+        } else if (it->mIndex < 0) {
+            ALOGI("    QUEUED: handle: %p, media time: %lldus, fence: %s",
+                    it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs,
+                    it->mFence->isValid() ? "YES" : "NO");
+        } else {
+            ALOGI("  DEQUEUED: handle: %p, media time: %lldus, index: %zd",
+                    it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs, it->mIndex);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
old mode 100644
new mode 100755
index 62612c7..8bf47b1
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1665,7 +1665,18 @@
                     return err;
                 }
             }
+            if (mPath.size() >= 2
+                    && mPath[mPath.size() - 2] == FOURCC('m', 'p', '4', 'v')) {
+                // Check if the video is MPEG2
+                ESDS esds(&buffer[4], chunk_data_size - 4);
 
+                uint8_t objectTypeIndication;
+                if (esds.getObjectTypeIndication(&objectTypeIndication) == OK) {
+                    if (objectTypeIndication >= 0x60 && objectTypeIndication <= 0x65) {
+                        mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG2);
+                    }
+                }
+            }
             break;
         }
 
@@ -2847,6 +2858,7 @@
             return ERROR_MALFORMED;
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
+            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         if (!track->meta->findData(kKeyESDS, &type, &data, &size)
                 || type != kTypeESDS) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f918d2d..7ee84a8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -397,6 +397,12 @@
     return PostAndAwaitResponse(msg, &response);
 }
 
+status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+    msg->setMessage("on-frame-rendered", notify);
+    return msg->post();
+}
+
 status_t MediaCodec::configure(
         const sp<AMessage> &format,
         const sp<Surface> &surface,
@@ -1333,6 +1339,18 @@
                     break;
                 }
 
+                case CodecBase::kWhatOutputFramesRendered:
+                {
+                    // ignore these in all states except running, and check that we have a
+                    // notification set
+                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                        sp<AMessage> notify = mOnFrameRenderedNotification->dup();
+                        notify->setMessage("data", msg);
+                        notify->post();
+                    }
+                    break;
+                }
+
                 case CodecBase::kWhatFillThisBuffer:
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -1530,6 +1548,15 @@
             break;
         }
 
+        case kWhatSetNotification:
+        {
+            sp<AMessage> notify;
+            if (msg->findMessage("on-frame-rendered", &notify)) {
+                mOnFrameRenderedNotification = notify;
+            }
+            break;
+        }
+
         case kWhatSetCallback:
         {
             sp<AReplyToken> replyID;
@@ -2372,6 +2399,23 @@
     return OK;
 }
 
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg) {
+    size_t index = 0;
+
+    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
+            it != done.cend(); ++it) {
+        if (it->getRenderTimeNs() < 0) {
+            continue; // dropped frame from tracking
+        }
+        msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
+        msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
+        ++index;
+    }
+    return index;
+}
+
 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
     size_t index;
     CHECK(msg->findSize("index", &index));
@@ -2404,26 +2448,37 @@
     if (render && info->mData != NULL && info->mData->size() != 0) {
         info->mNotify->setInt32("render", true);
 
-        int64_t timestampNs = 0;
-        if (msg->findInt64("timestampNs", &timestampNs)) {
-            info->mNotify->setInt64("timestampNs", timestampNs);
+        int64_t mediaTimeUs = -1;
+        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+
+        int64_t renderTimeNs = 0;
+        if (msg->findInt64("timestampNs", &renderTimeNs)) {
+            info->mNotify->setInt64("timestampNs", renderTimeNs);
         } else {
             // TODO: it seems like we should use the timestamp
             // in the (media)buffer as it potentially came from
             // an input surface, but we did not propagate it prior to
             // API 20.  Perhaps check for target SDK version.
 #if 0
-            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
-                ALOGV("using buffer PTS of %" PRId64, timestampNs);
-                timestampNs *= 1000;
-            }
+            ALOGV("using buffer PTS of %" PRId64, timestampNs);
+            renderTimeNs = mediaTimeUs * 1000;
 #endif
         }
 
         if (mSoftRenderer != NULL) {
-            mSoftRenderer->render(
+            std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
                     info->mData->data(), info->mData->size(),
-                    timestampNs, NULL, info->mFormat);
+                    mediaTimeUs, renderTimeNs, NULL, info->mFormat);
+
+            // if we are running, notify rendered frames
+            if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                sp<AMessage> notify = mOnFrameRenderedNotification->dup();
+                sp<AMessage> data = new AMessage;
+                if (CreateFramesRenderedMessage(doneFrames, data)) {
+                    notify->setMessage("data", data);
+                    notify->post();
+                }
+            }
         }
     }
 
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 2451641..1c663a3 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -191,7 +191,8 @@
     MyOpusExtractor(const sp<DataSource> &source)
         : MyOggExtractor(source, MEDIA_MIMETYPE_AUDIO_OPUS, /*numHeaders*/ 2, kOpusSeekPreRollUs),
           mChannelCount(0),
-          mCodecDelay(0) {
+          mCodecDelay(0),
+          mStartGranulePosition(-1) {
     }
 
     virtual uint64_t approxBitrate() const {
@@ -211,6 +212,7 @@
 
     uint8_t mChannelCount;
     uint16_t mCodecDelay;
+    int64_t mStartGranulePosition;
 };
 
 static void extractAlbumArt(
@@ -557,6 +559,37 @@
 }
 
 status_t MyOpusExtractor::readNextPacket(MediaBuffer **out) {
+    if (mOffset <= mFirstDataOffset && mStartGranulePosition < 0) {
+        // The first sample might not start at time 0; find out where by subtracting
+        // the number of samples on the first page from the granule position
+        // (position of last complete sample) of the first page. This happens
+        // the first time before we attempt to read a packet from the first page.
+        MediaBuffer *mBuf;
+        uint32_t numSamples = 0;
+        uint64_t curGranulePosition = 0;
+        while (true) {
+            status_t err = _readNextPacket(&mBuf, /* calcVorbisTimestamp = */false);
+            if (err != OK && err != ERROR_END_OF_STREAM) {
+                return err;
+            }
+            // First two pages are header pages.
+            if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) {
+                break;
+            }
+            curGranulePosition = mCurrentPage.mGranulePosition;
+            numSamples += getNumSamplesInPacket(mBuf);
+            mBuf->release();
+            mBuf = NULL;
+        }
+
+        if (curGranulePosition > numSamples) {
+            mStartGranulePosition = curGranulePosition - numSamples;
+        } else {
+            mStartGranulePosition = 0;
+        }
+        seekToOffset(0);
+    }
+
     status_t err = _readNextPacket(out, /* calcVorbisTimestamp = */false);
     if (err != OK) {
         return err;
@@ -567,6 +600,10 @@
     // We assume that we only seek to page boundaries.
     if ((*out)->meta_data()->findInt32(kKeyValidSamples, &currentPageSamples)) {
         // first packet in page
+        if (mOffset == mFirstDataOffset) {
+            currentPageSamples -= mStartGranulePosition;
+            (*out)->meta_data()->setInt32(kKeyValidSamples, currentPageSamples);
+        }
         mCurGranulePosition = mCurrentPage.mGranulePosition - currentPageSamples;
     }
 
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 695cfc8..d22451b 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -196,17 +196,29 @@
                 mNativeWindow.get(), transform));
 }
 
-void SoftwareRenderer::render(
-        const void *data, size_t size, int64_t timestampNs,
+void SoftwareRenderer::clearTracker() {
+    mRenderTracker.clear(-1 /* lastRenderTimeNs */);
+}
+
+std::list<FrameRenderTracker::Info> SoftwareRenderer::render(
+        const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs,
         void* /*platformPrivate*/, const sp<AMessage>& format) {
     resetFormatIfChanged(format);
+    FrameRenderTracker::Info *info = NULL;
 
     ANativeWindowBuffer *buf;
-    int err;
-    if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
-            &buf)) != 0) {
+    int fenceFd = -1;
+    int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+    if (err == 0 && fenceFd >= 0) {
+        info = mRenderTracker.updateInfoForDequeuedBuffer(buf, fenceFd, 0);
+        sp<Fence> fence = new Fence(fenceFd);
+        err = fence->waitForever("SoftwareRenderer::render");
+    }
+    if (err != 0) {
         ALOGW("Surface::dequeueBuffer returned error %d", err);
-        return;
+        // complete (drop) the dequeued frame if the fence wait failed; otherwise this
+        // returns an empty list, as no rendered frame should still be pending return.
+        return mRenderTracker.checkFencesAndGetRenderedFrames(info, false /* dropIncomplete */);
     }
 
     GraphicBufferMapper &mapper = GraphicBufferMapper::get();
@@ -342,16 +354,21 @@
 skip_copying:
     CHECK_EQ(0, mapper.unlock(buf->handle));
 
-    if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
-            timestampNs)) != 0) {
-        ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+    if (renderTimeNs >= 0) {
+        if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
+                renderTimeNs)) != 0) {
+            ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+        }
     }
 
-    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf,
-            -1)) != 0) {
+    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {
         ALOGW("Surface::queueBuffer returned error %d", err);
+    } else {
+        mRenderTracker.onFrameQueued(mediaTimeUs, (GraphicBuffer *)buf, Fence::NO_FENCE);
     }
+
     buf = NULL;
+    return mRenderTracker.checkFencesAndGetRenderedFrames(info, info != NULL /* dropIncomplete */);
 }
 
 }  // namespace android
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index fa3ea89..9e652d5 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,9 +19,12 @@
 #define SOFTWARE_RENDERER_H_
 
 #include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/FrameRenderTracker.h>
 #include <utils/RefBase.h>
 #include <system/window.h>
 
+#include <list>
+
 namespace android {
 
 struct AMessage;
@@ -32,9 +35,10 @@
 
     ~SoftwareRenderer();
 
-    void render(
-            const void *data, size_t size, int64_t timestampNs,
+    std::list<FrameRenderTracker::Info> render(
+            const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs,
             void *platformPrivate, const sp<AMessage> &format);
+    void clearTracker();
 
 private:
     enum YUVMode {
@@ -48,6 +52,7 @@
     int32_t mWidth, mHeight;
     int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
     int32_t mCropWidth, mCropHeight;
+    FrameRenderTracker mRenderTracker;
 
     SoftwareRenderer(const SoftwareRenderer &);
     SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 53423ec..e3c3e80 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -694,7 +694,8 @@
             status_t err = flush(event);
 
             if (err != OK) {
-                return err;
+                ALOGW("Error (%08x) happened while flushing; we simply discard "
+                      "the PES packet and continue.", err);
             }
         }
 
@@ -996,10 +997,6 @@
                 return ERROR_MALFORMED;
             }
 
-            if (br->numBitsLeft() < dataLength * 8) {
-                return ERROR_MALFORMED;
-            }
-
             onPayloadData(
                     PTS_DTS_flags, PTS, DTS, br->data(), dataLength, event);
 
@@ -1397,6 +1394,11 @@
     unsigned adaptation_field_length = br->getBits(8);
 
     if (adaptation_field_length > 0) {
+        if (adaptation_field_length * 8 > br->numBitsLeft()) {
+            ALOGV("Adaptation field should be included in a single TS packet.");
+            return ERROR_MALFORMED;
+        }
+
         unsigned discontinuity_indicator = br->getBits(1);
 
         if (discontinuity_indicator) {
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index e94adbd..cb7ab5e 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -34,6 +34,7 @@
 
 #include <OMX_AsString.h>
 #include <OMX_Component.h>
+#include <OMX_VideoExt.h>
 
 namespace android {
 
@@ -455,12 +456,35 @@
         OMX_IN OMX_EVENTTYPE eEvent,
         OMX_IN OMX_U32 nData1,
         OMX_IN OMX_U32 nData2,
-        OMX_IN OMX_PTR /* pEventData */) {
+        OMX_IN OMX_PTR pEventData) {
     ALOGV("OnEvent(%d, %" PRIu32", %" PRIu32 ")", eEvent, nData1, nData2);
 
     // Forward to OMXNodeInstance.
     findInstance(node)->onEvent(eEvent, nData1, nData2);
 
+    sp<OMX::CallbackDispatcher> dispatcher = findDispatcher(node);
+
+    // output rendered events are not processed as regular events until they hit the observer
+    if (eEvent == OMX_EventOutputRendered) {
+        if (pEventData == NULL) {
+            return OMX_ErrorBadParameter;
+        }
+
+        // process data from array
+        OMX_VIDEO_RENDEREVENTTYPE *renderData = (OMX_VIDEO_RENDEREVENTTYPE *)pEventData;
+        for (size_t i = 0; i < nData1; ++i) {
+            omx_message msg;
+            msg.type = omx_message::FRAME_RENDERED;
+            msg.node = node;
+            msg.fenceFd = -1;
+            msg.u.render_data.timestamp = renderData[i].nMediaTimeUs;
+            msg.u.render_data.nanoTime = renderData[i].nSystemTimeNs;
+
+            dispatcher->post(msg, false /* realTime */);
+        }
+        return OMX_ErrorNone;
+    }
+
     omx_message msg;
     msg.type = omx_message::EVENT;
     msg.node = node;
@@ -469,7 +493,7 @@
     msg.u.event_data.data1 = nData1;
     msg.u.event_data.data2 = nData2;
 
-    findDispatcher(node)->post(msg);
+    dispatcher->post(msg, true /* realTime */);
 
     return OMX_ErrorNone;
 }
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 7e92da8..6ee1a77 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -683,7 +683,7 @@
     }
 
     CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
-            *buffer, portIndex, "%u@%p", allottedSize, params->pointer()));
+            *buffer, portIndex, "%u(%zu)@%p", allottedSize, params->size(), params->pointer()));
     return OK;
 }
 
@@ -1024,8 +1024,8 @@
         bufferSource->addCodecBuffer(header);
     }
 
-    CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%u@%p :> %p",
-            allottedSize, params->pointer(), header->pBuffer));
+    CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %u@%p",
+            params->size(), params->pointer(), allottedSize, header->pBuffer));
 
     return OK;
 }
@@ -1087,18 +1087,6 @@
     Mutex::Autolock autoLock(mLock);
 
     OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
-    // rangeLength and rangeOffset must be a subset of the allocated data in the buffer.
-    // corner case: we permit rangeOffset == end-of-buffer with rangeLength == 0.
-    if (rangeOffset > header->nAllocLen
-            || rangeLength > header->nAllocLen - rangeOffset) {
-        if (fenceFd >= 0) {
-            ::close(fenceFd);
-        }
-        return BAD_VALUE;
-    }
-    header->nFilledLen = rangeLength;
-    header->nOffset = rangeOffset;
-
     BufferMeta *buffer_meta =
         static_cast<BufferMeta *>(header->pAppPrivate);
     sp<ABuffer> backup = buffer_meta->getBuffer(header, true /* backup */);
@@ -1112,11 +1100,25 @@
                     == kMetadataBufferTypeANWBuffer) {
         VideoNativeMetadata &backupMeta = *(VideoNativeMetadata *)backup->base();
         VideoGrallocMetadata &codecMeta = *(VideoGrallocMetadata *)codec->base();
-        ALOGV("converting ANWB %p to handle %p", backupMeta.pBuffer, backupMeta.pBuffer->handle);
+        CLOG_BUFFER(emptyBuffer, "converting ANWB %p to handle %p",
+                backupMeta.pBuffer, backupMeta.pBuffer->handle);
         codecMeta.pHandle = backupMeta.pBuffer->handle;
         codecMeta.eType = kMetadataBufferTypeGrallocSource;
-        header->nFilledLen = sizeof(codecMeta);
+        header->nFilledLen = rangeLength ? sizeof(codecMeta) : 0;
+        header->nOffset = 0;
     } else {
+        // rangeLength and rangeOffset must be a subset of the allocated data in the buffer.
+        // corner case: we permit rangeOffset == end-of-buffer with rangeLength == 0.
+        if (rangeOffset > header->nAllocLen
+                || rangeLength > header->nAllocLen - rangeOffset) {
+            if (fenceFd >= 0) {
+                ::close(fenceFd);
+            }
+            return BAD_VALUE;
+        }
+        header->nFilledLen = rangeLength;
+        header->nOffset = rangeOffset;
+
         buffer_meta->CopyToOMX(header);
     }
 
@@ -1421,7 +1423,9 @@
         }
     }
 
-    mObserver->onMessages(messages);
+    if (!messages.empty()) {
+        mObserver->onMessages(messages);
+    }
 }
 
 void OMXNodeInstance::onObserverDied(OMXMaster *master) {
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index ba47172..78dfbb1 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -11,13 +11,15 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
-	main_mediaserver.cpp
+	main_mediaserver.cpp \
+	icuutils.cpp
 
 LOCAL_SHARED_LIBRARIES := \
 	libaudioflinger \
 	libaudiopolicyservice \
 	libcamera_metadata\
 	libcameraservice \
+	libicuuc \
 	libmedialogservice \
 	libresourcemanagerservice \
 	libcutils \
diff --git a/media/mediaserver/IcuUtils.h b/media/mediaserver/IcuUtils.h
new file mode 100644
index 0000000..52fab6d
--- /dev/null
+++ b/media/mediaserver/IcuUtils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ICU_UTILS_H
+#define ICU_UTILS_H
+
+// Initializes ICU or dies trying. This must be called when the process
+// is single threaded.
+void initializeIcuOrDie();
+
+#endif  // ICU_UTILS_H
+
diff --git a/media/mediaserver/icuutils.cpp b/media/mediaserver/icuutils.cpp
new file mode 100644
index 0000000..4015849
--- /dev/null
+++ b/media/mediaserver/icuutils.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IcuUtils.h"
+
+#include "unicode/putil.h"
+#include "unicode/uclean.h"
+#include "unicode/utypes.h"
+#include "utils/Log.h"
+
+#include <stdlib.h>
+
+void initializeIcuOrDie() {
+    const char* systemPathPrefix = getenv("ANDROID_ROOT");
+    LOG_ALWAYS_FATAL_IF(systemPathPrefix == NULL, "ANDROID_ROOT environment variable not set");
+
+    char buf[256];
+    const int num_written = snprintf(buf, sizeof(buf), "%s/usr/icu/", systemPathPrefix);
+    LOG_ALWAYS_FATAL_IF((num_written < 0 || static_cast<size_t>(num_written) >= sizeof(buf)),
+            "Unable to construct ICU path.");
+
+    u_setDataDirectory(buf);
+    UErrorCode status = U_ZERO_ERROR;
+
+    // u_setDataDirectory doesn't try doing anything with the directory we gave
+    // it, so we'll have to call u_init to make sure it was successful.
+    u_init(&status);
+    LOG_ALWAYS_FATAL_IF(!U_SUCCESS(status), "Failed to initialize ICU %s", u_errorName(status));
+}
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 06b3c6e..27a40b2 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -31,6 +31,7 @@
 // from LOCAL_C_INCLUDES
 #include "AudioFlinger.h"
 #include "CameraService.h"
+#include "IcuUtils.h"
 #include "MediaLogService.h"
 #include "MediaPlayerService.h"
 #include "ResourceManagerService.h"
@@ -124,6 +125,7 @@
             prctl(PR_SET_PDEATHSIG, SIGKILL);   // if parent media.log dies before me, kill me also
             setpgid(0, 0);                      // but if I die first, don't kill my parent
         }
+        initializeIcuOrDie();
         sp<ProcessState> proc(ProcessState::self());
         sp<IServiceManager> sm = defaultServiceManager();
         ALOGI("ServiceManager: %p", sm.get());
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index dc6710f..ad445a5 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -743,7 +743,8 @@
 
 String8 channelMaskToString(audio_channel_mask_t mask, bool output) {
     String8 s;
-    const audio_channel_representation_t representation = audio_channel_mask_get_representation(mask);
+    const audio_channel_representation_t representation =
+            audio_channel_mask_get_representation(mask);
 
     switch (representation) {
     case AUDIO_CHANNEL_REPRESENTATION_POSITION: {
@@ -6709,11 +6710,8 @@
 
     audio_format_t reqFormat = mFormat;
     uint32_t samplingRate = mSampleRate;
+    // TODO this may change if we want to support capture from HDMI PCM multi channel (e.g on TVs).
     audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(mChannelCount);
-    // possible that we are > 2 channels, use channel index mask
-    if (channelMask == AUDIO_CHANNEL_INVALID && mChannelCount <= FCC_8) {
-        audio_channel_mask_for_index_assignment_from_count(mChannelCount);
-    }
 
     AudioParameter param = AudioParameter(keyValuePair);
     int value;
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 50f16098..7a785eb 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -498,6 +498,10 @@
                 device2 = availableOutputDevices.types() & AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
             }
         }
+        if (isInCall() && (strategy == STRATEGY_MEDIA)) {
+            device = getDeviceForStrategy(STRATEGY_PHONE);
+            break;
+        }
         if ((device2 == AUDIO_DEVICE_NONE) &&
                 (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
                 (outputs.getA2dpOutput() != 0)) {
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 8613ac6..280bb9d 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -359,7 +359,7 @@
         delete mMetadata;
     }
 
-    mAnw.clear();
+    mSurface.clear();
     mSurfaceTexture.clear();
     mProducer.clear();
     mConsumer.clear();
@@ -395,11 +395,11 @@
         return res;
     }
 
-    mAnw = new Surface(mProducer, /*useAsync*/ true);
-    if (mAnw == NULL) {
+    mSurface = new Surface(mProducer, /*useAsync*/ true);
+    if (mSurface == NULL) {
         return NO_MEMORY;
     }
-    res = device->createStream(mAnw, width, height, format,
+    res = device->createStream(mSurface, width, height, format,
             HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mStreamId);
     if (res) {
         return res;
@@ -653,7 +653,7 @@
 CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
     disconnectCameraDevice();
 
-    mAnw.clear();
+    mSurface.clear();
     mSurfaceTexture.clear();
     mProducer.clear();
     mConsumer.clear();
@@ -810,18 +810,18 @@
         return res;
     }
 
-    mAnw = new Surface(mProducer, /*useAsync*/ true);
-    if (mAnw == NULL) {
+    mSurface = new Surface(mProducer, /*useAsync*/ true);
+    if (mSurface == NULL) {
         return NO_MEMORY;
     }
 
-    res = native_window_api_connect(mAnw.get(), NATIVE_WINDOW_API_CAMERA);
+    res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
     if (res) {
         ALOGE("%s: Unable to connect to native window", __FUNCTION__);
         return res;
     }
 
-    return device->setPreviewWindow(mAnw);
+    return device->setPreviewWindow(mSurface);
 }
 
 status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
@@ -870,7 +870,7 @@
             CameraParameters::FLASH_MODE_OFF);
     mDevice->setParameters(mParameters);
     mDevice->stopPreview();
-    status_t res = native_window_api_disconnect(mAnw.get(),
+    status_t res = native_window_api_disconnect(mSurface.get(),
             NATIVE_WINDOW_API_CAMERA);
     if (res) {
         ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 30f01f0..4d5fe8d 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -166,7 +166,7 @@
         sp<IGraphicBufferProducer> mProducer;
         sp<IGraphicBufferConsumer>  mConsumer;
         sp<GLConsumer> mSurfaceTexture;
-        sp<ANativeWindow> mAnw;
+        sp<Surface> mSurface;
         int32_t mStreamId;
 
         Mutex mLock;
@@ -215,7 +215,7 @@
         sp<IGraphicBufferProducer> mProducer;
         sp<IGraphicBufferConsumer>  mConsumer;
         sp<GLConsumer> mSurfaceTexture;
-        sp<ANativeWindow> mAnw;
+        sp<Surface> mSurface;
 
         Mutex mLock;
 };
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ca14cdb..e109595 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -529,7 +529,7 @@
     if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
 
     sp<IBinder> binder;
-    sp<ANativeWindow> window;
+    sp<Surface> window;
     if (bufferProducer != 0) {
         binder = IInterface::asBinder(bufferProducer);
         // Using controlledByApp flag to ensure that the buffer queue remains in
@@ -541,7 +541,7 @@
 }
 
 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
-        sp<ANativeWindow> window) {
+        sp<Surface> window) {
     ATRACE_CALL();
     status_t res;
 
@@ -666,7 +666,7 @@
     status_t res;
     if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
 
-    sp<ANativeWindow> window;
+    sp<Surface> window;
     if (callbackProducer != 0) {
         window = new Surface(callbackProducer);
     }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index c6df228..c288313 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -150,7 +150,7 @@
     typedef camera2::Parameters Parameters;
 
     status_t setPreviewWindowL(const sp<IBinder>& binder,
-            sp<ANativeWindow> window);
+            sp<Surface> window);
     status_t startPreviewL(Parameters &params, bool restart);
     void     stopPreviewL();
     status_t startRecordingL(Parameters &params, bool restart);
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 143cc61..5f4fb22 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -55,7 +55,7 @@
 }
 
 status_t CallbackProcessor::setCallbackWindow(
-        sp<ANativeWindow> callbackWindow) {
+        sp<Surface> callbackWindow) {
     ATRACE_CALL();
     status_t res;
 
@@ -115,7 +115,7 @@
         BufferQueue::createBufferQueue(&producer, &consumer);
         mCallbackConsumer = new CpuConsumer(consumer, kCallbackHeapCount);
         mCallbackConsumer->setFrameAvailableListener(this);
-        mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
+        mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
         mCallbackWindow = new Surface(producer);
     }
 
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 7fdc329..a290536 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -47,7 +47,7 @@
     void onFrameAvailable(const BufferItem& item);
 
     // Set to NULL to disable the direct-to-app callback window
-    status_t setCallbackWindow(sp<ANativeWindow> callbackWindow);
+    status_t setCallbackWindow(sp<Surface> callbackWindow);
     status_t updateStream(const Parameters &params);
     status_t deleteStream();
     int getStreamId() const;
@@ -73,7 +73,7 @@
     int mCallbackStreamId;
     static const size_t kCallbackHeapCount = 6;
     sp<CpuConsumer>    mCallbackConsumer;
-    sp<ANativeWindow>  mCallbackWindow;
+    sp<Surface>        mCallbackWindow;
     sp<Camera2Heap>    mCallbackHeap;
     int mCallbackHeapId;
     size_t mCallbackHeapHead, mCallbackHeapFree;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 88987f9..bd9786f 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -87,7 +87,7 @@
         BufferQueue::createBufferQueue(&producer, &consumer);
         mCaptureConsumer = new CpuConsumer(consumer, 1);
         mCaptureConsumer->setFrameAvailableListener(this);
-        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
+        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
         mCaptureWindow = new Surface(producer);
     }
 
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index 2040b30..fbdae11 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -70,8 +70,8 @@
 
     int mCaptureStreamId;
     sp<CpuConsumer>    mCaptureConsumer;
-    sp<ANativeWindow>  mCaptureWindow;
-    sp<MemoryHeapBase>    mCaptureHeap;
+    sp<Surface>        mCaptureWindow;
+    sp<MemoryHeapBase> mCaptureHeap;
 
     virtual bool threadLoop();
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 36d143b..66d7b00 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -64,7 +64,7 @@
     deleteRecordingStream();
 }
 
-status_t StreamingProcessor::setPreviewWindow(sp<ANativeWindow> window) {
+status_t StreamingProcessor::setPreviewWindow(sp<Surface> window) {
     ATRACE_CALL();
     status_t res;
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 42e9e7a..e0cad3a 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -43,7 +43,7 @@
     StreamingProcessor(sp<Camera2Client> client);
     ~StreamingProcessor();
 
-    status_t setPreviewWindow(sp<ANativeWindow> window);
+    status_t setPreviewWindow(sp<Surface> window);
 
     bool haveValidPreviewWindow() const;
 
@@ -108,7 +108,7 @@
     int32_t mPreviewRequestId;
     int mPreviewStreamId;
     CameraMetadata mPreviewRequest;
-    sp<ANativeWindow> mPreviewWindow;
+    sp<Surface> mPreviewWindow;
 
     // Recording-related members
     static const nsecs_t kWaitDuration = 50000000; // 50 ms
@@ -117,7 +117,7 @@
     int mRecordingStreamId;
     int mRecordingFrameCount;
     sp<BufferItemConsumer> mRecordingConsumer;
-    sp<ANativeWindow>  mRecordingWindow;
+    sp<Surface>  mRecordingWindow;
     CameraMetadata mRecordingRequest;
     sp<camera2::Camera2Heap> mRecordingHeap;
 
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index d8500df..0b79b31 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -139,7 +139,7 @@
             GRALLOC_USAGE_HW_CAMERA_ZSL,
             kZslBufferDepth);
         mZslConsumer->setFrameAvailableListener(this);
-        mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
+        mZslConsumer->setName(String8("Camera2-ZslConsumer"));
         mZslWindow = new Surface(producer);
     }
 
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 5f50d7b..5870bd3 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -101,7 +101,7 @@
     int mZslStreamId;
     int mZslReprocessStreamId;
     sp<BufferItemConsumer> mZslConsumer;
-    sp<ANativeWindow>      mZslWindow;
+    sp<Surface>            mZslWindow;
 
     struct ZslPair {
         BufferItem buffer;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 4d276be..3b83f63 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -411,27 +411,28 @@
             (consumerUsage & allowedFlags) != 0;
 
     sp<IBinder> binder = IInterface::asBinder(bufferProducer);
-    sp<ANativeWindow> anw = new Surface(bufferProducer, useAsync);
+    sp<Surface> surface = new Surface(bufferProducer, useAsync);
+    ANativeWindow *anw = surface.get();
 
     int width, height, format;
     android_dataspace dataSpace;
 
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+    if ((res = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
         ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__,
               mCameraId);
         return res;
     }
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+    if ((res = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
         ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__,
               mCameraId);
         return res;
     }
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) {
+    if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
         ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__,
               mCameraId);
         return res;
     }
-    if ((res = anw->query(anw.get(), NATIVE_WINDOW_DEFAULT_DATASPACE,
+    if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
                             reinterpret_cast<int*>(&dataSpace))) != OK) {
         ALOGE("%s: Camera %d: Failed to query Surface dataSpace", __FUNCTION__,
               mCameraId);
@@ -456,7 +457,7 @@
     }
 
     int streamId = -1;
-    res = mDevice->createStream(anw, width, height, format, dataSpace,
+    res = mDevice->createStream(surface, width, height, format, dataSpace,
                                 static_cast<camera3_stream_rotation_t>
                                         (outputConfiguration.getRotation()),
                                 &streamId);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 27c33a3..06177e3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -106,7 +106,7 @@
      * For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the
      * logical dimensions of the buffer, not the number of bytes.
      */
-    virtual status_t createStream(sp<ANativeWindow> consumer,
+    virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id) = 0;
 
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 88f555b..dfe5565 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -240,7 +240,7 @@
     return mRequestQueue.waitForDequeue(requestId, timeout);
 }
 
-status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
+status_t Camera2Device::createStream(sp<Surface> consumer,
         uint32_t width, uint32_t height, int format,
         android_dataspace /*dataSpace*/, camera3_stream_rotation_t rotation, int *id) {
     ATRACE_CALL();
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index a001a91..c9f3a2c 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -56,7 +56,7 @@
                                              int64_t *lastFrameNumber = NULL);
     virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
     virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
-    virtual status_t createStream(sp<ANativeWindow> consumer,
+    virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id);
     virtual status_t createInputStream(
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 852751c..c28a57e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -57,6 +57,7 @@
 
 Camera3Device::Camera3Device(int id):
         mId(id),
+        mIsConstrainedHighSpeedConfiguration(false),
         mHal3Device(NULL),
         mStatus(STATUS_UNINITIALIZED),
         mUsePartialResult(false),
@@ -420,7 +421,7 @@
     }
     lines.appendFormat("    Stream configuration:\n");
     lines.appendFormat("    Operation mode: %s \n", mIsConstrainedHighSpeedConfiguration ?
-            "CONSTAINED HIGH SPEED VIDEO" : "NORMAL");
+            "CONSTRAINED HIGH SPEED VIDEO" : "NORMAL");
 
     if (mInputStream != NULL) {
         write(fd, lines.string(), lines.size());
@@ -816,7 +817,7 @@
     return OK;
 }
 
-status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
+status_t Camera3Device::createStream(sp<Surface> consumer,
         uint32_t width, uint32_t height, int format, android_dataspace dataSpace,
         camera3_stream_rotation_t rotation, int *id) {
     ATRACE_CALL();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 180b1f8..e2fd8d4 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -94,7 +94,7 @@
     // Actual stream creation/deletion is delayed until first request is submitted
     // If adding streams while actively capturing, will pause device before adding
     // stream, reconfiguring device, and unpausing.
-    virtual status_t createStream(sp<ANativeWindow> consumer,
+    virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id);
     virtual status_t createInputStream(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 7a0331b..8c611d5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -32,7 +32,7 @@
 namespace camera3 {
 
 Camera3OutputStream::Camera3OutputStream(int id,
-        sp<ANativeWindow> consumer,
+        sp<Surface> consumer,
         uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera3_stream_rotation_t rotation) :
         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
@@ -48,7 +48,7 @@
 }
 
 Camera3OutputStream::Camera3OutputStream(int id,
-        sp<ANativeWindow> consumer,
+        sp<Surface> consumer,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera3_stream_rotation_t rotation) :
         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
@@ -229,6 +229,7 @@
     (void) args;
     String8 lines;
     lines.appendFormat("    Stream[%d]: Output\n", mId);
+    lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
     write(fd, lines.string(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
@@ -278,6 +279,8 @@
         return res;
     }
 
+    mConsumerName = mConsumer->getConsumerName();
+
     res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
     if (res != OK) {
         ALOGE("%s: Unable to configure usage %08x for stream %d",
@@ -326,7 +329,8 @@
     }
 
     int maxConsumerBuffers;
-    res = mConsumer->query(mConsumer.get(),
+    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
+            mConsumer.get(),
             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
     if (res != OK) {
         ALOGE("%s: Unable to query consumer undequeued"
@@ -401,7 +405,7 @@
 
     status_t res;
     int32_t u = 0;
-    res = mConsumer->query(mConsumer.get(),
+    res = static_cast<ANativeWindow*>(mConsumer.get())->query(mConsumer.get(),
             NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
 
     // If an opaque output stream's endpoint is ImageReader, add
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 513b695..941d693 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -38,7 +38,7 @@
     /**
      * Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
      */
-    Camera3OutputStream(int id, sp<ANativeWindow> consumer,
+    Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation);
 
@@ -46,7 +46,7 @@
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
      */
-    Camera3OutputStream(int id, sp<ANativeWindow> consumer,
+    Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation);
 
@@ -81,7 +81,7 @@
 
     virtual status_t disconnectLocked();
 
-    sp<ANativeWindow> mConsumer;
+    sp<Surface> mConsumer;
   private:
     int               mTransform;
 
@@ -89,6 +89,9 @@
 
     bool mTraceFirstBuffer;
 
+    // Name of Surface consumer
+    String8           mConsumerName;
+
     /**
      * Internal Camera3Stream interface
      */
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 10d7f2e..eefcb44 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -122,6 +122,7 @@
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
     mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL, bufferCount);
+    mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
     mConsumer = new Surface(producer);
 }