am 3371ce02: (-s ours) am 93c38623: am c3708121: am 2d945b6a: am 4855cc3d: Merge "DO NOT MERGE Avoid size_t overflow in base64 decoding once again" into lmp-dev

* commit '3371ce02b725abbd5304933862fc8e02821197c4':
  DO NOT MERGE Avoid size_t overflow in base64 decoding once again
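
As a rough, hypothetical sketch of the kind of check the subject line describes (not the actual AOSP base64 patch, whose hunks are not reproduced here), a decoder can size its output buffer without letting the size computation wrap around in size_t:

    #include <cstddef>

    // Hypothetical illustration only, not the AOSP fix: compute the maximum
    // decoded size for a base64 input of encodedLen characters.
    static bool decodedBase64Size(size_t encodedLen, size_t* outLen) {
        if (encodedLen % 4 != 0) {
            return false;  // well-formed base64 arrives in 4-character groups
        }
        // Divide before multiplying: (encodedLen / 4) * 3 never exceeds
        // encodedLen, whereas computing 3 * encodedLen first could overflow
        // size_t for very large inputs.
        *outLen = (encodedLen / 4) * 3;
        return true;
    }
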
diff --git a/camera/Android.mk b/camera/Android.mk
index da5ac59..471cb0d 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -28,14 +28,13 @@
 	ICameraClient.cpp \
 	ICameraService.cpp \
 	ICameraServiceListener.cpp \
+	ICameraServiceProxy.cpp \
 	ICameraRecordingProxy.cpp \
 	ICameraRecordingProxyListener.cpp \
-	IProCameraUser.cpp \
-	IProCameraCallbacks.cpp \
 	camera2/ICameraDeviceUser.cpp \
 	camera2/ICameraDeviceCallbacks.cpp \
 	camera2/CaptureRequest.cpp \
-	ProCamera.cpp \
+	camera2/OutputConfiguration.cpp \
 	CameraBase.cpp \
 	CameraUtils.cpp \
 	VendorTagDescriptor.cpp
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 3a9fb4c..9bf3134 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -97,7 +97,8 @@
         c->mStatus = NO_ERROR;
         camera = c;
     } else {
-        ALOGW("An error occurred while connecting to camera: %d", cameraId);
+        ALOGW("An error occurred while connecting to camera %d: %d (%s)",
+                cameraId, status, strerror(-status));
         c.clear();
     }
     return status;
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 65a1a47..5d50aa8 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -29,7 +29,6 @@
 #include <camera/ICameraService.h>
 
 // needed to instantiate
-#include <camera/ProCamera.h>
 #include <camera/Camera.h>
 
 #include <system/camera_metadata.h>
@@ -217,7 +216,6 @@
     return cs->removeListener(listener);
 }
 
-template class CameraBase<ProCamera>;
 template class CameraBase<Camera>;
 
 } // namespace android
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 043437f..46bcc1d 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -74,7 +74,7 @@
     clear();
 }
 
-const camera_metadata_t* CameraMetadata::getAndLock() {
+const camera_metadata_t* CameraMetadata::getAndLock() const {
     mLocked = true;
     return mBuffer;
 }
@@ -289,6 +289,17 @@
         ALOGE("%s: Tag %d not found", __FUNCTION__, tag);
         return BAD_VALUE;
     }
+    // Safety check - ensure that data isn't pointing to this metadata, since
+    // that would get invalidated if a resize is needed
+    size_t bufferSize = get_camera_metadata_size(mBuffer);
+    uintptr_t bufAddr = reinterpret_cast<uintptr_t>(mBuffer);
+    uintptr_t dataAddr = reinterpret_cast<uintptr_t>(data);
+    if (dataAddr > bufAddr && dataAddr < (bufAddr + bufferSize)) {
+        ALOGE("%s: Update attempted with data from the same metadata buffer!",
+                __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
     size_t data_size = calculate_camera_metadata_entry_data_size(type,
             data_count);
 
@@ -583,7 +594,7 @@
      */
     WritableBlob blob;
     do {
-        res = data.writeBlob(blobSize, &blob);
+        res = data.writeBlob(blobSize, false, &blob);
         if (res != OK) {
             break;
         }
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index e5e4e90..68969cf 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -526,8 +526,12 @@
         !strcmp(format, PIXEL_FORMAT_RGBA8888) ?
             HAL_PIXEL_FORMAT_RGBA_8888 :    // RGB8888
         !strcmp(format, PIXEL_FORMAT_BAYER_RGGB) ?
-            HAL_PIXEL_FORMAT_RAW_SENSOR :   // Raw sensor data
+            HAL_PIXEL_FORMAT_RAW16 :   // Raw sensor data
         -1;
 }
 
+bool CameraParameters::isEmpty() const {
+    return mMap.isEmpty();
+}
+
 }; // namespace android
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index fc3e437..b359f57 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -2,16 +2,16 @@
 **
 ** Copyright 2008, The Android Open Source Project
 **
-** Licensed under the Apache License, Version 2.0 (the "License"); 
-** you may not use this file except in compliance with the License. 
-** You may obtain a copy of the License at 
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
 **
-**     http://www.apache.org/licenses/LICENSE-2.0 
+**     http://www.apache.org/licenses/LICENSE-2.0
 **
-** Unless required by applicable law or agreed to in writing, software 
-** distributed under the License is distributed on an "AS IS" BASIS, 
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-** See the License for the specific language governing permissions and 
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
 ** limitations under the License.
 */
 
@@ -20,6 +20,7 @@
 #include <utils/Errors.h>
 #include <utils/String16.h>
 
+#include <inttypes.h>
 #include <stdint.h>
 #include <sys/types.h>
 
@@ -29,8 +30,6 @@
 
 #include <camera/ICameraService.h>
 #include <camera/ICameraServiceListener.h>
-#include <camera/IProCameraUser.h>
-#include <camera/IProCameraCallbacks.h>
 #include <camera/ICamera.h>
 #include <camera/ICameraClient.h>
 #include <camera/camera2/ICameraDeviceUser.h>
@@ -95,11 +94,18 @@
     {
     }
 
-    // get number of cameras available
+    // get number of cameras available that support standard camera operations
     virtual int32_t getNumberOfCameras()
     {
+        return getNumberOfCameras(CAMERA_TYPE_BACKWARD_COMPATIBLE);
+    }
+
+    // get number of cameras available of a given type
+    virtual int32_t getNumberOfCameras(int type)
+    {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
+        data.writeInt32(type);
         remote()->transact(BnCameraService::GET_NUMBER_OF_CAMERAS, data, &reply);
 
         if (readExceptionCode(reply)) return 0;
@@ -176,10 +182,13 @@
         data.writeInt32(cameraId);
         data.writeString16(clientPackageName);
         data.writeInt32(clientUid);
-        remote()->transact(BnCameraService::CONNECT, data, &reply);
+
+        status_t status;
+        status = remote()->transact(BnCameraService::CONNECT, data, &reply);
+        if (status != OK) return status;
 
         if (readExceptionCode(reply)) return -EPROTO;
-        status_t status = reply.readInt32();
+        status = reply.readInt32();
         if (reply.readInt32() != 0) {
             device = interface_cast<ICamera>(reply.readStrongBinder());
         }
@@ -199,36 +208,31 @@
         data.writeInt32(halVersion);
         data.writeString16(clientPackageName);
         data.writeInt32(clientUid);
-        remote()->transact(BnCameraService::CONNECT_LEGACY, data, &reply);
+
+        status_t status;
+        status = remote()->transact(BnCameraService::CONNECT_LEGACY, data, &reply);
+        if (status != OK) return status;
 
         if (readExceptionCode(reply)) return -EPROTO;
-        status_t status = reply.readInt32();
+        status = reply.readInt32();
         if (reply.readInt32() != 0) {
             device = interface_cast<ICamera>(reply.readStrongBinder());
         }
         return status;
     }
 
-    // connect to camera service (pro client)
-    virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, int cameraId,
-                                const String16 &clientPackageName, int clientUid,
-                                /*out*/
-                                sp<IProCameraUser>& device)
+    virtual status_t setTorchMode(const String16& cameraId, bool enabled,
+            const sp<IBinder>& clientBinder)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(cameraCb));
-        data.writeInt32(cameraId);
-        data.writeString16(clientPackageName);
-        data.writeInt32(clientUid);
-        remote()->transact(BnCameraService::CONNECT_PRO, data, &reply);
+        data.writeString16(cameraId);
+        data.writeInt32(enabled ? 1 : 0);
+        data.writeStrongBinder(clientBinder);
+        remote()->transact(BnCameraService::SET_TORCH_MODE, data, &reply);
 
         if (readExceptionCode(reply)) return -EPROTO;
-        status_t status = reply.readInt32();
-        if (reply.readInt32() != 0) {
-            device = interface_cast<IProCameraUser>(reply.readStrongBinder());
-        }
-        return status;
+        return reply.readInt32();
     }
 
     // connect to camera service (android.hardware.camera2.CameraDevice)
@@ -246,10 +250,13 @@
         data.writeInt32(cameraId);
         data.writeString16(clientPackageName);
         data.writeInt32(clientUid);
-        remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply);
+
+        status_t status;
+        status = remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply);
+        if (status != OK) return status;
 
         if (readExceptionCode(reply)) return -EPROTO;
-        status_t status = reply.readInt32();
+        status = reply.readInt32();
         if (reply.readInt32() != 0) {
             device = interface_cast<ICameraDeviceUser>(reply.readStrongBinder());
         }
@@ -285,6 +292,7 @@
         }
 
         Parcel data, reply;
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
 
         data.writeInt32(cameraId);
         remote()->transact(BnCameraService::GET_LEGACY_PARAMETERS, data, &reply);
@@ -304,6 +312,7 @@
     virtual status_t supportsCameraApi(int cameraId, int apiVersion) {
         Parcel data, reply;
 
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
         data.writeInt32(cameraId);
         data.writeInt32(apiVersion);
         remote()->transact(BnCameraService::SUPPORTS_CAMERA_API, data, &reply);
@@ -312,6 +321,16 @@
         status_t res = data.readInt32();
         return res;
     }
+
+    virtual void notifySystemEvent(int32_t eventId, const int32_t* args, size_t len) {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
+        data.writeInt32(eventId);
+        data.writeInt32Array(len, args);
+        remote()->transact(BnCameraService::NOTIFY_SYSTEM_EVENT, data, &reply,
+                IBinder::FLAG_ONEWAY);
+    }
+
 };
 
 IMPLEMENT_META_INTERFACE(CameraService, "android.hardware.ICameraService");
@@ -325,7 +344,7 @@
         case GET_NUMBER_OF_CAMERAS: {
             CHECK_INTERFACE(ICameraService, data, reply);
             reply->writeNoException();
-            reply->writeInt32(getNumberOfCameras());
+            reply->writeInt32(getNumberOfCameras(data.readInt32()));
             return NO_ERROR;
         } break;
         case GET_CAMERA_INFO: {
@@ -390,26 +409,6 @@
             }
             return NO_ERROR;
         } break;
-        case CONNECT_PRO: {
-            CHECK_INTERFACE(ICameraService, data, reply);
-            sp<IProCameraCallbacks> cameraClient =
-                interface_cast<IProCameraCallbacks>(data.readStrongBinder());
-            int32_t cameraId = data.readInt32();
-            const String16 clientName = data.readString16();
-            int32_t clientUid = data.readInt32();
-            sp<IProCameraUser> camera;
-            status_t status = connectPro(cameraClient, cameraId,
-                    clientName, clientUid, /*out*/camera);
-            reply->writeNoException();
-            reply->writeInt32(status);
-            if (camera != NULL) {
-                reply->writeInt32(1);
-                reply->writeStrongBinder(IInterface::asBinder(camera));
-            } else {
-                reply->writeInt32(0);
-            }
-            return NO_ERROR;
-        } break;
         case CONNECT_DEVICE: {
             CHECK_INTERFACE(ICameraService, data, reply);
             sp<ICameraDeviceCallbacks> cameraClient =
@@ -490,6 +489,41 @@
             }
             return NO_ERROR;
         } break;
+        case SET_TORCH_MODE: {
+            CHECK_INTERFACE(ICameraService, data, reply);
+            String16 cameraId = data.readString16();
+            bool enabled = data.readInt32() != 0 ? true : false;
+            const sp<IBinder> clientBinder = data.readStrongBinder();
+            status_t status = setTorchMode(cameraId, enabled, clientBinder);
+            reply->writeNoException();
+            reply->writeInt32(status);
+            return NO_ERROR;
+        } break;
+        case NOTIFY_SYSTEM_EVENT: {
+            CHECK_INTERFACE(ICameraService, data, reply);
+            int32_t eventId = data.readInt32();
+            int32_t len = data.readInt32();
+            if (len < 0) {
+                ALOGE("%s: Received poorly formatted length in binder request: notifySystemEvent.",
+                        __FUNCTION__);
+                return FAILED_TRANSACTION;
+            }
+            if (len > 512) {
+                ALOGE("%s: Length %" PRIi32 " too long in binder request: notifySystemEvent.",
+                        __FUNCTION__, len);
+                return FAILED_TRANSACTION;
+            }
+            int32_t events[len];
+            memset(events, 0, sizeof(int32_t) * len);
+            status_t status = data.read(events, sizeof(int32_t) * len);
+            if (status != NO_ERROR) {
+                ALOGE("%s: Received poorly formatted binder request: notifySystemEvent.",
+                        __FUNCTION__);
+                return FAILED_TRANSACTION;
+            }
+            notifySystemEvent(eventId, events, len);
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/ICameraServiceListener.cpp b/camera/ICameraServiceListener.cpp
index b2f1729..0010325 100644
--- a/camera/ICameraServiceListener.cpp
+++ b/camera/ICameraServiceListener.cpp
@@ -29,6 +29,7 @@
 namespace {
     enum {
         STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION,
+        TORCH_STATUS_CHANGED,
     };
 }; // namespace anonymous
 
@@ -44,8 +45,7 @@
     virtual void onStatusChanged(Status status, int32_t cameraId)
     {
         Parcel data, reply;
-        data.writeInterfaceToken(
-                              ICameraServiceListener::getInterfaceDescriptor());
+        data.writeInterfaceToken(ICameraServiceListener::getInterfaceDescriptor());
 
         data.writeInt32(static_cast<int32_t>(status));
         data.writeInt32(cameraId);
@@ -54,19 +54,29 @@
                            data,
                            &reply,
                            IBinder::FLAG_ONEWAY);
+    }
 
-        reply.readExceptionCode();
+    virtual void onTorchStatusChanged(TorchStatus status, const String16 &cameraId)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraServiceListener::getInterfaceDescriptor());
+
+        data.writeInt32(static_cast<int32_t>(status));
+        data.writeString16(cameraId);
+
+        remote()->transact(TORCH_STATUS_CHANGED,
+                           data,
+                           &reply,
+                           IBinder::FLAG_ONEWAY);
     }
 };
 
-IMPLEMENT_META_INTERFACE(CameraServiceListener,
-                         "android.hardware.ICameraServiceListener");
+IMPLEMENT_META_INTERFACE(CameraServiceListener, "android.hardware.ICameraServiceListener");
 
 // ----------------------------------------------------------------------
 
-status_t BnCameraServiceListener::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
+status_t BnCameraServiceListener::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+        uint32_t flags) {
     switch(code) {
         case STATUS_CHANGED: {
             CHECK_INTERFACE(ICameraServiceListener, data, reply);
@@ -75,7 +85,16 @@
             int32_t cameraId = data.readInt32();
 
             onStatusChanged(status, cameraId);
-            reply->writeNoException();
+
+            return NO_ERROR;
+        } break;
+        case TORCH_STATUS_CHANGED: {
+            CHECK_INTERFACE(ICameraServiceListener, data, reply);
+
+            TorchStatus status = static_cast<TorchStatus>(data.readInt32());
+            String16 cameraId = data.readString16();
+
+            onTorchStatusChanged(status, cameraId);
 
             return NO_ERROR;
         } break;
diff --git a/camera/ICameraServiceProxy.cpp b/camera/ICameraServiceProxy.cpp
new file mode 100644
index 0000000..06a5afb
--- /dev/null
+++ b/camera/ICameraServiceProxy.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BpCameraServiceProxy"
+
+#include <stdint.h>
+
+#include <binder/Parcel.h>
+
+#include <camera/ICameraServiceProxy.h>
+
+namespace android {
+
+class BpCameraServiceProxy: public BpInterface<ICameraServiceProxy> {
+public:
+    BpCameraServiceProxy(const sp<IBinder>& impl) : BpInterface<ICameraServiceProxy>(impl) {}
+
+    virtual void pingForUserUpdate() {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
+        remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, &reply,
+                IBinder::FLAG_ONEWAY);
+    }
+};
+
+
+IMPLEMENT_META_INTERFACE(CameraServiceProxy, "android.hardware.ICameraServiceProxy");
+
+status_t BnCameraServiceProxy::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+        uint32_t flags) {
+    switch(code) {
+        case PING_FOR_USER_UPDATE: {
+            CHECK_INTERFACE(ICameraServiceProxy, data, reply);
+            pingForUserUpdate();
+            return NO_ERROR;
+        } break;
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+}; // namespace android
+
diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp
deleted file mode 100644
index bd3d420..0000000
--- a/camera/IProCameraCallbacks.cpp
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-**
-** Copyright 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IProCameraCallbacks"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/Surface.h>
-#include <utils/Mutex.h>
-
-#include <camera/IProCameraCallbacks.h>
-
-#include "camera/CameraMetadata.h"
-
-namespace android {
-
-enum {
-    NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION,
-    LOCK_STATUS_CHANGED,
-    RESULT_RECEIVED,
-};
-
-class BpProCameraCallbacks: public BpInterface<IProCameraCallbacks>
-{
-public:
-    BpProCameraCallbacks(const sp<IBinder>& impl)
-        : BpInterface<IProCameraCallbacks>(impl)
-    {
-    }
-
-    // generic callback from camera service to app
-    void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
-    {
-        ALOGV("notifyCallback");
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
-        data.writeInt32(msgType);
-        data.writeInt32(ext1);
-        data.writeInt32(ext2);
-        remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void onLockStatusChanged(LockStatus newLockStatus) {
-        ALOGV("onLockStatusChanged");
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
-        data.writeInt32(newLockStatus);
-        remote()->transact(LOCK_STATUS_CHANGED, data, &reply,
-                           IBinder::FLAG_ONEWAY);
-    }
-
-    void onResultReceived(int32_t requestId, camera_metadata* result) {
-        ALOGV("onResultReceived");
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
-        data.writeInt32(requestId);
-        CameraMetadata::writeToParcel(data, result);
-        remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(ProCameraCallbacks,
-                                        "android.hardware.IProCameraCallbacks");
-
-// ----------------------------------------------------------------------
-
-status_t BnProCameraCallbacks::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    ALOGV("onTransact - code = %d", code);
-    switch(code) {
-        case NOTIFY_CALLBACK: {
-            ALOGV("NOTIFY_CALLBACK");
-            CHECK_INTERFACE(IProCameraCallbacks, data, reply);
-            int32_t msgType = data.readInt32();
-            int32_t ext1 = data.readInt32();
-            int32_t ext2 = data.readInt32();
-            notifyCallback(msgType, ext1, ext2);
-            return NO_ERROR;
-        } break;
-        case LOCK_STATUS_CHANGED: {
-            ALOGV("LOCK_STATUS_CHANGED");
-            CHECK_INTERFACE(IProCameraCallbacks, data, reply);
-            LockStatus newLockStatus
-                                 = static_cast<LockStatus>(data.readInt32());
-            onLockStatusChanged(newLockStatus);
-            return NO_ERROR;
-        } break;
-        case RESULT_RECEIVED: {
-            ALOGV("RESULT_RECEIVED");
-            CHECK_INTERFACE(IProCameraCallbacks, data, reply);
-            int32_t requestId = data.readInt32();
-            camera_metadata_t *result = NULL;
-            CameraMetadata::readFromParcel(data, &result);
-            onResultReceived(requestId, result);
-            return NO_ERROR;
-            break;
-        }
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp
deleted file mode 100644
index 9bd7597..0000000
--- a/camera/IProCameraUser.cpp
+++ /dev/null
@@ -1,324 +0,0 @@
-/*
-**
-** Copyright 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "IProCameraUser"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <camera/IProCameraUser.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/Surface.h>
-#include "camera/CameraMetadata.h"
-
-namespace android {
-
-enum {
-    DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    CONNECT,
-    EXCLUSIVE_TRY_LOCK,
-    EXCLUSIVE_LOCK,
-    EXCLUSIVE_UNLOCK,
-    HAS_EXCLUSIVE_LOCK,
-    SUBMIT_REQUEST,
-    CANCEL_REQUEST,
-    DELETE_STREAM,
-    CREATE_STREAM,
-    CREATE_DEFAULT_REQUEST,
-    GET_CAMERA_INFO,
-};
-
-class BpProCameraUser: public BpInterface<IProCameraUser>
-{
-public:
-    BpProCameraUser(const sp<IBinder>& impl)
-        : BpInterface<IProCameraUser>(impl)
-    {
-    }
-
-    // disconnect from camera service
-    void disconnect()
-    {
-        ALOGV("disconnect");
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        remote()->transact(DISCONNECT, data, &reply);
-        reply.readExceptionCode();
-    }
-
-    virtual status_t connect(const sp<IProCameraCallbacks>& cameraClient)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(cameraClient));
-        remote()->transact(CONNECT, data, &reply);
-        return reply.readInt32();
-    }
-
-    /* Shared ProCameraUser */
-
-    virtual status_t exclusiveTryLock()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        remote()->transact(EXCLUSIVE_TRY_LOCK, data, &reply);
-        return reply.readInt32();
-    }
-    virtual status_t exclusiveLock()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        remote()->transact(EXCLUSIVE_LOCK, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t exclusiveUnlock()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        remote()->transact(EXCLUSIVE_UNLOCK, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool hasExclusiveLock()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        remote()->transact(HAS_EXCLUSIVE_LOCK, data, &reply);
-        return !!reply.readInt32();
-    }
-
-    virtual int submitRequest(camera_metadata_t* metadata, bool streaming)
-    {
-
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-
-        // arg0+arg1
-        CameraMetadata::writeToParcel(data, metadata);
-
-        // arg2 = streaming (bool)
-        data.writeInt32(streaming);
-
-        remote()->transact(SUBMIT_REQUEST, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t cancelRequest(int requestId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeInt32(requestId);
-
-        remote()->transact(CANCEL_REQUEST, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t deleteStream(int streamId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeInt32(streamId);
-
-        remote()->transact(DELETE_STREAM, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t createStream(int width, int height, int format,
-                          const sp<IGraphicBufferProducer>& bufferProducer,
-                          /*out*/
-                          int* streamId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeInt32(width);
-        data.writeInt32(height);
-        data.writeInt32(format);
-
-        sp<IBinder> b(IInterface::asBinder(bufferProducer));
-        data.writeStrongBinder(b);
-
-        remote()->transact(CREATE_STREAM, data, &reply);
-
-        int sId = reply.readInt32();
-        if (streamId) {
-            *streamId = sId;
-        }
-        return reply.readInt32();
-    }
-
-    // Create a request object from a template.
-    virtual status_t createDefaultRequest(int templateId,
-                                 /*out*/
-                                  camera_metadata** request)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeInt32(templateId);
-        remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply);
-        CameraMetadata::readFromParcel(reply, /*out*/request);
-        return reply.readInt32();
-    }
-
-
-    virtual status_t getCameraInfo(int cameraId, camera_metadata** info)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-        data.writeInt32(cameraId);
-        remote()->transact(GET_CAMERA_INFO, data, &reply);
-        CameraMetadata::readFromParcel(reply, /*out*/info);
-        return reply.readInt32();
-    }
-
-
-private:
-
-
-};
-
-IMPLEMENT_META_INTERFACE(ProCameraUser, "android.hardware.IProCameraUser");
-
-// ----------------------------------------------------------------------
-
-status_t BnProCameraUser::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch(code) {
-        case DISCONNECT: {
-            ALOGV("DISCONNECT");
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            disconnect();
-            reply->writeNoException();
-            return NO_ERROR;
-        } break;
-        case CONNECT: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            sp<IProCameraCallbacks> cameraClient =
-                   interface_cast<IProCameraCallbacks>(data.readStrongBinder());
-            reply->writeInt32(connect(cameraClient));
-            return NO_ERROR;
-        } break;
-
-        /* Shared ProCameraUser */
-        case EXCLUSIVE_TRY_LOCK: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            reply->writeInt32(exclusiveTryLock());
-            return NO_ERROR;
-        } break;
-        case EXCLUSIVE_LOCK: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            reply->writeInt32(exclusiveLock());
-            return NO_ERROR;
-        } break;
-        case EXCLUSIVE_UNLOCK: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            reply->writeInt32(exclusiveUnlock());
-            return NO_ERROR;
-        } break;
-        case HAS_EXCLUSIVE_LOCK: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            reply->writeInt32(hasExclusiveLock());
-            return NO_ERROR;
-        } break;
-        case SUBMIT_REQUEST: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            camera_metadata_t* metadata;
-            CameraMetadata::readFromParcel(data, /*out*/&metadata);
-
-            // arg2 = streaming (bool)
-            bool streaming = data.readInt32();
-
-            // return code: requestId (int32)
-            reply->writeInt32(submitRequest(metadata, streaming));
-
-            return NO_ERROR;
-        } break;
-        case CANCEL_REQUEST: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            int requestId = data.readInt32();
-            reply->writeInt32(cancelRequest(requestId));
-            return NO_ERROR;
-        } break;
-        case DELETE_STREAM: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            int streamId = data.readInt32();
-            reply->writeInt32(deleteStream(streamId));
-            return NO_ERROR;
-        } break;
-        case CREATE_STREAM: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-            int width, height, format;
-
-            width = data.readInt32();
-            height = data.readInt32();
-            format = data.readInt32();
-
-            sp<IGraphicBufferProducer> bp =
-               interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
-
-            int streamId = -1;
-            status_t ret;
-            ret = createStream(width, height, format, bp, &streamId);
-
-            reply->writeInt32(streamId);
-            reply->writeInt32(ret);
-
-            return NO_ERROR;
-        } break;
-
-        case CREATE_DEFAULT_REQUEST: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-
-            int templateId = data.readInt32();
-
-            camera_metadata_t* request = NULL;
-            status_t ret;
-            ret = createDefaultRequest(templateId, &request);
-
-            CameraMetadata::writeToParcel(*reply, request);
-            reply->writeInt32(ret);
-
-            free_camera_metadata(request);
-
-            return NO_ERROR;
-        } break;
-        case GET_CAMERA_INFO: {
-            CHECK_INTERFACE(IProCameraUser, data, reply);
-
-            int cameraId = data.readInt32();
-
-            camera_metadata_t* info = NULL;
-            status_t ret;
-            ret = getCameraInfo(cameraId, &info);
-
-            CameraMetadata::writeToParcel(*reply, info);
-            reply->writeInt32(ret);
-
-            free_camera_metadata(info);
-
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
deleted file mode 100644
index 48f8e8e..0000000
--- a/camera/ProCamera.cpp
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
-**
-** Copyright (C) 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ProCamera"
-#include <utils/Log.h>
-#include <utils/threads.h>
-#include <utils/Mutex.h>
-
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
-#include <binder/IMemory.h>
-
-#include <camera/ProCamera.h>
-#include <camera/IProCameraUser.h>
-#include <camera/IProCameraCallbacks.h>
-
-#include <gui/IGraphicBufferProducer.h>
-
-#include <system/camera_metadata.h>
-
-namespace android {
-
-sp<ProCamera> ProCamera::connect(int cameraId)
-{
-    return CameraBaseT::connect(cameraId, String16(),
-                                 ICameraService::USE_CALLING_UID);
-}
-
-ProCamera::ProCamera(int cameraId)
-    : CameraBase(cameraId)
-{
-}
-
-CameraTraits<ProCamera>::TCamConnectService CameraTraits<ProCamera>::fnConnectService =
-        &ICameraService::connectPro;
-
-ProCamera::~ProCamera()
-{
-
-}
-
-/* IProCameraUser's implementation */
-
-// callback from camera service
-void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
-{
-    return CameraBaseT::notifyCallback(msgType, ext1, ext2);
-}
-
-void ProCamera::onLockStatusChanged(
-                                 IProCameraCallbacks::LockStatus newLockStatus)
-{
-    ALOGV("%s: newLockStatus = %d", __FUNCTION__, newLockStatus);
-
-    sp<ProCameraListener> listener;
-    {
-        Mutex::Autolock _l(mLock);
-        listener = mListener;
-    }
-    if (listener != NULL) {
-        switch (newLockStatus) {
-            case IProCameraCallbacks::LOCK_ACQUIRED:
-                listener->onLockAcquired();
-                break;
-            case IProCameraCallbacks::LOCK_RELEASED:
-                listener->onLockReleased();
-                break;
-            case IProCameraCallbacks::LOCK_STOLEN:
-                listener->onLockStolen();
-                break;
-            default:
-                ALOGE("%s: Unknown lock status: %d",
-                      __FUNCTION__, newLockStatus);
-        }
-    }
-}
-
-void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) {
-    ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result);
-
-    sp<ProCameraListener> listener;
-    {
-        Mutex::Autolock _l(mLock);
-        listener = mListener;
-    }
-
-    CameraMetadata tmp(result);
-
-    // Unblock waitForFrame(id) callers
-    {
-        Mutex::Autolock al(mWaitMutex);
-        mMetadataReady = true;
-        mLatestMetadata = tmp; // make copy
-        mWaitCondition.broadcast();
-    }
-
-    result = tmp.release();
-
-    if (listener != NULL) {
-        listener->onResultReceived(requestId, result);
-    } else {
-        free_camera_metadata(result);
-    }
-
-}
-
-status_t ProCamera::exclusiveTryLock()
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->exclusiveTryLock();
-}
-status_t ProCamera::exclusiveLock()
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->exclusiveLock();
-}
-status_t ProCamera::exclusiveUnlock()
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->exclusiveUnlock();
-}
-bool ProCamera::hasExclusiveLock()
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->hasExclusiveLock();
-}
-
-// Note that the callee gets a copy of the metadata.
-int ProCamera::submitRequest(const struct camera_metadata* metadata,
-                             bool streaming)
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->submitRequest(const_cast<struct camera_metadata*>(metadata),
-                            streaming);
-}
-
-status_t ProCamera::cancelRequest(int requestId)
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->cancelRequest(requestId);
-}
-
-status_t ProCamera::deleteStream(int streamId)
-{
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    status_t s = c->deleteStream(streamId);
-
-    mStreams.removeItem(streamId);
-
-    return s;
-}
-
-status_t ProCamera::createStream(int width, int height, int format,
-                                 const sp<Surface>& surface,
-                                 /*out*/
-                                 int* streamId)
-{
-    *streamId = -1;
-
-    ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
-                                                                       format);
-
-    if (surface == 0) {
-        return BAD_VALUE;
-    }
-
-    return createStream(width, height, format,
-                        surface->getIGraphicBufferProducer(),
-                        streamId);
-}
-
-status_t ProCamera::createStream(int width, int height, int format,
-                                 const sp<IGraphicBufferProducer>& bufferProducer,
-                                 /*out*/
-                                 int* streamId) {
-    *streamId = -1;
-
-    ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
-                                                                       format);
-
-    if (bufferProducer == 0) {
-        return BAD_VALUE;
-    }
-
-    sp <IProCameraUser> c = mCamera;
-    status_t stat = c->createStream(width, height, format, bufferProducer,
-                                    streamId);
-
-    if (stat == OK) {
-        StreamInfo s(*streamId);
-
-        mStreams.add(*streamId, s);
-    }
-
-    return stat;
-}
-
-status_t ProCamera::createStreamCpu(int width, int height, int format,
-                                    int heapCount,
-                                    /*out*/
-                                    sp<CpuConsumer>* cpuConsumer,
-                                    int* streamId) {
-    return createStreamCpu(width, height, format, heapCount,
-                           /*synchronousMode*/true,
-                           cpuConsumer, streamId);
-}
-
-status_t ProCamera::createStreamCpu(int width, int height, int format,
-                                    int heapCount,
-                                    bool synchronousMode,
-                                    /*out*/
-                                    sp<CpuConsumer>* cpuConsumer,
-                                    int* streamId)
-{
-    ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
-                                                                        format);
-
-    *cpuConsumer = NULL;
-
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    sp<IGraphicBufferProducer> producer;
-    sp<IGraphicBufferConsumer> consumer;
-    BufferQueue::createBufferQueue(&producer, &consumer);
-    sp<CpuConsumer> cc = new CpuConsumer(consumer, heapCount
-            /*, synchronousMode*/);
-    cc->setName(String8("ProCamera::mCpuConsumer"));
-
-    sp<Surface> stc = new Surface(producer);
-
-    status_t s = createStream(width, height, format,
-                              stc->getIGraphicBufferProducer(),
-                              streamId);
-
-    if (s != OK) {
-        ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__,
-                    width, height, format);
-        return s;
-    }
-
-    sp<ProFrameListener> frameAvailableListener =
-        new ProFrameListener(this, *streamId);
-
-    getStreamInfo(*streamId).cpuStream = true;
-    getStreamInfo(*streamId).cpuConsumer = cc;
-    getStreamInfo(*streamId).synchronousMode = synchronousMode;
-    getStreamInfo(*streamId).stc = stc;
-    // for lifetime management
-    getStreamInfo(*streamId).frameAvailableListener = frameAvailableListener;
-
-    cc->setFrameAvailableListener(frameAvailableListener);
-
-    *cpuConsumer = cc;
-
-    return s;
-}
-
-camera_metadata* ProCamera::getCameraInfo(int cameraId) {
-    ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId);
-
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NULL;
-
-    camera_metadata* ptr = NULL;
-    status_t status = c->getCameraInfo(cameraId, &ptr);
-
-    if (status != OK) {
-        ALOGE("%s: Failed to get camera info, error = %d", __FUNCTION__, status);
-    }
-
-    return ptr;
-}
-
-status_t ProCamera::createDefaultRequest(int templateId,
-                                             camera_metadata** request) const {
-    ALOGV("%s: templateId = %d", __FUNCTION__, templateId);
-
-    sp <IProCameraUser> c = mCamera;
-    if (c == 0) return NO_INIT;
-
-    return c->createDefaultRequest(templateId, request);
-}
-
-void ProCamera::onFrameAvailable(int streamId) {
-    ALOGV("%s: streamId = %d", __FUNCTION__, streamId);
-
-    sp<ProCameraListener> listener = mListener;
-    StreamInfo& stream = getStreamInfo(streamId);
-
-    if (listener.get() != NULL) {
-        listener->onFrameAvailable(streamId, stream.cpuConsumer);
-    }
-
-    // Unblock waitForFrame(id) callers
-    {
-        Mutex::Autolock al(mWaitMutex);
-        getStreamInfo(streamId).frameReady++;
-        mWaitCondition.broadcast();
-    }
-}
-
-int ProCamera::waitForFrameBuffer(int streamId) {
-    status_t stat = BAD_VALUE;
-    Mutex::Autolock al(mWaitMutex);
-
-    StreamInfo& si = getStreamInfo(streamId);
-
-    if (si.frameReady > 0) {
-        int numFrames = si.frameReady;
-        si.frameReady = 0;
-        return numFrames;
-    } else {
-        while (true) {
-            stat = mWaitCondition.waitRelative(mWaitMutex,
-                                                mWaitTimeout);
-            if (stat != OK) {
-                ALOGE("%s: Error while waiting for frame buffer: %d",
-                    __FUNCTION__, stat);
-                return stat;
-            }
-
-            if (si.frameReady > 0) {
-                int numFrames = si.frameReady;
-                si.frameReady = 0;
-                return numFrames;
-            }
-            // else it was some other stream that got unblocked
-        }
-    }
-
-    return stat;
-}
-
-int ProCamera::dropFrameBuffer(int streamId, int count) {
-    StreamInfo& si = getStreamInfo(streamId);
-
-    if (!si.cpuStream) {
-        return BAD_VALUE;
-    } else if (count < 0) {
-        return BAD_VALUE;
-    }
-
-    if (!si.synchronousMode) {
-        ALOGW("%s: No need to drop frames on asynchronous streams,"
-              " as asynchronous mode only keeps 1 latest frame around.",
-              __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    int numDropped = 0;
-    for (int i = 0; i < count; ++i) {
-        CpuConsumer::LockedBuffer buffer;
-        if (si.cpuConsumer->lockNextBuffer(&buffer) != OK) {
-            break;
-        }
-
-        si.cpuConsumer->unlockBuffer(buffer);
-        numDropped++;
-    }
-
-    return numDropped;
-}
-
-status_t ProCamera::waitForFrameMetadata() {
-    status_t stat = BAD_VALUE;
-    Mutex::Autolock al(mWaitMutex);
-
-    if (mMetadataReady) {
-        return OK;
-    } else {
-        while (true) {
-            stat = mWaitCondition.waitRelative(mWaitMutex,
-                                               mWaitTimeout);
-
-            if (stat != OK) {
-                ALOGE("%s: Error while waiting for metadata: %d",
-                        __FUNCTION__, stat);
-                return stat;
-            }
-
-            if (mMetadataReady) {
-                mMetadataReady = false;
-                return OK;
-            }
-            // else it was some other stream or metadata
-        }
-    }
-
-    return stat;
-}
-
-CameraMetadata ProCamera::consumeFrameMetadata() {
-    Mutex::Autolock al(mWaitMutex);
-
-    // Destructive: Subsequent calls return empty metadatas
-    CameraMetadata tmp = mLatestMetadata;
-    mLatestMetadata.clear();
-
-    return tmp;
-}
-
-ProCamera::StreamInfo& ProCamera::getStreamInfo(int streamId) {
-    return mStreams.editValueFor(streamId);
-}
-
-}; // namespace android
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 66d6913..4217bc6 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -81,6 +81,13 @@
         mSurfaceList.push_back(surface);
     }
 
+    int isReprocess = 0;
+    if ((err = parcel->readInt32(&isReprocess)) != OK) {
+        ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+        return err;
+    }
+    mIsReprocess = (isReprocess != 0);
+
     return OK;
 }
 
@@ -118,6 +125,8 @@
         parcel->writeStrongBinder(binder);
     }
 
+    parcel->writeInt32(mIsReprocess ? 1 : 0);
+
     return OK;
 }
 
diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp
index 4cc7b5d..f599879 100644
--- a/camera/camera2/ICameraDeviceCallbacks.cpp
+++ b/camera/camera2/ICameraDeviceCallbacks.cpp
@@ -37,6 +37,7 @@
     CAMERA_IDLE,
     CAPTURE_STARTED,
     RESULT_RECEIVED,
+    PREPARED
 };
 
 class BpCameraDeviceCallbacks: public BpInterface<ICameraDeviceCallbacks>
@@ -80,7 +81,6 @@
         data.writeNoException();
     }
 
-
     void onResultReceived(const CameraMetadata& metadata,
             const CaptureResultExtras& resultExtras) {
         ALOGV("onResultReceived");
@@ -93,6 +93,17 @@
         remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
         data.writeNoException();
     }
+
+    void onPrepared(int streamId)
+    {
+        ALOGV("onPrepared");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
+        data.writeInt32(streamId);
+        remote()->transact(PREPARED, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeNoException();
+    }
+
 };
 
 IMPLEMENT_META_INTERFACE(CameraDeviceCallbacks,
@@ -160,6 +171,15 @@
             data.readExceptionCode();
             return NO_ERROR;
         } break;
+        case PREPARED: {
+            ALOGV("onPrepared");
+            CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
+            CaptureResultExtras result;
+            int streamId = data.readInt32();
+            onPrepared(streamId);
+            data.readExceptionCode();
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index 277b5db..d2dc200 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -26,6 +26,7 @@
 #include <gui/Surface.h>
 #include <camera/CameraMetadata.h>
 #include <camera/camera2/CaptureRequest.h>
+#include <camera/camera2/OutputConfiguration.h>
 
 namespace android {
 
@@ -41,10 +42,14 @@
     END_CONFIGURE,
     DELETE_STREAM,
     CREATE_STREAM,
+    CREATE_INPUT_STREAM,
+    GET_INPUT_SURFACE,
     CREATE_DEFAULT_REQUEST,
     GET_CAMERA_INFO,
     WAIT_UNTIL_IDLE,
-    FLUSH
+    FLUSH,
+    PREPARE,
+    TEAR_DOWN
 };
 
 namespace {
@@ -78,7 +83,7 @@
         reply.readExceptionCode();
     }
 
-    virtual status_t submitRequest(sp<CaptureRequest> request, bool repeating,
+    virtual int submitRequest(sp<CaptureRequest> request, bool repeating,
                               int64_t *lastFrameNumber)
     {
         Parcel data, reply;
@@ -107,13 +112,13 @@
             }
         }
 
-	if ((res < NO_ERROR) || (resFrameNumber != NO_ERROR)) {
+        if (res < 0 || (resFrameNumber != NO_ERROR)) {
             res = FAILED_TRANSACTION;
         }
         return res;
     }
 
-    virtual status_t submitRequestList(List<sp<CaptureRequest> > requestList, bool repeating,
+    virtual int submitRequestList(List<sp<CaptureRequest> > requestList, bool repeating,
                                   int64_t *lastFrameNumber)
     {
         Parcel data, reply;
@@ -147,7 +152,7 @@
                 resFrameNumber = reply.readInt64(lastFrameNumber);
             }
         }
-        if ((res < NO_ERROR) || (resFrameNumber != NO_ERROR)) {
+        if (res < 0 || (resFrameNumber != NO_ERROR)) {
             res = FAILED_TRANSACTION;
         }
         return res;
@@ -186,11 +191,13 @@
         return reply.readInt32();
     }
 
-    virtual status_t endConfigure()
+    virtual status_t endConfigure(bool isConstrainedHighSpeed)
     {
         ALOGV("endConfigure");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+        data.writeInt32(isConstrainedHighSpeed);
+
         remote()->transact(END_CONFIGURE, data, &reply);
         reply.readExceptionCode();
         return reply.readInt32();
@@ -208,8 +215,23 @@
         return reply.readInt32();
     }
 
-    virtual status_t createStream(int width, int height, int format,
-                          const sp<IGraphicBufferProducer>& bufferProducer)
+    virtual status_t createStream(const OutputConfiguration& outputConfiguration)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+        if (outputConfiguration.getGraphicBufferProducer() != NULL) {
+            data.writeInt32(1); // marker that OutputConfiguration is not null. Mimic aidl behavior
+            outputConfiguration.writeToParcel(data);
+        } else {
+            data.writeInt32(0);
+        }
+        remote()->transact(CREATE_STREAM, data, &reply);
+
+        reply.readExceptionCode();
+        return reply.readInt32();
+    }
+
+    virtual status_t createInputStream(int width, int height, int format)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
@@ -217,17 +239,42 @@
         data.writeInt32(height);
         data.writeInt32(format);
 
-        data.writeInt32(1); // marker that bufferProducer is not null
-        data.writeString16(String16("unknown_name")); // name of surface
-        sp<IBinder> b(IInterface::asBinder(bufferProducer));
-        data.writeStrongBinder(b);
-
-        remote()->transact(CREATE_STREAM, data, &reply);
+        remote()->transact(CREATE_INPUT_STREAM, data, &reply);
 
         reply.readExceptionCode();
         return reply.readInt32();
     }
 
+    // get the buffer producer of the input stream
+    virtual status_t getInputBufferProducer(
+            sp<IGraphicBufferProducer> *producer) {
+        if (producer == NULL) {
+            return BAD_VALUE;
+        }
+
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+
+        remote()->transact(GET_INPUT_SURFACE, data, &reply);
+
+        reply.readExceptionCode();
+        status_t result = reply.readInt32() ;
+        if (result != OK) {
+            return result;
+        }
+
+        sp<IGraphicBufferProducer> bp = NULL;
+        if (reply.readInt32() != 0) {
+            String16 name = readMaybeEmptyString16(reply);
+            bp = interface_cast<IGraphicBufferProducer>(
+                    reply.readStrongBinder());
+        }
+
+        *producer = bp;
+
+        return *producer == NULL ? INVALID_OPERATION : OK;
+    }
+
     // Create a request object from a template.
     virtual status_t createDefaultRequest(int templateId,
                                           /*out*/
@@ -305,6 +352,34 @@
         return res;
     }
 
+    virtual status_t prepare(int streamId)
+    {
+        ALOGV("prepare");
+        Parcel data, reply;
+
+        data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+        data.writeInt32(streamId);
+
+        remote()->transact(PREPARE, data, &reply);
+
+        reply.readExceptionCode();
+        return reply.readInt32();
+    }
+
+    virtual status_t tearDown(int streamId)
+    {
+        ALOGV("tearDown");
+        Parcel data, reply;
+
+        data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+        data.writeInt32(streamId);
+
+        remote()->transact(TEAR_DOWN, data, &reply);
+
+        reply.readExceptionCode();
+        return reply.readInt32();
+    }
+
 private:
 
 
@@ -396,31 +471,15 @@
         } break;
         case CREATE_STREAM: {
             CHECK_INTERFACE(ICameraDeviceUser, data, reply);
-            int width, height, format;
 
-            width = data.readInt32();
-            ALOGV("%s: CREATE_STREAM: width = %d", __FUNCTION__, width);
-            height = data.readInt32();
-            ALOGV("%s: CREATE_STREAM: height = %d", __FUNCTION__, height);
-            format = data.readInt32();
-            ALOGV("%s: CREATE_STREAM: format = %d", __FUNCTION__, format);
-
-            sp<IGraphicBufferProducer> bp;
+            status_t ret = BAD_VALUE;
             if (data.readInt32() != 0) {
-                String16 name = readMaybeEmptyString16(data);
-                bp = interface_cast<IGraphicBufferProducer>(
-                        data.readStrongBinder());
-
-                ALOGV("%s: CREATE_STREAM: bp = %p, name = %s", __FUNCTION__,
-                      bp.get(), String8(name).string());
+                OutputConfiguration outputConfiguration(data);
+                ret = createStream(outputConfiguration);
             } else {
-                ALOGV("%s: CREATE_STREAM: bp = unset, name = unset",
-                      __FUNCTION__);
+                ALOGE("%s: cannot take an empty OutputConfiguration", __FUNCTION__);
             }
 
-            status_t ret;
-            ret = createStream(width, height, format, bp);
-
             reply->writeNoException();
             ALOGV("%s: CREATE_STREAM: write noException", __FUNCTION__);
             reply->writeInt32(ret);
@@ -428,7 +487,35 @@
 
             return NO_ERROR;
         } break;
+        case CREATE_INPUT_STREAM: {
+            CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+            int width, height, format;
 
+            width = data.readInt32();
+            height = data.readInt32();
+            format = data.readInt32();
+            status_t ret = createInputStream(width, height, format);
+
+            reply->writeNoException();
+            reply->writeInt32(ret);
+            return NO_ERROR;
+
+        } break;
+        case GET_INPUT_SURFACE: {
+            CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+
+            sp<IGraphicBufferProducer> bp;
+            status_t ret = getInputBufferProducer(&bp);
+            sp<IBinder> b(IInterface::asBinder(ret == OK ? bp : NULL));
+
+            reply->writeNoException();
+            reply->writeInt32(ret);
+            reply->writeInt32(1);
+            reply->writeString16(String16("camera input")); // name of surface
+            reply->writeStrongBinder(b);
+
+            return NO_ERROR;
+        } break;
         case CREATE_DEFAULT_REQUEST: {
             CHECK_INTERFACE(ICameraDeviceUser, data, reply);
 
@@ -486,10 +573,26 @@
         } break;
         case END_CONFIGURE: {
             CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+            bool isConstrainedHighSpeed = data.readInt32();
             reply->writeNoException();
-            reply->writeInt32(endConfigure());
+            reply->writeInt32(endConfigure(isConstrainedHighSpeed));
             return NO_ERROR;
         } break;
+        case PREPARE: {
+            CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+            int streamId = data.readInt32();
+            reply->writeNoException();
+            reply->writeInt32(prepare(streamId));
+            return NO_ERROR;
+        } break;
+        case TEAR_DOWN: {
+            CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+            int streamId = data.readInt32();
+            reply->writeNoException();
+            reply->writeInt32(tearDown(streamId));
+            return NO_ERROR;
+        } break;
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
new file mode 100644
index 0000000..20a23e0
--- /dev/null
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -0,0 +1,84 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "OutputConfiguration"
+#include <utils/Log.h>
+
+#include <camera/camera2/OutputConfiguration.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+
+const int OutputConfiguration::INVALID_ROTATION = -1;
+
+// Read empty strings without printing a false error message.
+String16 OutputConfiguration::readMaybeEmptyString16(const Parcel& parcel) {
+    size_t len;
+    const char16_t* str = parcel.readString16Inplace(&len);
+    if (str != NULL) {
+        return String16(str, len);
+    } else {
+        return String16();
+    }
+}
+
+sp<IGraphicBufferProducer> OutputConfiguration::getGraphicBufferProducer() const {
+    return mGbp;
+}
+
+int OutputConfiguration::getRotation() const {
+    return mRotation;
+}
+
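+// Unmarshal in the same order writeToParcel() writes: rotation, surface name,
+// then the IGraphicBufferProducer binder.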
+OutputConfiguration::OutputConfiguration(const Parcel& parcel) {
+    status_t err;
+    int rotation = 0;
+    if ((err = parcel.readInt32(&rotation)) != OK) {
+        ALOGE("%s: Failed to read rotation from parcel", __FUNCTION__);
+        mGbp = NULL;
+        mRotation = INVALID_ROTATION;
+        return;
+    }
+
+    String16 name = readMaybeEmptyString16(parcel);
+    const sp<IGraphicBufferProducer>& gbp =
+            interface_cast<IGraphicBufferProducer>(parcel.readStrongBinder());
+    mGbp = gbp;
+    mRotation = rotation;
+
+    ALOGV("%s: OutputConfiguration: bp = %p, name = %s", __FUNCTION__,
+          gbp.get(), String8(name).string());
+}
+
+OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation) {
+    mGbp = gbp;
+    mRotation = rotation;
+}
+
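+// Keep in sync with the parcel constructor above: rotation, name, then binder.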
+status_t OutputConfiguration::writeToParcel(Parcel& parcel) const {
+
+    parcel.writeInt32(mRotation);
+    parcel.writeString16(String16("unknown_name")); // name of surface
+    sp<IBinder> b(IInterface::asBinder(mGbp));
+    parcel.writeStrongBinder(b);
+
+    return OK;
+}
+
+}; // namespace android
+
diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk
index 2db4c14..3777d94 100644
--- a/camera/tests/Android.mk
+++ b/camera/tests/Android.mk
@@ -17,8 +17,8 @@
 LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
 
 LOCAL_SRC_FILES:= \
-	ProCameraTests.cpp \
-	VendorTagDescriptorTests.cpp
+	VendorTagDescriptorTests.cpp \
+	CameraBinderTests.cpp
 
 LOCAL_SHARED_LIBRARIES := \
 	libutils \
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
new file mode 100644
index 0000000..572fb72
--- /dev/null
+++ b/camera/tests/CameraBinderTests.cpp
@@ -0,0 +1,484 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CameraBinderTests"
+
+#include <binder/IInterface.h>
+#include <binder/IServiceManager.h>
+#include <binder/Parcel.h>
+#include <binder/ProcessState.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/List.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+#include <system/graphics.h>
+#include <hardware/gralloc.h>
+
+#include <camera/CameraMetadata.h>
+#include <camera/ICameraService.h>
+#include <camera/ICameraServiceListener.h>
+#include <camera/camera2/CaptureRequest.h>
+#include <camera/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
+#include <camera/camera2/OutputConfiguration.h>
+
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+
+#include <gtest/gtest.h>
+#include <unistd.h>
+#include <stdint.h>
+#include <utility>
+#include <vector>
+#include <map>
+#include <algorithm>
+
+using namespace android;
+
+#define ASSERT_NOT_NULL(x) \
+    ASSERT_TRUE((x) != nullptr)
+
+#define SETUP_TIMEOUT 2000000000 // ns
+#define IDLE_TIMEOUT 2000000000 // ns
+
+// Stub listener implementation
+class TestCameraServiceListener : public BnCameraServiceListener {
+    std::map<String16, TorchStatus> mCameraTorchStatuses;
+    std::map<int32_t, Status> mCameraStatuses;
+    mutable Mutex mLock;
+    mutable Condition mCondition;
+    mutable Condition mTorchCondition;
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual void onStatusChanged(Status status, int32_t cameraId) {
+        Mutex::Autolock l(mLock);
+        mCameraStatuses[cameraId] = status;
+        mCondition.broadcast();
+    };
+
+    virtual void onTorchStatusChanged(TorchStatus status, const String16& cameraId) {
+        Mutex::Autolock l(mLock);
+        mCameraTorchStatuses[cameraId] = status;
+        mTorchCondition.broadcast();
+    };
+
+    bool waitForNumCameras(size_t num) const {
+        Mutex::Autolock l(mLock);
+
+        if (mCameraStatuses.size() == num) {
+            return true;
+        }
+
+        while (mCameraStatuses.size() < num) {
+            if (mCondition.waitRelative(mLock, SETUP_TIMEOUT) != OK) {
+                return false;
+            }
+        }
+        return true;
+    };
+
+    bool waitForTorchState(TorchStatus status, int32_t cameraId) const {
+        Mutex::Autolock l(mLock);
+
+        const auto& iter = mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+        if (iter != mCameraTorchStatuses.end() && iter->second == status) {
+            return true;
+        }
+
+        bool foundStatus = false;
+        while (!foundStatus) {
+            if (mTorchCondition.waitRelative(mLock, SETUP_TIMEOUT) != OK) {
+                return false;
+            }
+            const auto& iter =
+                    mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+            foundStatus = (iter != mCameraTorchStatuses.end() && iter->second == status);
+        }
+        return true;
+    };
+
+    TorchStatus getTorchStatus(int32_t cameraId) const {
+        Mutex::Autolock l(mLock);
+        const auto& iter = mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+        if (iter == mCameraTorchStatuses.end()) {
+            return ICameraServiceListener::TORCH_STATUS_UNKNOWN;
+        }
+        return iter->second;
+    };
+
+    Status getStatus(int32_t cameraId) const {
+        Mutex::Autolock l(mLock);
+        const auto& iter = mCameraStatuses.find(cameraId);
+        if (iter == mCameraStatuses.end()) {
+            return ICameraServiceListener::STATUS_UNKNOWN;
+        }
+        return iter->second;
+    };
+};
+
+// Callback implementation
+class TestCameraDeviceCallbacks : public BnCameraDeviceCallbacks {
+public:
+    enum Status {
+        IDLE,
+        ERROR,
+        PREPARED,
+        RUNNING,
+        SENT_RESULT,
+        UNINITIALIZED
+    };
+
+protected:
+    bool mError;
+    Status mLastStatus;
+    mutable std::vector<Status> mStatusesHit;
+    mutable Mutex mLock;
+    mutable Condition mStatusCondition;
+public:
+    TestCameraDeviceCallbacks() : mError(false), mLastStatus(UNINITIALIZED) {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual void onDeviceError(CameraErrorCode errorCode,
+            const CaptureResultExtras& resultExtras) {
+        ALOGE("%s: onDeviceError occurred with: %d", __FUNCTION__, static_cast<int>(errorCode));
+        Mutex::Autolock l(mLock);
+        mError = true;
+        mLastStatus = ERROR;
+        mStatusesHit.push_back(mLastStatus);
+        mStatusCondition.broadcast();
+    }
+
+    virtual void onDeviceIdle() {
+        Mutex::Autolock l(mLock);
+        mLastStatus = IDLE;
+        mStatusesHit.push_back(mLastStatus);
+        mStatusCondition.broadcast();
+    }
+
+    virtual void onCaptureStarted(const CaptureResultExtras& resultExtras,
+            int64_t timestamp) {
+        Mutex::Autolock l(mLock);
+        mLastStatus = RUNNING;
+        mStatusesHit.push_back(mLastStatus);
+        mStatusCondition.broadcast();
+    }
+
+
+    virtual void onResultReceived(const CameraMetadata& metadata,
+            const CaptureResultExtras& resultExtras) {
+        Mutex::Autolock l(mLock);
+        mLastStatus = SENT_RESULT;
+        mStatusesHit.push_back(mLastStatus);
+        mStatusCondition.broadcast();
+    }
+
+    virtual void onPrepared(int streamId) {
+        Mutex::Autolock l(mLock);
+        mLastStatus = PREPARED;
+        mStatusesHit.push_back(mLastStatus);
+        mStatusCondition.broadcast();
+    }
+
+    // Test helper functions:
+
+    bool hadError() const {
+        Mutex::Autolock l(mLock);
+        return mError;
+    }
+
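+    // Returns true if `status` was the most recent callback, or once it shows up in
+    // the list of statuses seen; returns false if IDLE_TIMEOUT expires first.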
+    bool waitForStatus(Status status) const {
+        Mutex::Autolock l(mLock);
+        if (mLastStatus == status) {
+            return true;
+        }
+
+        while (std::find(mStatusesHit.begin(), mStatusesHit.end(), status)
+                == mStatusesHit.end()) {
+
+            if (mStatusCondition.waitRelative(mLock, IDLE_TIMEOUT) != OK) {
+                mStatusesHit.clear();
+                return false;
+            }
+        }
+        mStatusesHit.clear();
+
+        return true;
+
+    }
+
+    void clearStatus() const {
+        Mutex::Autolock l(mLock);
+        mStatusesHit.clear();
+    }
+
+    bool waitForIdle() const {
+        return waitForStatus(IDLE);
+    }
+
+};
+
+// Exercise basic binder calls for the camera service
+TEST(CameraServiceBinderTest, CheckBinderCameraService) {
+    ProcessState::self()->startThreadPool();
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.camera"));
+    ASSERT_NOT_NULL(binder);
+    sp<ICameraService> service = interface_cast<ICameraService>(binder);
+
+
+    int32_t numCameras = service->getNumberOfCameras();
+    EXPECT_LE(0, numCameras);
+
+    // Check listener binder calls
+    sp<TestCameraServiceListener> listener(new TestCameraServiceListener());
+    EXPECT_EQ(OK, service->addListener(listener));
+
+    EXPECT_TRUE(listener->waitForNumCameras(numCameras));
+
+    for (int32_t i = 0; i < numCameras; i++) {
+        // We only care about binder calls for the Camera2 API; Camera1 is deprecated.
+        status_t camera2Support = service->supportsCameraApi(i, ICameraService::API_VERSION_2);
+        if (camera2Support != OK) {
+            EXPECT_EQ(-EOPNOTSUPP, camera2Support);
+            continue;
+        }
+
+        // Check metadata binder call
+        CameraMetadata metadata;
+        EXPECT_EQ(OK, service->getCameraCharacteristics(i, &metadata));
+        EXPECT_FALSE(metadata.isEmpty());
+
+        // Make sure the camera is available; otherwise skip its device tests
+        ICameraServiceListener::Status s = listener->getStatus(i);
+        EXPECT_EQ(ICameraServiceListener::STATUS_AVAILABLE, s);
+        if (s != ICameraServiceListener::STATUS_AVAILABLE) {
+            continue;
+        }
+
+        // Check connect binder calls
+        sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
+        sp<ICameraDeviceUser> device;
+        EXPECT_EQ(OK, service->connectDevice(callbacks, i, String16("meeeeeeeee!"),
+                ICameraService::USE_CALLING_UID, /*out*/device));
+        ASSERT_NE(nullptr, device.get());
+        device->disconnect();
+        EXPECT_FALSE(callbacks->hadError());
+
+        ICameraServiceListener::TorchStatus torchStatus = listener->getTorchStatus(i);
+        if (torchStatus == ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
+            // Check torch calls
+            EXPECT_EQ(OK, service->setTorchMode(String16(String8::format("%d", i)),
+                    /*enabled*/true, callbacks));
+            EXPECT_TRUE(listener->waitForTorchState(
+                    ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
+            EXPECT_EQ(OK, service->setTorchMode(String16(String8::format("%d", i)),
+                    /*enabled*/false, callbacks));
+            EXPECT_TRUE(listener->waitForTorchState(
+                    ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
+        }
+    }
+
+    EXPECT_EQ(OK, service->removeListener(listener));
+}
+
+// Test fixture for client focused binder tests
+class CameraClientBinderTest : public testing::Test {
+protected:
+    sp<ICameraService> service;
+    int32_t numCameras;
+    std::vector<std::pair<sp<TestCameraDeviceCallbacks>, sp<ICameraDeviceUser>>> openDeviceList;
+    sp<TestCameraServiceListener> serviceListener;
+
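+    // Connects a new ICameraDeviceUser for deviceId and tracks it in openDeviceList
+    // so TearDown() can disconnect any devices a test leaves open.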
+    std::pair<sp<TestCameraDeviceCallbacks>, sp<ICameraDeviceUser>> openNewDevice(int deviceId) {
+
+        sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
+        sp<ICameraDeviceUser> device;
+        {
+            SCOPED_TRACE("openNewDevice");
+            EXPECT_EQ(OK, service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
+                    ICameraService::USE_CALLING_UID, /*out*/device));
+        }
+        auto p = std::make_pair(callbacks, device);
+        openDeviceList.push_back(p);
+        return p;
+    }
+
+    void closeDevice(std::pair<sp<TestCameraDeviceCallbacks>, sp<ICameraDeviceUser>>& p) {
+        if (p.second.get() != nullptr) {
+            p.second->disconnect();
+            {
+                SCOPED_TRACE("closeDevice");
+                EXPECT_FALSE(p.first->hadError());
+            }
+        }
+        auto iter = std::find(openDeviceList.begin(), openDeviceList.end(), p);
+        if (iter != openDeviceList.end()) {
+            openDeviceList.erase(iter);
+        }
+    }
+
+    virtual void SetUp() {
+        ProcessState::self()->startThreadPool();
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->getService(String16("media.camera"));
+        service = interface_cast<ICameraService>(binder);
+        serviceListener = new TestCameraServiceListener();
+        service->addListener(serviceListener);
+        numCameras = service->getNumberOfCameras();
+    }
+
+    virtual void TearDown() {
+        service = nullptr;
+        numCameras = 0;
+        for (auto& p : openDeviceList) {
+            closeDevice(p);
+        }
+    }
+
+};
+
+TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
+    ASSERT_NOT_NULL(service);
+
+    EXPECT_TRUE(serviceListener->waitForNumCameras(numCameras));
+    for (int32_t i = 0; i < numCameras; i++) {
+        // Make sure the camera is available; otherwise skip its device tests
+        ICameraServiceListener::Status s = serviceListener->getStatus(i);
+        EXPECT_EQ(ICameraServiceListener::STATUS_AVAILABLE, s);
+        if (s != ICameraServiceListener::STATUS_AVAILABLE) {
+            continue;
+        }
+
+        auto p = openNewDevice(i);
+        sp<TestCameraDeviceCallbacks> callbacks = p.first;
+        sp<ICameraDeviceUser> device = p.second;
+
+        // Set up a buffer queue; the implementation-defined (vendor opaque) format is
+        // used here since it is guaranteed to be present
+        sp<IGraphicBufferProducer> gbProducer;
+        sp<IGraphicBufferConsumer> gbConsumer;
+        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/2, /*controlledByApp*/true);
+        EXPECT_TRUE(opaqueConsumer.get() != nullptr);
+        opaqueConsumer->setName(String8("nom nom nom"));
+
+        // Default to VGA dimensions, as those are guaranteed to be present
+        EXPECT_EQ(OK, gbConsumer->setDefaultBufferSize(640, 480));
+        EXPECT_EQ(OK, gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
+
+        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+
+        OutputConfiguration output(gbProducer, /*rotation*/0);
+
+        // Can we configure?
+        EXPECT_EQ(OK, device->beginConfigure());
+        status_t streamId = device->createStream(output);
+        EXPECT_LE(0, streamId);
+        EXPECT_EQ(OK, device->endConfigure());
+        EXPECT_FALSE(callbacks->hadError());
+
+        // Can we make requests?
+        CameraMetadata requestTemplate;
+        EXPECT_EQ(OK, device->createDefaultRequest(/*preview template*/1,
+                /*out*/&requestTemplate));
+        sp<CaptureRequest> request(new CaptureRequest());
+        request->mMetadata = requestTemplate;
+        request->mSurfaceList.add(surface);
+        request->mIsReprocess = false;
+        int64_t lastFrameNumber = 0;
+        int64_t lastFrameNumberPrev = 0;
+        callbacks->clearStatus();
+        int requestId = device->submitRequest(request, /*streaming*/true, /*out*/&lastFrameNumber);
+        EXPECT_TRUE(callbacks->waitForStatus(TestCameraDeviceCallbacks::SENT_RESULT));
+        EXPECT_LE(0, requestId);
+
+        // Can we stop requests?
+        EXPECT_EQ(OK, device->cancelRequest(requestId, /*out*/&lastFrameNumber));
+        EXPECT_TRUE(callbacks->waitForIdle());
+        EXPECT_FALSE(callbacks->hadError());
+
+        // Can we do it again?
+        lastFrameNumberPrev = lastFrameNumber;
+        lastFrameNumber = 0;
+        requestTemplate.clear();
+        EXPECT_EQ(OK, device->createDefaultRequest(/*preview template*/1,
+                /*out*/&requestTemplate));
+        sp<CaptureRequest> request2(new CaptureRequest());
+        request2->mMetadata = requestTemplate;
+        request2->mSurfaceList.add(surface);
+        request2->mIsReprocess = false;
+        callbacks->clearStatus();
+        int requestId2 = device->submitRequest(request2, /*streaming*/true,
+                /*out*/&lastFrameNumber);
+        EXPECT_EQ(-1, lastFrameNumber);
+        lastFrameNumber = 0;
+        EXPECT_TRUE(callbacks->waitForStatus(TestCameraDeviceCallbacks::SENT_RESULT));
+        EXPECT_LE(0, requestId2);
+        EXPECT_EQ(OK, device->cancelRequest(requestId2, /*out*/&lastFrameNumber));
+        EXPECT_TRUE(callbacks->waitForIdle());
+        EXPECT_LE(lastFrameNumberPrev, lastFrameNumber);
+        sleep(/*second*/1); // allow some time for errors to show up, if any
+        EXPECT_FALSE(callbacks->hadError());
+
+        // Can we do it with a request list?
+        lastFrameNumberPrev = lastFrameNumber;
+        lastFrameNumber = 0;
+        requestTemplate.clear();
+        CameraMetadata requestTemplate2;
+        EXPECT_EQ(OK, device->createDefaultRequest(/*preview template*/1,
+                /*out*/&requestTemplate));
+        EXPECT_EQ(OK, device->createDefaultRequest(/*preview template*/1,
+                /*out*/&requestTemplate2));
+        sp<CaptureRequest> request3(new CaptureRequest());
+        sp<CaptureRequest> request4(new CaptureRequest());
+        request3->mMetadata = requestTemplate;
+        request3->mSurfaceList.add(surface);
+        request3->mIsReprocess = false;
+        request4->mMetadata = requestTemplate2;
+        request4->mSurfaceList.add(surface);
+        request4->mIsReprocess = false;
+        List<sp<CaptureRequest>> requestList;
+        requestList.push_back(request3);
+        requestList.push_back(request4);
+
+        callbacks->clearStatus();
+        int requestId3 = device->submitRequestList(requestList, /*streaming*/false,
+                /*out*/&lastFrameNumber);
+        EXPECT_TRUE(callbacks->waitForStatus(TestCameraDeviceCallbacks::SENT_RESULT));
+        EXPECT_TRUE(callbacks->waitForIdle());
+        EXPECT_LE(lastFrameNumberPrev, lastFrameNumber);
+        sleep(/*second*/1); // allow some time for errors to show up, if any
+        EXPECT_FALSE(callbacks->hadError());
+
+        // Can we unconfigure?
+        EXPECT_EQ(OK, device->beginConfigure());
+        EXPECT_EQ(OK, device->deleteStream(streamId));
+        EXPECT_EQ(OK, device->endConfigure());
+        sleep(/*second*/1); // allow some time for errors to show up, if any
+        EXPECT_FALSE(callbacks->hadError());
+
+        closeDevice(p);
+    }
+
+};
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
deleted file mode 100644
index 1f5867a..0000000
--- a/camera/tests/ProCameraTests.cpp
+++ /dev/null
@@ -1,1278 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-#include <iostream>
-
-#include <binder/IPCThreadState.h>
-#include <utils/Thread.h>
-
-#include "Camera.h"
-#include "ProCamera.h"
-#include <utils/Vector.h>
-#include <utils/Mutex.h>
-#include <utils/Condition.h>
-
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-
-#include <system/camera_metadata.h>
-#include <hardware/camera2.h> // for CAMERA2_TEMPLATE_PREVIEW only
-#include <camera/CameraMetadata.h>
-
-#include <camera/ICameraServiceListener.h>
-
-namespace android {
-namespace camera2 {
-namespace tests {
-namespace client {
-
-#define CAMERA_ID 0
-#define TEST_DEBUGGING 0
-
-#define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout
-#define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 //TODO: YUY2 instead
-
-#define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8
-#define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16
-
-// defaults for display "test"
-#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y8
-#define TEST_DISPLAY_WIDTH 320
-#define TEST_DISPLAY_HEIGHT 240
-
-#define TEST_CPU_FRAME_COUNT 2
-#define TEST_CPU_HEAP_COUNT 5
-
-#define TEST_FRAME_PROCESSING_DELAY_US 200000 // 200 ms
-
-#if TEST_DEBUGGING
-#define dout std::cerr
-#else
-#define dout if (0) std::cerr
-#endif
-
-#define EXPECT_OK(x) EXPECT_EQ(OK, (x))
-#define ASSERT_OK(x) ASSERT_EQ(OK, (x))
-
-class ProCameraTest;
-
-struct ServiceListener : public BnCameraServiceListener {
-
-    ServiceListener() :
-        mLatestStatus(STATUS_UNKNOWN),
-        mPrevStatus(STATUS_UNKNOWN)
-    {
-    }
-
-    void onStatusChanged(Status status, int32_t cameraId) {
-        dout << "On status changed: 0x" << std::hex
-             << (unsigned int) status << " cameraId " << cameraId
-             << std::endl;
-
-        Mutex::Autolock al(mMutex);
-
-        mLatestStatus = status;
-        mCondition.broadcast();
-    }
-
-    status_t waitForStatusChange(Status& newStatus) {
-        Mutex::Autolock al(mMutex);
-
-        if (mLatestStatus != mPrevStatus) {
-            newStatus = mLatestStatus;
-            mPrevStatus = mLatestStatus;
-            return OK;
-        }
-
-        status_t stat = mCondition.waitRelative(mMutex,
-                                               TEST_LISTENER_TIMEOUT);
-
-        if (stat == OK) {
-            newStatus = mLatestStatus;
-            mPrevStatus = mLatestStatus;
-        }
-
-        return stat;
-    }
-
-    Condition mCondition;
-    Mutex mMutex;
-
-    Status mLatestStatus;
-    Status mPrevStatus;
-};
-
-enum ProEvent {
-    UNKNOWN,
-    ACQUIRED,
-    RELEASED,
-    STOLEN,
-    FRAME_RECEIVED,
-    RESULT_RECEIVED,
-};
-
-inline int ProEvent_Mask(ProEvent e) {
-    return (1 << static_cast<int>(e));
-}
-
-typedef Vector<ProEvent> EventList;
-
-class ProCameraTestThread : public Thread
-{
-public:
-    ProCameraTestThread() {
-    }
-
-    virtual bool threadLoop() {
-        mProc = ProcessState::self();
-        mProc->startThreadPool();
-
-        IPCThreadState *ptr = IPCThreadState::self();
-
-        ptr->joinThreadPool();
-
-        return false;
-    }
-
-    sp<ProcessState> mProc;
-};
-
-class ProCameraTestListener : public ProCameraListener {
-
-public:
-    static const int EVENT_MASK_ALL = 0xFFFFFFFF;
-
-    ProCameraTestListener() {
-        mEventMask = EVENT_MASK_ALL;
-        mDropFrames = false;
-    }
-
-    status_t WaitForEvent() {
-        Mutex::Autolock cal(mConditionMutex);
-
-        {
-            Mutex::Autolock al(mListenerMutex);
-
-            if (mProEventList.size() > 0) {
-                return OK;
-            }
-        }
-
-        return mListenerCondition.waitRelative(mConditionMutex,
-                                               TEST_LISTENER_TIMEOUT);
-    }
-
-    /* Read events into out. Existing queue is flushed */
-    void ReadEvents(EventList& out) {
-        Mutex::Autolock al(mListenerMutex);
-
-        for (size_t i = 0; i < mProEventList.size(); ++i) {
-            out.push(mProEventList[i]);
-        }
-
-        mProEventList.clear();
-    }
-
-    /**
-      * Dequeue 1 event from the event queue.
-      * Returns UNKNOWN if queue is empty
-      */
-    ProEvent ReadEvent() {
-        Mutex::Autolock al(mListenerMutex);
-
-        if (mProEventList.size() == 0) {
-            return UNKNOWN;
-        }
-
-        ProEvent ev = mProEventList[0];
-        mProEventList.removeAt(0);
-
-        return ev;
-    }
-
-    void SetEventMask(int eventMask) {
-        Mutex::Autolock al(mListenerMutex);
-        mEventMask = eventMask;
-    }
-
-    // Automatically acquire/release frames as they are available
-    void SetDropFrames(bool dropFrames) {
-        Mutex::Autolock al(mListenerMutex);
-        mDropFrames = dropFrames;
-    }
-
-private:
-    void QueueEvent(ProEvent ev) {
-        bool eventAdded = false;
-        {
-            Mutex::Autolock al(mListenerMutex);
-
-            // Drop events not part of mask
-            if (ProEvent_Mask(ev) & mEventMask) {
-                mProEventList.push(ev);
-                eventAdded = true;
-            }
-        }
-
-        if (eventAdded) {
-            mListenerCondition.broadcast();
-        }
-    }
-
-protected:
-
-    //////////////////////////////////////////////////
-    ///////// ProCameraListener //////////////////////
-    //////////////////////////////////////////////////
-
-
-    // Lock has been acquired. Write operations now available.
-    virtual void onLockAcquired() {
-        QueueEvent(ACQUIRED);
-    }
-    // Lock has been released with exclusiveUnlock
-    virtual void onLockReleased() {
-        QueueEvent(RELEASED);
-    }
-
-    // Lock has been stolen by another client.
-    virtual void onLockStolen() {
-        QueueEvent(STOLEN);
-    }
-
-    // Lock free.
-    virtual void onTriggerNotify(int32_t ext1, int32_t ext2, int32_t ext3) {
-
-        dout << "Trigger notify: " << ext1 << " " << ext2
-             << " " << ext3 << std::endl;
-    }
-
-    virtual void onFrameAvailable(int streamId,
-                                  const sp<CpuConsumer>& consumer) {
-
-        QueueEvent(FRAME_RECEIVED);
-
-        Mutex::Autolock al(mListenerMutex);
-        if (mDropFrames) {
-            CpuConsumer::LockedBuffer buf;
-            status_t ret;
-
-            if (OK == (ret = consumer->lockNextBuffer(&buf))) {
-
-                dout << "Frame received on streamId = " << streamId <<
-                        ", dataPtr = " << (void*)buf.data <<
-                        ", timestamp = " << buf.timestamp << std::endl;
-
-                EXPECT_OK(consumer->unlockBuffer(buf));
-            }
-        } else {
-            dout << "Frame received on streamId = " << streamId << std::endl;
-        }
-    }
-
-    virtual void onResultReceived(int32_t requestId,
-                                  camera_metadata* request) {
-        dout << "Result received requestId = " << requestId
-             << ", requestPtr = " << (void*)request << std::endl;
-        QueueEvent(RESULT_RECEIVED);
-        free_camera_metadata(request);
-    }
-
-    virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) {
-        dout << "Notify received: msg " << std::hex << msg
-             << ", ext1: " << std::hex << ext1 << ", ext2: " << std::hex << ext2
-             << std::endl;
-    }
-
-    Vector<ProEvent> mProEventList;
-    Mutex             mListenerMutex;
-    Mutex             mConditionMutex;
-    Condition         mListenerCondition;
-    int               mEventMask;
-    bool              mDropFrames;
-};
-
-class ProCameraTest : public ::testing::Test {
-
-public:
-    ProCameraTest() {
-        char* displaySecsEnv = getenv("TEST_DISPLAY_SECS");
-        if (displaySecsEnv != NULL) {
-            mDisplaySecs = atoi(displaySecsEnv);
-            if (mDisplaySecs < 0) {
-                mDisplaySecs = 0;
-            }
-        } else {
-            mDisplaySecs = 0;
-        }
-
-        char* displayFmtEnv = getenv("TEST_DISPLAY_FORMAT");
-        if (displayFmtEnv != NULL) {
-            mDisplayFmt = FormatFromString(displayFmtEnv);
-        } else {
-            mDisplayFmt = TEST_DISPLAY_FORMAT;
-        }
-
-        char* displayWidthEnv = getenv("TEST_DISPLAY_WIDTH");
-        if (displayWidthEnv != NULL) {
-            mDisplayW = atoi(displayWidthEnv);
-            if (mDisplayW < 0) {
-                mDisplayW = 0;
-            }
-        } else {
-            mDisplayW = TEST_DISPLAY_WIDTH;
-        }
-
-        char* displayHeightEnv = getenv("TEST_DISPLAY_HEIGHT");
-        if (displayHeightEnv != NULL) {
-            mDisplayH = atoi(displayHeightEnv);
-            if (mDisplayH < 0) {
-                mDisplayH = 0;
-            }
-        } else {
-            mDisplayH = TEST_DISPLAY_HEIGHT;
-        }
-    }
-
-    static void SetUpTestCase() {
-        // Binder Thread Pool Initialization
-        mTestThread = new ProCameraTestThread();
-        mTestThread->run("ProCameraTestThread");
-    }
-
-    virtual void SetUp() {
-        mCamera = ProCamera::connect(CAMERA_ID);
-        ASSERT_NE((void*)NULL, mCamera.get());
-
-        mListener = new ProCameraTestListener();
-        mCamera->setListener(mListener);
-    }
-
-    virtual void TearDown() {
-        ASSERT_NE((void*)NULL, mCamera.get());
-        mCamera->disconnect();
-    }
-
-protected:
-    sp<ProCamera> mCamera;
-    sp<ProCameraTestListener> mListener;
-
-    static sp<Thread> mTestThread;
-
-    int mDisplaySecs;
-    int mDisplayFmt;
-    int mDisplayW;
-    int mDisplayH;
-
-    sp<SurfaceComposerClient> mComposerClient;
-    sp<SurfaceControl> mSurfaceControl;
-
-    sp<SurfaceComposerClient> mDepthComposerClient;
-    sp<SurfaceControl> mDepthSurfaceControl;
-
-    int getSurfaceWidth() {
-        return 512;
-    }
-    int getSurfaceHeight() {
-        return 512;
-    }
-
-    void createOnScreenSurface(sp<Surface>& surface) {
-        mComposerClient = new SurfaceComposerClient;
-        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
-
-        mSurfaceControl = mComposerClient->createSurface(
-                String8("ProCameraTest StreamingImage Surface"),
-                getSurfaceWidth(), getSurfaceHeight(),
-                PIXEL_FORMAT_RGB_888, 0);
-
-        mSurfaceControl->setPosition(0, 0);
-
-        ASSERT_TRUE(mSurfaceControl != NULL);
-        ASSERT_TRUE(mSurfaceControl->isValid());
-
-        SurfaceComposerClient::openGlobalTransaction();
-        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
-        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
-        SurfaceComposerClient::closeGlobalTransaction();
-
-        sp<ANativeWindow> window = mSurfaceControl->getSurface();
-        surface = mSurfaceControl->getSurface();
-
-        ASSERT_NE((void*)NULL, surface.get());
-    }
-
-    void createDepthOnScreenSurface(sp<Surface>& surface) {
-        mDepthComposerClient = new SurfaceComposerClient;
-        ASSERT_EQ(NO_ERROR, mDepthComposerClient->initCheck());
-
-        mDepthSurfaceControl = mDepthComposerClient->createSurface(
-                String8("ProCameraTest StreamingImage Surface"),
-                getSurfaceWidth(), getSurfaceHeight(),
-                PIXEL_FORMAT_RGB_888, 0);
-
-        mDepthSurfaceControl->setPosition(640, 0);
-
-        ASSERT_TRUE(mDepthSurfaceControl != NULL);
-        ASSERT_TRUE(mDepthSurfaceControl->isValid());
-
-        SurfaceComposerClient::openGlobalTransaction();
-        ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->setLayer(0x7FFFFFFF));
-        ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->show());
-        SurfaceComposerClient::closeGlobalTransaction();
-
-        sp<ANativeWindow> window = mDepthSurfaceControl->getSurface();
-        surface = mDepthSurfaceControl->getSurface();
-
-        ASSERT_NE((void*)NULL, surface.get());
-    }
-
-    template <typename T>
-    static bool ExistsItem(T needle, T* array, size_t count) {
-        if (!array) {
-            return false;
-        }
-
-        for (size_t i = 0; i < count; ++i) {
-            if (array[i] == needle) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-
-    static int FormatFromString(const char* str) {
-        std::string s(str);
-
-#define CMP_STR(x, y)                               \
-        if (s == #x) return HAL_PIXEL_FORMAT_ ## y;
-#define CMP_STR_SAME(x) CMP_STR(x, x)
-
-        CMP_STR_SAME( Y16);
-        CMP_STR_SAME( Y8);
-        CMP_STR_SAME( YV12);
-        CMP_STR(NV16, YCbCr_422_SP);
-        CMP_STR(NV21, YCrCb_420_SP);
-        CMP_STR(YUY2, YCbCr_422_I);
-        CMP_STR(RAW,  RAW_SENSOR);
-        CMP_STR(RGBA, RGBA_8888);
-
-        std::cerr << "Unknown format string " << str << std::endl;
-        return -1;
-
-    }
-
-    /**
-     * Creating a streaming request for these output streams from a template,
-     *  and submit it
-     */
-    void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) {
-
-        ASSERT_NE((void*)NULL, streamIds);
-        ASSERT_LT(0u, count);
-
-        camera_metadata_t *requestTmp = NULL;
-        EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
-                                                /*out*/&requestTmp));
-        ASSERT_NE((void*)NULL, requestTmp);
-        CameraMetadata request(requestTmp);
-
-        // set the output streams. default is empty
-
-        uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
-        request.update(tag, streamIds, count);
-
-        requestTmp = request.release();
-
-        if (requestCount < 0) {
-            EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true));
-        } else {
-            for (int i = 0; i < requestCount; ++i) {
-                EXPECT_OK(mCamera->submitRequest(requestTmp,
-                                                 /*streaming*/false));
-            }
-        }
-        request.acquire(requestTmp);
-    }
-};
-
-sp<Thread> ProCameraTest::mTestThread;
-
-TEST_F(ProCameraTest, AvailableFormats) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    CameraMetadata staticInfo = mCamera->getCameraInfo(CAMERA_ID);
-    ASSERT_FALSE(staticInfo.isEmpty());
-
-    uint32_t tag = static_cast<uint32_t>(ANDROID_SCALER_AVAILABLE_FORMATS);
-    EXPECT_TRUE(staticInfo.exists(tag));
-    camera_metadata_entry_t entry = staticInfo.find(tag);
-
-    EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YV12,
-                                                  entry.data.i32, entry.count));
-    EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YCrCb_420_SP,
-                                                  entry.data.i32, entry.count));
-}
-
-// test around exclusiveTryLock (immediate locking)
-TEST_F(ProCameraTest, LockingImmediate) {
-
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
-                            ProEvent_Mask(STOLEN)   |
-                            ProEvent_Mask(RELEASED));
-
-    EXPECT_FALSE(mCamera->hasExclusiveLock());
-    EXPECT_EQ(OK, mCamera->exclusiveTryLock());
-    // at this point we definitely have the lock
-
-    EXPECT_EQ(OK, mListener->WaitForEvent());
-    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
-
-    EXPECT_TRUE(mCamera->hasExclusiveLock());
-    EXPECT_EQ(OK, mCamera->exclusiveUnlock());
-
-    EXPECT_EQ(OK, mListener->WaitForEvent());
-    EXPECT_EQ(RELEASED, mListener->ReadEvent());
-
-    EXPECT_FALSE(mCamera->hasExclusiveLock());
-}
-
-// test around exclusiveLock (locking at some future point in time)
-TEST_F(ProCameraTest, LockingAsynchronous) {
-
-    if (HasFatalFailure()) {
-        return;
-    }
-
-
-    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
-                            ProEvent_Mask(STOLEN)   |
-                            ProEvent_Mask(RELEASED));
-
-    // TODO: Add another procamera that has a lock here.
-    // then we can be test that the lock wont immediately be acquired
-
-    EXPECT_FALSE(mCamera->hasExclusiveLock());
-    EXPECT_EQ(OK, mCamera->exclusiveTryLock());
-    // at this point we definitely have the lock
-
-    EXPECT_EQ(OK, mListener->WaitForEvent());
-    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
-
-    EXPECT_TRUE(mCamera->hasExclusiveLock());
-    EXPECT_EQ(OK, mCamera->exclusiveUnlock());
-
-    EXPECT_EQ(OK, mListener->WaitForEvent());
-    EXPECT_EQ(RELEASED, mListener->ReadEvent());
-
-    EXPECT_FALSE(mCamera->hasExclusiveLock());
-}
-
-// Stream directly to the screen.
-TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    sp<Surface> surface;
-    if (mDisplaySecs > 0) {
-        createOnScreenSurface(/*out*/surface);
-    }
-    else {
-        dout << "Skipping, will not render to screen" << std::endl;
-        return;
-    }
-
-    int depthStreamId = -1;
-
-    sp<ServiceListener> listener = new ServiceListener();
-    EXPECT_OK(ProCamera::addServiceListener(listener));
-
-    ServiceListener::Status currentStatus;
-
-    // when subscribing a new listener,
-    // we immediately get a callback to the current status
-    while (listener->waitForStatusChange(/*out*/currentStatus) != OK);
-    EXPECT_EQ(ServiceListener::STATUS_PRESENT, currentStatus);
-
-    dout << "Will now stream and resume infinitely..." << std::endl;
-    while (true) {
-
-        if (currentStatus == ServiceListener::STATUS_PRESENT) {
-
-            ASSERT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt,
-                                            surface,
-                                            &depthStreamId));
-            EXPECT_NE(-1, depthStreamId);
-
-            EXPECT_OK(mCamera->exclusiveTryLock());
-
-            int32_t streams[] = { depthStreamId };
-            ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(
-                                                 streams,
-                                                 /*count*/1));
-        }
-
-        ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN;
-
-        // TODO: maybe check for getch every once in a while?
-        while (listener->waitForStatusChange(/*out*/stat) != OK);
-
-        if (currentStatus != stat) {
-            if (stat == ServiceListener::STATUS_PRESENT) {
-                dout << "Reconnecting to camera" << std::endl;
-                mCamera = ProCamera::connect(CAMERA_ID);
-            } else if (stat == ServiceListener::STATUS_NOT_AVAILABLE) {
-                dout << "Disconnecting from camera" << std::endl;
-                mCamera->disconnect();
-            } else if (stat == ServiceListener::STATUS_NOT_PRESENT) {
-                dout << "Camera unplugged" << std::endl;
-                mCamera = NULL;
-            } else {
-                dout << "Unknown status change "
-                     << std::hex << stat << std::endl;
-            }
-
-            currentStatus = stat;
-        }
-    }
-
-    EXPECT_OK(ProCamera::removeServiceListener(listener));
-    EXPECT_OK(mCamera->deleteStream(depthStreamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// Stream directly to the screen.
-TEST_F(ProCameraTest, DISABLED_StreamingImageDual) {
-    if (HasFatalFailure()) {
-        return;
-    }
-    sp<Surface> surface;
-    sp<Surface> depthSurface;
-    if (mDisplaySecs > 0) {
-        createOnScreenSurface(/*out*/surface);
-        createDepthOnScreenSurface(/*out*/depthSurface);
-    }
-
-    int streamId = -1;
-    EXPECT_OK(mCamera->createStream(/*width*/1280, /*height*/960,
-              TEST_FORMAT_MAIN, surface, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    int depthStreamId = -1;
-    EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240,
-              TEST_FORMAT_DEPTH, depthSurface, &depthStreamId));
-    EXPECT_NE(-1, depthStreamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-    /*
-    */
-    /* iterate in a loop submitting requests every frame.
-     *  what kind of requests doesnt really matter, just whatever.
-     */
-
-    // it would probably be better to use CameraMetadata from camera service.
-    camera_metadata_t *request = NULL;
-    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
-              /*out*/&request));
-    EXPECT_NE((void*)NULL, request);
-
-    /*FIXME: dont need this later, at which point the above should become an
-             ASSERT_NE*/
-    if(request == NULL) request = allocate_camera_metadata(10, 100);
-
-    // set the output streams to just this stream ID
-
-    // wow what a verbose API.
-    int32_t allStreams[] = { streamId, depthStreamId };
-    // IMPORTANT. bad things will happen if its not a uint8.
-    size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]);
-    camera_metadata_entry_t entry;
-    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
-    int find = find_camera_metadata_entry(request, tag, &entry);
-    if (find == -ENOENT) {
-        if (add_camera_metadata_entry(request, tag, &allStreams,
-                                      /*data_count*/streamCount) != OK) {
-            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
-            ASSERT_OK(append_camera_metadata(tmp, request));
-            free_camera_metadata(request);
-            request = tmp;
-
-            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
-                                                /*data_count*/streamCount));
-        }
-    } else {
-        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
-                  &allStreams, /*data_count*/streamCount, &entry));
-    }
-
-    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
-    dout << "will sleep now for " << mDisplaySecs << std::endl;
-    sleep(mDisplaySecs);
-
-    free_camera_metadata(request);
-
-    for (size_t i = 0; i < streamCount; ++i) {
-        EXPECT_OK(mCamera->deleteStream(allStreams[i]));
-    }
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, CpuConsumerSingle) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
-                            ProEvent_Mask(STOLEN)   |
-                            ProEvent_Mask(RELEASED) |
-                            ProEvent_Mask(FRAME_RECEIVED));
-    mListener->SetDropFrames(true);
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
-                TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-    EXPECT_EQ(OK, mListener->WaitForEvent());
-    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
-    /* iterate in a loop submitting requests every frame.
-     *  what kind of requests doesnt really matter, just whatever.
-     */
-
-    // it would probably be better to use CameraMetadata from camera service.
-    camera_metadata_t *request = NULL;
-    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
-        /*out*/&request));
-    EXPECT_NE((void*)NULL, request);
-
-    /*FIXME: dont need this later, at which point the above should become an
-      ASSERT_NE*/
-    if(request == NULL) request = allocate_camera_metadata(10, 100);
-
-    // set the output streams to just this stream ID
-
-    int32_t allStreams[] = { streamId };
-    camera_metadata_entry_t entry;
-    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
-    int find = find_camera_metadata_entry(request, tag, &entry);
-    if (find == -ENOENT) {
-        if (add_camera_metadata_entry(request, tag, &allStreams,
-                /*data_count*/1) != OK) {
-            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
-            ASSERT_OK(append_camera_metadata(tmp, request));
-            free_camera_metadata(request);
-            request = tmp;
-
-            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
-                /*data_count*/1));
-        }
-    } else {
-        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
-            &allStreams, /*data_count*/1, &entry));
-    }
-
-    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
-    // Consume a couple of frames
-    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
-        EXPECT_EQ(OK, mListener->WaitForEvent());
-        EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
-    }
-
-    // Done: clean up
-    free_camera_metadata(request);
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, CpuConsumerDual) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    mListener->SetEventMask(ProEvent_Mask(FRAME_RECEIVED));
-    mListener->SetDropFrames(true);
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    int depthStreamId = -1;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
-            TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &depthStreamId));
-    EXPECT_NE(-1, depthStreamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-    /*
-    */
-    /* iterate in a loop submitting requests every frame.
-     *  what kind of requests doesnt really matter, just whatever.
-     */
-
-    // it would probably be better to use CameraMetadata from camera service.
-    camera_metadata_t *request = NULL;
-    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
-                                            /*out*/&request));
-    EXPECT_NE((void*)NULL, request);
-
-    if(request == NULL) request = allocate_camera_metadata(10, 100);
-
-    // set the output streams to just this stream ID
-
-    // wow what a verbose API.
-    int32_t allStreams[] = { streamId, depthStreamId };
-    size_t streamCount = 2;
-    camera_metadata_entry_t entry;
-    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
-    int find = find_camera_metadata_entry(request, tag, &entry);
-    if (find == -ENOENT) {
-        if (add_camera_metadata_entry(request, tag, &allStreams,
-                                      /*data_count*/streamCount) != OK) {
-            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
-            ASSERT_OK(append_camera_metadata(tmp, request));
-            free_camera_metadata(request);
-            request = tmp;
-
-            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
-                                                   /*data_count*/streamCount));
-        }
-    } else {
-        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
-                              &allStreams, /*data_count*/streamCount, &entry));
-    }
-
-    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
-    // Consume a couple of frames
-    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
-        // stream id 1
-        EXPECT_EQ(OK, mListener->WaitForEvent());
-        EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
-
-        // stream id 2
-        EXPECT_EQ(OK, mListener->WaitForEvent());
-        EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
-
-        //TODO: events should be a struct with some data like the stream id
-    }
-
-    // Done: clean up
-    free_camera_metadata(request);
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, ResultReceiver) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED));
-    mListener->SetDropFrames(true);
-    //FIXME: if this is run right after the previous test we get FRAME_RECEIVED
-    // need to filter out events at read time
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-    /*
-    */
-    /* iterate in a loop submitting requests every frame.
-     *  what kind of requests doesnt really matter, just whatever.
-     */
-
-    camera_metadata_t *request = NULL;
-    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
-                                            /*out*/&request));
-    EXPECT_NE((void*)NULL, request);
-
-    /*FIXME*/
-    if(request == NULL) request = allocate_camera_metadata(10, 100);
-
-    // set the output streams to just this stream ID
-
-    int32_t allStreams[] = { streamId };
-    size_t streamCount = 1;
-    camera_metadata_entry_t entry;
-    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
-    int find = find_camera_metadata_entry(request, tag, &entry);
-    if (find == -ENOENT) {
-        if (add_camera_metadata_entry(request, tag, &allStreams,
-                                      /*data_count*/streamCount) != OK) {
-            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
-            ASSERT_OK(append_camera_metadata(tmp, request));
-            free_camera_metadata(request);
-            request = tmp;
-
-            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
-                                                /*data_count*/streamCount));
-        }
-    } else {
-        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
-                               &allStreams, /*data_count*/streamCount, &entry));
-    }
-
-    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
-    // Consume a couple of results
-    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
-        EXPECT_EQ(OK, mListener->WaitForEvent());
-        EXPECT_EQ(RESULT_RECEIVED, mListener->ReadEvent());
-    }
-
-    // Done: clean up
-    free_camera_metadata(request);
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// FIXME: This is racy and sometimes fails on waitForFrameMetadata
-TEST_F(ProCameraTest, DISABLED_WaitForResult) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    mListener->SetDropFrames(true);
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                 TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-
-    int32_t streams[] = { streamId };
-    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1));
-
-    // Consume a couple of results
-    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
-        EXPECT_OK(mCamera->waitForFrameMetadata());
-        CameraMetadata meta = mCamera->consumeFrameMetadata();
-        EXPECT_FALSE(meta.isEmpty());
-    }
-
-    // Done: clean up
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBuffer) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                  TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-
-    int32_t streams[] = { streamId };
-    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
-                                            /*requests*/TEST_CPU_FRAME_COUNT));
-
-    // Consume a couple of results
-    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
-        EXPECT_EQ(1, mCamera->waitForFrameBuffer(streamId));
-
-        CpuConsumer::LockedBuffer buf;
-        EXPECT_OK(consumer->lockNextBuffer(&buf));
-
-        dout << "Buffer synchronously received on streamId = " << streamId <<
-                ", dataPtr = " << (void*)buf.data <<
-                ", timestamp = " << buf.timestamp << std::endl;
-
-        EXPECT_OK(consumer->unlockBuffer(buf));
-    }
-
-    // Done: clean up
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// FIXME: This is racy and sometimes fails on waitForFrameMetadata
-TEST_F(ProCameraTest, DISABLED_WaitForDualStreamBuffer) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    const int REQUEST_COUNT = TEST_CPU_FRAME_COUNT * 10;
-
-    // 15 fps
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                 TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    // 30 fps
-    int depthStreamId = -1;
-    sp<CpuConsumer> depthConsumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
-       TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthConsumer, &depthStreamId));
-    EXPECT_NE(-1, depthStreamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-
-    int32_t streams[] = { streamId, depthStreamId };
-    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2,
-                                                    /*requests*/REQUEST_COUNT));
-
-    int depthFrames = 0;
-    int greyFrames = 0;
-
-    // Consume two frames simultaneously. Unsynchronized by timestamps.
-    for (int i = 0; i < REQUEST_COUNT; ++i) {
-
-        // Exhaust event queue so it doesn't keep growing
-        while (mListener->ReadEvent() != UNKNOWN);
-
-        // Get the metadata
-        EXPECT_OK(mCamera->waitForFrameMetadata());
-        CameraMetadata meta = mCamera->consumeFrameMetadata();
-        EXPECT_FALSE(meta.isEmpty());
-
-        // Get the buffers
-
-        EXPECT_EQ(1, mCamera->waitForFrameBuffer(depthStreamId));
-
-        /**
-          * Guaranteed to be able to consume the depth frame,
-          * since we waited on it.
-          */
-        CpuConsumer::LockedBuffer depthBuffer;
-        EXPECT_OK(depthConsumer->lockNextBuffer(&depthBuffer));
-
-        dout << "Depth Buffer synchronously received on streamId = " <<
-                streamId <<
-                ", dataPtr = " << (void*)depthBuffer.data <<
-                ", timestamp = " << depthBuffer.timestamp << std::endl;
-
-        EXPECT_OK(depthConsumer->unlockBuffer(depthBuffer));
-
-        depthFrames++;
-
-
-        /** Consume Greyscale frames if there are any.
-          * There may not be since it runs at half FPS */
-        CpuConsumer::LockedBuffer greyBuffer;
-        while (consumer->lockNextBuffer(&greyBuffer) == OK) {
-
-            dout << "GRAY Buffer synchronously received on streamId = " <<
-                streamId <<
-                ", dataPtr = " << (void*)greyBuffer.data <<
-                ", timestamp = " << greyBuffer.timestamp << std::endl;
-
-            EXPECT_OK(consumer->unlockBuffer(greyBuffer));
-
-            greyFrames++;
-        }
-    }
-
-    dout << "Done, summary: depth frames " << std::dec << depthFrames
-         << ", grey frames " << std::dec << greyFrames << std::endl;
-
-    // Done: clean up
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesSync) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                  TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT,
-                  /*synchronousMode*/true, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-
-    int32_t streams[] = { streamId };
-    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
-                                                     /*requests*/NUM_REQUESTS));
-
-    // Consume a couple of results
-    for (int i = 0; i < NUM_REQUESTS; ++i) {
-        int numFrames;
-        EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0);
-
-        // Drop all but the newest framebuffer
-        EXPECT_EQ(numFrames-1, mCamera->dropFrameBuffer(streamId, numFrames-1));
-
-        dout << "Dropped " << (numFrames - 1) << " frames" << std::endl;
-
-        // Skip the counter ahead, don't try to consume these frames again
-        i += numFrames-1;
-
-        // "Consume" the buffer
-        CpuConsumer::LockedBuffer buf;
-        EXPECT_OK(consumer->lockNextBuffer(&buf));
-
-        dout << "Buffer synchronously received on streamId = " << streamId <<
-                ", dataPtr = " << (void*)buf.data <<
-                ", timestamp = " << buf.timestamp << std::endl;
-
-        // Process at 10fps, stream is at 15fps.
-        // This means we will definitely fill up the buffer queue with
-        // extra buffers and need to drop them.
-        usleep(TEST_FRAME_PROCESSING_DELAY_US);
-
-        EXPECT_OK(consumer->unlockBuffer(buf));
-    }
-
-    // Done: clean up
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) {
-    if (HasFatalFailure()) {
-        return;
-    }
-
-    const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
-
-    int streamId = -1;
-    sp<CpuConsumer> consumer;
-    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
-                  TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT,
-                  /*synchronousMode*/false, &consumer, &streamId));
-    EXPECT_NE(-1, streamId);
-
-    EXPECT_OK(mCamera->exclusiveTryLock());
-
-    int32_t streams[] = { streamId };
-    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
-                                                     /*requests*/NUM_REQUESTS));
-
-    uint64_t lastFrameNumber = 0;
-    int numFrames;
-
-    // Consume a couple of results
-    int i;
-    for (i = 0; i < NUM_REQUESTS && lastFrameNumber < NUM_REQUESTS; ++i) {
-        EXPECT_LT(0, (numFrames = mCamera->waitForFrameBuffer(streamId)));
-
-        dout << "Dropped " << (numFrames - 1) << " frames" << std::endl;
-
-        // Skip the counter ahead, don't try to consume these frames again
-        i += numFrames-1;
-
-        // "Consume" the buffer
-        CpuConsumer::LockedBuffer buf;
-
-        EXPECT_EQ(OK, consumer->lockNextBuffer(&buf));
-
-        lastFrameNumber = buf.frameNumber;
-
-        dout << "Buffer asynchronously received on streamId = " << streamId <<
-                ", dataPtr = " << (void*)buf.data <<
-                ", timestamp = " << buf.timestamp <<
-                ", framenumber = " << buf.frameNumber << std::endl;
-
-        // Process at 10fps, stream is at 15fps.
-        // This means we will definitely fill up the buffer queue with
-        // extra buffers and need to drop them.
-        usleep(TEST_FRAME_PROCESSING_DELAY_US);
-
-        EXPECT_OK(consumer->unlockBuffer(buf));
-    }
-
-    dout << "Done after " << i << " iterations " << std::endl;
-
-    // Done: clean up
-    EXPECT_OK(mCamera->deleteStream(streamId));
-    EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-
-
-//TODO: refactor into separate file
-TEST_F(ProCameraTest, ServiceListenersSubscribe) {
-
-    ASSERT_EQ(4u, sizeof(ServiceListener::Status));
-
-    sp<ServiceListener> listener = new ServiceListener();
-
-    EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener));
-    EXPECT_OK(ProCamera::addServiceListener(listener));
-
-    EXPECT_EQ(ALREADY_EXISTS, ProCamera::addServiceListener(listener));
-    EXPECT_OK(ProCamera::removeServiceListener(listener));
-
-    EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener));
-}
-
-//TODO: refactor into separate file
-TEST_F(ProCameraTest, ServiceListenersFunctional) {
-
-    sp<ServiceListener> listener = new ServiceListener();
-
-    EXPECT_OK(ProCamera::addServiceListener(listener));
-
-    sp<Camera> cam = Camera::connect(CAMERA_ID,
-                                     /*clientPackageName*/String16(),
-                                     -1);
-    EXPECT_NE((void*)NULL, cam.get());
-
-    ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN;
-    EXPECT_OK(listener->waitForStatusChange(/*out*/stat));
-
-    EXPECT_EQ(ServiceListener::STATUS_NOT_AVAILABLE, stat);
-
-    if (cam.get()) {
-        cam->disconnect();
-    }
-
-    EXPECT_OK(listener->waitForStatusChange(/*out*/stat));
-    EXPECT_EQ(ServiceListener::STATUS_PRESENT, stat);
-
-    EXPECT_OK(ProCamera::removeServiceListener(listener));
-}
-
-
-
-}
-}
-}
-}
diff --git a/camera/tests/VendorTagDescriptorTests.cpp b/camera/tests/VendorTagDescriptorTests.cpp
index 6624e79..9082dbf 100644
--- a/camera/tests/VendorTagDescriptorTests.cpp
+++ b/camera/tests/VendorTagDescriptorTests.cpp
@@ -53,6 +53,10 @@
 
 extern "C" {
 
+static int zero_get_tag_count(const vendor_tag_ops_t* vOps) {
+    return 0;
+}
+
 static int default_get_tag_count(const vendor_tag_ops_t* vOps) {
     return VENDOR_TAG_COUNT_ERR;
 }
@@ -173,10 +177,13 @@
     vendor_tag_ops_t vOps;
     FillWithDefaults(&vOps);
 
+    // Use an empty tag count
+    vOps.get_tag_count = zero_get_tag_count;
+
     // Ensure create fails when using null vOps
     EXPECT_EQ(BAD_VALUE, VendorTagDescriptor::createDescriptorFromOps(/*vOps*/NULL, vDesc));
 
-    // Ensure create works when there are no vtags defined in a well-formed vOps
+    // Ensure create succeeds for empty vendor tag ops
     ASSERT_EQ(OK, VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc));
 
     // Ensure defaults are returned when no vtags are defined, or tag is unknown
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 02df1d2..36a7e73 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -23,7 +23,10 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
 #include <sys/wait.h>
+
 #include <termios.h>
 #include <unistd.h>
 
@@ -637,7 +640,13 @@
         case FORMAT_MP4: {
             // Configure muxer.  We have to wait for the CSD blob from the encoder
             // before we can start it.
-            muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+            if (fd < 0) {
+                fprintf(stderr, "ERROR: couldn't open file\n");
+                abort();
+            }
+            muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+            close(fd);
             if (gRotate) {
                 muxer->setOrientationHint(90);  // TODO: does this do anything?
             }
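
Note on the screenrecord hunk above: it moves MediaMuxer from a path-based constructor to a file-descriptor-based one. The caller opens the output itself and closes the descriptor immediately after constructing the muxer, which implies the muxer keeps its own reference to the open file. A minimal sketch of that pattern, assuming the MediaMuxer API exactly as used in the hunk; the helper name is hypothetical:

    #include <fcntl.h>
    #include <sys/stat.h>
    #include <unistd.h>
    #include <utils/RefBase.h>
    #include <media/stagefright/MediaMuxer.h>

    // Sketch only: open the output ourselves and hand the fd to MediaMuxer,
    // mirroring the pattern used in the patch above.
    static android::sp<android::MediaMuxer> makeMp4Muxer(const char *path) {
        int fd = open(path, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
                      S_IRUSR | S_IWUSR);
        if (fd < 0) {
            return NULL;
        }
        android::sp<android::MediaMuxer> muxer = new android::MediaMuxer(
                fd, android::MediaMuxer::OUTPUT_FORMAT_MPEG_4);
        close(fd);  // safe to close here; the muxer keeps its own reference, per the hunk above
        return muxer;
    }
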
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 561ce02..20c0094 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -17,7 +17,8 @@
 	$(TOP)/frameworks/native/include/media/openmax \
 	external/jpeg \
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -40,7 +41,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -63,7 +65,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -87,7 +90,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -110,7 +114,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -133,7 +138,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -157,7 +163,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
@@ -169,6 +176,49 @@
 
 include $(CLEAR_VARS)
 
+LOCAL_SRC_FILES:= \
+	filters/argbtorgba.rs \
+	filters/nightvision.rs \
+	filters/saturation.rs \
+	mediafilter.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright \
+	liblog \
+	libutils \
+	libbinder \
+	libstagefright_foundation \
+	libmedia \
+	libgui \
+	libcutils \
+	libui \
+	libRScpp \
+
+LOCAL_C_INCLUDES:= \
+	$(TOP)/frameworks/av/media/libstagefright \
+	$(TOP)/frameworks/native/include/media/openmax \
+	$(TOP)/frameworks/rs/cpp \
+	$(TOP)/frameworks/rs \
+
+intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
+LOCAL_C_INCLUDES += $(intermediates)
+
+LOCAL_STATIC_LIBRARIES:= \
+	libstagefright_mediafilter
+
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_MODULE:= mediafilter
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
 LOCAL_SRC_FILES:=               \
         muxer.cpp            \
 
@@ -180,7 +230,8 @@
 	frameworks/av/media/libstagefright \
 	$(TOP)/frameworks/native/include/media/openmax
 
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CLANG := true
 
 LOCAL_MODULE_TAGS := optional
 
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index 4b2f980..50913cd 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -21,6 +21,7 @@
 #include "SimplePlayer.h"
 
 #include <gui/Surface.h>
+
 #include <media/AudioTrack.h>
 #include <media/ICrypto.h>
 #include <media/IMediaHTTPService.h>
@@ -29,7 +30,6 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/NativeWindowWrapper.h>
 #include <media/stagefright/NuMediaExtractor.h>
 
 namespace android {
@@ -59,47 +59,46 @@
     return err;
 }
 status_t SimplePlayer::setDataSource(const char *path) {
-    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
     msg->setString("path", path);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
 
 status_t SimplePlayer::setSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
-    sp<AMessage> msg = new AMessage(kWhatSetSurface, id());
+    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
 
     sp<Surface> surface;
     if (bufferProducer != NULL) {
         surface = new Surface(bufferProducer);
     }
 
-    msg->setObject(
-            "native-window", new NativeWindowWrapper(surface));
+    msg->setObject("surface", surface);
 
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
 
 status_t SimplePlayer::prepare() {
-    sp<AMessage> msg = new AMessage(kWhatPrepare, id());
+    sp<AMessage> msg = new AMessage(kWhatPrepare, this);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
 
 status_t SimplePlayer::start() {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
+    sp<AMessage> msg = new AMessage(kWhatStart, this);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
 
 status_t SimplePlayer::stop() {
-    sp<AMessage> msg = new AMessage(kWhatStop, id());
+    sp<AMessage> msg = new AMessage(kWhatStop, this);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
 
 status_t SimplePlayer::reset() {
-    sp<AMessage> msg = new AMessage(kWhatReset, id());
+    sp<AMessage> msg = new AMessage(kWhatReset, this);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
@@ -116,7 +115,7 @@
                 mState = UNPREPARED;
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -132,14 +131,12 @@
                 err = INVALID_OPERATION;
             } else {
                 sp<RefBase> obj;
-                CHECK(msg->findObject("native-window", &obj));
-
-                mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
-
+                CHECK(msg->findObject("surface", &obj));
+                mSurface = static_cast<Surface *>(obj.get());
                 err = OK;
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -161,7 +158,7 @@
                 }
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -194,7 +191,7 @@
                 }
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -217,7 +214,7 @@
                 }
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -240,7 +237,7 @@
                 mState = UNINITIALIZED;
             }
 
-            uint32_t replyID;
+            sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             sp<AMessage> response = new AMessage;
@@ -324,7 +321,7 @@
 
         err = state->mCodec->configure(
                 format,
-                isVideo ? mNativeWindow->getSurfaceTextureClient() : NULL,
+                isVideo ? mSurface : NULL,
                 NULL /* crypto */,
                 0 /* flags */);
 
@@ -382,7 +379,7 @@
 
     mStartTimeRealUs = -1ll;
 
-    sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, id());
+    sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, this);
     msg->setInt32("generation", ++mDoMoreStuffGeneration);
     msg->post();
 
@@ -411,7 +408,7 @@
     mStateByTrackIndex.clear();
     mCodecLooper.clear();
     mExtractor.clear();
-    mNativeWindow.clear();
+    mSurface.clear();
     mPath.clear();
 
     return OK;
@@ -428,12 +425,12 @@
             err = state->mCodec->dequeueInputBuffer(&index);
 
             if (err == OK) {
-                ALOGV("dequeued input buffer on track %d",
+                ALOGV("dequeued input buffer on track %zu",
                       mStateByTrackIndex.keyAt(i));
 
                 state->mAvailInputBufferIndices.push_back(index);
             } else {
-                ALOGV("dequeueInputBuffer on track %d returned %d",
+                ALOGV("dequeueInputBuffer on track %zu returned %d",
                       mStateByTrackIndex.keyAt(i), err);
             }
         } while (err == OK);
@@ -448,7 +445,7 @@
                     &info.mFlags);
 
             if (err == OK) {
-                ALOGV("dequeued output buffer on track %d",
+                ALOGV("dequeued output buffer on track %zu",
                       mStateByTrackIndex.keyAt(i));
 
                 state->mAvailOutputBufferInfos.push_back(info);
@@ -459,7 +456,7 @@
                 err = state->mCodec->getOutputBuffers(&state->mBuffers[1]);
                 CHECK_EQ(err, (status_t)OK);
             } else {
-                ALOGV("dequeueOutputBuffer on track %d returned %d",
+                ALOGV("dequeueOutputBuffer on track %zu returned %d",
                       mStateByTrackIndex.keyAt(i), err);
             }
         } while (err == OK
@@ -502,7 +499,7 @@
                     0);
             CHECK_EQ(err, (status_t)OK);
 
-            ALOGV("enqueued input data on track %d", trackIndex);
+            ALOGV("enqueued input data on track %zu", trackIndex);
 
             err = mExtractor->advance();
             CHECK_EQ(err, (status_t)OK);
@@ -528,8 +525,8 @@
                 bool release = true;
 
                 if (lateByUs > 30000ll) {
-                    ALOGI("track %d buffer late by %lld us, dropping.",
-                          mStateByTrackIndex.keyAt(i), lateByUs);
+                    ALOGI("track %zu buffer late by %lld us, dropping.",
+                          mStateByTrackIndex.keyAt(i), (long long)lateByUs);
                     state->mCodec->releaseOutputBuffer(info->mIndex);
                 } else {
                     if (state->mAudioTrack != NULL) {
@@ -558,8 +555,8 @@
                     break;
                 }
             } else {
-                ALOGV("track %d buffer early by %lld us.",
-                      mStateByTrackIndex.keyAt(i), -lateByUs);
+                ALOGV("track %zu buffer early by %lld us.",
+                      mStateByTrackIndex.keyAt(i), (long long)-lateByUs);
                 break;
             }
         }
@@ -569,7 +566,7 @@
 }
 
 status_t SimplePlayer::onOutputFormatChanged(
-        size_t trackIndex, CodecState *state) {
+        size_t trackIndex __unused, CodecState *state) {
     sp<AMessage> format;
     status_t err = state->mCodec->getOutputFormat(&format);
 
@@ -640,7 +637,7 @@
     if (delayUs > 2000ll) {
         ALOGW("AudioTrack::write took %lld us, numFramesAvailableToWrite=%u, "
               "numFramesWritten=%u",
-              delayUs, numFramesAvailableToWrite, numFramesWritten);
+              (long long)delayUs, numFramesAvailableToWrite, numFramesWritten);
     }
 
     info->mOffset += nbytes;
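
The SimplePlayer changes track two updates in the stagefright foundation API: AMessage is now constructed with the target AHandler itself rather than a looper-registered id(), and synchronous replies are identified by an sp<AReplyToken> instead of a uint32_t. (The NativeWindowWrapper removal in the same file simply passes the Surface object directly via msg->setObject("surface", surface).) A minimal request/reply sketch under those assumptions; the class and message names are illustrative, not part of the patch:

    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Illustrative handler; it must be registered with a started ALooper
    // (looper->registerHandler(handler)) before ping() is called.
    struct EchoHandler : public AHandler {
        enum { kWhatPing = 'ping' };

        status_t ping() {
            sp<AMessage> msg = new AMessage(kWhatPing, this);  // handler target, not id()
            sp<AMessage> response;
            return msg->postAndAwaitResponse(&response);
        }

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            sp<AReplyToken> replyID;                // was uint32_t before this patch
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);
            response->postReply(replyID);
        }
    };
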
diff --git a/cmds/stagefright/SimplePlayer.h b/cmds/stagefright/SimplePlayer.h
index 0a06059..ae9dfd2 100644
--- a/cmds/stagefright/SimplePlayer.h
+++ b/cmds/stagefright/SimplePlayer.h
@@ -23,10 +23,10 @@
 struct ABuffer;
 struct ALooper;
 struct AudioTrack;
-struct IGraphicBufferProducer;
+class IGraphicBufferProducer;
 struct MediaCodec;
-struct NativeWindowWrapper;
 struct NuMediaExtractor;
+class Surface;
 
 struct SimplePlayer : public AHandler {
     SimplePlayer();
@@ -84,7 +84,7 @@
 
     State mState;
     AString mPath;
-    sp<NativeWindowWrapper> mNativeWindow;
+    sp<Surface> mSurface;
 
     sp<NuMediaExtractor> mExtractor;
     sp<ALooper> mCodecLooper;
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 96073f1..6e9e6ec 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -14,6 +14,12 @@
  * limitations under the License.
  */
 
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include <utils/String16.h>
+
 #include <binder/ProcessState.h>
 #include <media/mediarecorder.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -30,7 +36,7 @@
 
 static void usage(const char* name)
 {
-    fprintf(stderr, "Usage: %s [-d duration] [-m] [-w] [<output-file>]\n", name);
+    fprintf(stderr, "Usage: %s [-d du.ration] [-m] [-w] [<output-file>]\n", name);
     fprintf(stderr, "Encodes either a sine wave or microphone input to AMR format\n");
     fprintf(stderr, "    -d    duration in seconds, default 5 seconds\n");
     fprintf(stderr, "    -m    use microphone for input, default sine source\n");
@@ -81,6 +87,7 @@
         // talk into the appropriate microphone for the duration
         source = new AudioSource(
                 AUDIO_SOURCE_MIC,
+                String16(),
                 kSampleRate,
                 channels);
     } else {
@@ -109,7 +116,12 @@
 
     if (fileOut != NULL) {
         // target file specified, write encoded AMR output
-        sp<AMRWriter> writer = new AMRWriter(fileOut);
+        int fd = open(fileOut, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+        if (fd < 0) {
+            return 1;
+        }
+        sp<AMRWriter> writer = new AMRWriter(fd);
+        close(fd);
         writer->addSource(encoder);
         writer->start();
         sleep(duration);
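
The audioloop changes follow the same descriptor-handoff pattern (AMRWriter now takes an fd rather than a path) and add the new String16 opPackageName argument to the AudioSource constructor. A sketch of the two updated constructions, assuming the AMRWriter and AudioSource headers from this tree; the helper names are hypothetical:

    #include <fcntl.h>
    #include <sys/stat.h>
    #include <unistd.h>
    #include <system/audio.h>
    #include <utils/String16.h>
    #include <media/stagefright/AMRWriter.h>
    #include <media/stagefright/AudioSource.h>

    using namespace android;

    // Sketch only: the two call sites whose signatures changed in the hunks above.
    static sp<AMRWriter> makeAmrWriter(const char *path) {
        int fd = open(path, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
                      S_IRUSR | S_IWUSR);
        if (fd < 0) {
            return NULL;
        }
        sp<AMRWriter> writer = new AMRWriter(fd);   // fd-based constructor
        close(fd);                                  // the writer keeps its own copy, per the hunk above
        return writer;
    }

    static sp<AudioSource> makeMicSource(uint32_t sampleRate, uint32_t channels) {
        return new AudioSource(AUDIO_SOURCE_MIC,
                               String16(),          // new opPackageName argument; empty for a shell tool
                               sampleRate,
                               channels);
    }
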
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index fd02bcc..dae9bbe 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -45,9 +45,10 @@
     fprintf(stderr, "usage: %s [-a] use audio\n"
                     "\t\t[-v] use video\n"
                     "\t\t[-p] playback\n"
-                    "\t\t[-S] allocate buffers from a surface\n",
+                    "\t\t[-S] allocate buffers from a surface\n"
+                    "\t\t[-R] render output to surface (enables -S)\n"
+                    "\t\t[-T] use render timestamps (enables -R)\n",
                     me);
-
     exit(1);
 }
 
@@ -71,7 +72,9 @@
         const char *path,
         bool useAudio,
         bool useVideo,
-        const android::sp<android::Surface> &surface) {
+        const android::sp<android::Surface> &surface,
+        bool renderSurface,
+        bool useTimestamp) {
     using namespace android;
 
     static int64_t kTimeout = 500ll;
@@ -105,7 +108,7 @@
             continue;
         }
 
-        ALOGV("selecting track %d", i);
+        ALOGV("selecting track %zu", i);
 
         err = extractor->selectTrack(i);
         CHECK_EQ(err, (status_t)OK);
@@ -136,6 +139,7 @@
     CHECK(!stateByTrack.isEmpty());
 
     int64_t startTimeUs = ALooper::GetNowUs();
+    int64_t startTimeRender = -1;
 
     for (size_t i = 0; i < stateByTrack.size(); ++i) {
         CodecState *state = &stateByTrack.editValueAt(i);
@@ -147,7 +151,7 @@
         CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
         CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
 
-        ALOGV("got %d input and %d output buffers",
+        ALOGV("got %zu input and %zu output buffers",
               state->mInBuffers.size(), state->mOutBuffers.size());
     }
 
@@ -168,7 +172,7 @@
                 err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
 
                 if (err == OK) {
-                    ALOGV("filling input buffer %d", index);
+                    ALOGV("filling input buffer %zu", index);
 
                     const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
 
@@ -205,7 +209,7 @@
                         state->mCodec->dequeueInputBuffer(&index, kTimeout);
 
                     if (err == OK) {
-                        ALOGV("signalling input EOS on track %d", i);
+                        ALOGV("signalling input EOS on track %zu", i);
 
                         err = state->mCodec->queueInputBuffer(
                                 index,
@@ -254,13 +258,29 @@
                     kTimeout);
 
             if (err == OK) {
-                ALOGV("draining output buffer %d, time = %lld us",
-                      index, presentationTimeUs);
+                ALOGV("draining output buffer %zu, time = %lld us",
+                      index, (long long)presentationTimeUs);
 
                 ++state->mNumBuffersDecoded;
                 state->mNumBytesDecoded += size;
 
-                err = state->mCodec->releaseOutputBuffer(index);
+                if (surface == NULL || !renderSurface) {
+                    err = state->mCodec->releaseOutputBuffer(index);
+                } else if (useTimestamp) {
+                    if (startTimeRender == -1) {
+                        // begin rendering 2 vsyncs (~33ms) after first decode
+                        startTimeRender =
+                                systemTime(SYSTEM_TIME_MONOTONIC) + 33000000
+                                - (presentationTimeUs * 1000);
+                    }
+                    presentationTimeUs =
+                            (presentationTimeUs * 1000) + startTimeRender;
+                    err = state->mCodec->renderOutputBufferAndRelease(
+                            index, presentationTimeUs);
+                } else {
+                    err = state->mCodec->renderOutputBufferAndRelease(index);
+                }
+
                 CHECK_EQ(err, (status_t)OK);
 
                 if (flags & MediaCodec::BUFFER_FLAG_EOS) {
@@ -273,7 +293,7 @@
                 CHECK_EQ((status_t)OK,
                          state->mCodec->getOutputBuffers(&state->mOutBuffers));
 
-                ALOGV("got %d output buffers", state->mOutBuffers.size());
+                ALOGV("got %zu output buffers", state->mOutBuffers.size());
             } else if (err == INFO_FORMAT_CHANGED) {
                 sp<AMessage> format;
                 CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
@@ -293,17 +313,17 @@
         CHECK_EQ((status_t)OK, state->mCodec->release());
 
         if (state->mIsAudio) {
-            printf("track %zu: %" PRId64 " bytes received. %.2f KB/sec\n",
+            printf("track %zu: %lld bytes received. %.2f KB/sec\n",
                    i,
-                   state->mNumBytesDecoded,
+                   (long long)state->mNumBytesDecoded,
                    state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
         } else {
-            printf("track %zu: %" PRId64 " frames decoded, %.2f fps. %" PRId64
+            printf("track %zu: %lld frames decoded, %.2f fps. %lld"
                     " bytes received. %.2f KB/sec\n",
                    i,
-                   state->mNumBuffersDecoded,
+                   (long long)state->mNumBuffersDecoded,
                    state->mNumBuffersDecoded * 1E6 / elapsedTimeUs,
-                   state->mNumBytesDecoded,
+                   (long long)state->mNumBytesDecoded,
                    state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
         }
     }
@@ -320,34 +340,42 @@
     bool useVideo = false;
     bool playback = false;
     bool useSurface = false;
+    bool renderSurface = false;
+    bool useTimestamp = false;
 
     int res;
-    while ((res = getopt(argc, argv, "havpSD")) >= 0) {
+    while ((res = getopt(argc, argv, "havpSDRT")) >= 0) {
         switch (res) {
             case 'a':
             {
                 useAudio = true;
                 break;
             }
-
             case 'v':
             {
                 useVideo = true;
                 break;
             }
-
             case 'p':
             {
                 playback = true;
                 break;
             }
-
+            case 'T':
+            {
+                useTimestamp = true;
+            }
+            // fall through
+            case 'R':
+            {
+                renderSurface = true;
+            }
+            // fall through
             case 'S':
             {
                 useSurface = true;
                 break;
             }
-
             case '?':
             case 'h':
             default:
@@ -390,7 +418,7 @@
         ssize_t displayWidth = info.w;
         ssize_t displayHeight = info.h;
 
-        ALOGV("display is %ld x %ld\n", displayWidth, displayHeight);
+        ALOGV("display is %zd x %zd\n", displayWidth, displayHeight);
 
         control = composerClient->createSurface(
                 String8("A Surface"),
@@ -422,7 +450,8 @@
         player->stop();
         player->reset();
     } else {
-        decode(looper, argv[0], useAudio, useVideo, surface);
+        decode(looper, argv[0], useAudio, useVideo, surface, renderSurface,
+                useTimestamp);
     }
 
     if (playback || (useSurface && useVideo)) {
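
The new -R/-T paths in codec.cpp render decoded buffers to the surface instead of just releasing them; with -T the tool anchors playback roughly two vsyncs (about 33 ms at 60 Hz) after the first decoded frame and then translates each presentation timestamp (microseconds of stream time) into the monotonic-clock nanosecond timeline that renderOutputBufferAndRelease(index, timestampNs) expects. In the option parsing, -T deliberately falls through to -R, and -R to -S. The timestamp arithmetic, pulled out of the hunk into a standalone illustration (plain C++, no Android dependencies; names are illustrative):

    #include <cstdint>
    #include <cstdio>

    // Illustrative only: maps a stream presentation time (us) to a render
    // time (ns) on the monotonic clock, anchored ~2 vsyncs after "now",
    // matching the math added to codec.cpp above.
    static int64_t toRenderTimeNs(int64_t presentationTimeUs,
                                  int64_t nowNs,
                                  int64_t *startTimeRenderNs /* inout, -1 = unset */) {
        if (*startTimeRenderNs == -1) {
            // first frame: render it 33 ms from now, and remember the offset
            *startTimeRenderNs = nowNs + 33000000LL - presentationTimeUs * 1000LL;
        }
        return presentationTimeUs * 1000LL + *startTimeRenderNs;
    }

    int main() {
        int64_t start = -1;
        int64_t nowNs = 1000000000LL;  // pretend "now" is t = 1 s on the monotonic clock
        printf("%lld\n", (long long)toRenderTimeNs(0,     nowNs, &start));  // 1.033 s
        printf("%lld\n", (long long)toRenderTimeNs(16667, nowNs, &start));  // ~1.0497 s
        return 0;
    }
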
diff --git a/include/media/nbaio/roundup.h b/cmds/stagefright/filters/argbtorgba.rs
similarity index 65%
copy from include/media/nbaio/roundup.h
copy to cmds/stagefright/filters/argbtorgba.rs
index 4c3cc25..229ff8c 100644
--- a/include/media/nbaio/roundup.h
+++ b/cmds/stagefright/filters/argbtorgba.rs
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2014 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,13 @@
  * limitations under the License.
  */
 
-#ifndef ROUNDUP_H
-#define ROUNDUP_H
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
 
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-// Round up to the next highest power of 2
-unsigned roundup(unsigned v);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif  // ROUNDUP_H
+void root(const uchar4 *v_in, uchar4 *v_out) {
+    v_out->x = v_in->y;
+    v_out->y = v_in->z;
+    v_out->z = v_in->w;
+    v_out->w = v_in->x;
+}
\ No newline at end of file
diff --git a/cmds/stagefright/filters/nightvision.rs b/cmds/stagefright/filters/nightvision.rs
new file mode 100644
index 0000000..f61413c
--- /dev/null
+++ b/cmds/stagefright/filters/nightvision.rs
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+const static float3 gNightVisionMult = {0.5f, 1.f, 0.5f};
+
+// calculates luminance of pixel, then biases color balance toward green
+void root(const uchar4 *v_in, uchar4 *v_out) {
+    v_out->x = v_in->x; // don't modify A
+
+    // get RGB, scale 0-255 uchar to 0-1.0 float
+    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+            v_in->w * 0.003921569f};
+
+    // apply filter
+    float3 result = dot(rgb, gMonoMult) * gNightVisionMult;
+
+    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
diff --git a/cmds/stagefright/filters/saturation.rs b/cmds/stagefright/filters/saturation.rs
new file mode 100644
index 0000000..1de9dd8
--- /dev/null
+++ b/cmds/stagefright/filters/saturation.rs
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+    v_out->x = v_in->x; // don't modify A
+
+    // get RGB, scale 0-255 uchar to 0-1.0 float
+    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+            v_in->w * 0.003921569f};
+
+    // apply saturation filter
+    float3 result = dot(rgb, gMonoMult);
+    result = mix(result, rgb, gSaturation);
+
+    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
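
Both RenderScript kernels above reduce a pixel to its Rec. 601 luminance with dot(rgb, {0.299, 0.587, 0.114}); nightvision.rs then scales that grey value by {0.5, 1, 0.5} to bias the result toward green, while saturation.rs linearly interpolates between the grey value and the original color, so gSaturation = 0 yields greyscale, 1 is the identity, and values above 1 oversaturate. The same per-pixel arithmetic in plain C++ (illustrative only, no RenderScript dependency):

    #include <cstdio>

    struct RGB { float r, g, b; };

    // Same math as saturation.rs, for one pixel with channels in [0, 1].
    static RGB saturate(RGB in, float gSaturation) {
        float luma = 0.299f * in.r + 0.587f * in.g + 0.114f * in.b;  // dot(rgb, gMonoMult)
        // mix(x, y, a) = x + (y - x) * a, as in the RenderScript kernel
        auto mix = [](float grey, float c, float s) { return grey + (c - grey) * s; };
        return { mix(luma, in.r, gSaturation),
                 mix(luma, in.g, gSaturation),
                 mix(luma, in.b, gSaturation) };
    }

    int main() {
        RGB p = { 0.8f, 0.2f, 0.4f };
        RGB grey = saturate(p, 0.0f);   // all channels collapse to the luminance, ~0.402
        RGB boost = saturate(p, 3.0f);  // what the test's SaturationRSFilter uses (set_gSaturation(3.f));
                                        // the .rs kernel clamps back into [0, 255] on conversion to uchar
        printf("grey  = %.3f %.3f %.3f\n", grey.r, grey.g, grey.b);
        printf("boost = %.3f %.3f %.3f\n", boost.r, boost.g, boost.b);
        return 0;
    }
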
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
new file mode 100644
index 0000000..1183112
--- /dev/null
+++ b/cmds/stagefright/mediafilter.cpp
@@ -0,0 +1,785 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "mediafilterTest"
+
+#include <inttypes.h>
+
+#include <binder/ProcessState.h>
+#include <filters/ColorConvert.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <media/stagefright/RenderScriptWrapper.h>
+#include <OMX_IVCommon.h>
+#include <ui/DisplayInfo.h>
+
+#include "RenderScript.h"
+#include "ScriptC_argbtorgba.h"
+#include "ScriptC_nightvision.h"
+#include "ScriptC_saturation.h"
+
+// test parameters
+static const bool kTestFlush = true;        // Note: true will drop 1 out of
+static const int kFlushAfterFrames = 25;    // kFlushAfterFrames output frames
+static const int64_t kTimeout = 500ll;
+
+// built-in filter parameters
+static const int32_t kInvert = false;   // ZeroFilter param
+static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param
+static const float kSaturation = 0.0f;  // SaturationFilter param
+
+static void usage(const char *me) {
+    fprintf(stderr, "usage: [flags] %s\n"
+                    "\t[-b] use IntrinsicBlurFilter\n"
+                    "\t[-c] use argb to rgba conversion RSFilter\n"
+                    "\t[-n] use night vision RSFilter\n"
+                    "\t[-r] use saturation RSFilter\n"
+                    "\t[-s] use SaturationFilter\n"
+                    "\t[-z] use ZeroFilter (copy filter)\n"
+                    "\t[-R] render output to surface (enables -S)\n"
+                    "\t[-S] allocate buffers from a surface\n"
+                    "\t[-T] use render timestamps (enables -R)\n",
+                    me);
+    exit(1);
+}
+
+namespace android {
+
+struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
+    void init(RSC::sp<RSC::RS> context) {
+        mScript = new ScriptC_saturation(context);
+        mScript->set_gSaturation(3.f);
+    }
+
+    virtual status_t processBuffers(
+            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+        mScript->forEach_root(inBuffer, outBuffer);
+
+        return OK;
+    }
+
+    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
+        return OK;
+    }
+
+private:
+    RSC::sp<ScriptC_saturation> mScript;
+};
+
+struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
+    void init(RSC::sp<RSC::RS> context) {
+        mScript = new ScriptC_nightvision(context);
+    }
+
+    virtual status_t processBuffers(
+            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+        mScript->forEach_root(inBuffer, outBuffer);
+
+        return OK;
+    }
+
+    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
+        return OK;
+    }
+
+private:
+    RSC::sp<ScriptC_nightvision> mScript;
+};
+
+struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
+    void init(RSC::sp<RSC::RS> context) {
+        mScript = new ScriptC_argbtorgba(context);
+    }
+
+    virtual status_t processBuffers(
+            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+        mScript->forEach_root(inBuffer, outBuffer);
+
+        return OK;
+    }
+
+    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
+        return OK;
+    }
+
+private:
+    RSC::sp<ScriptC_argbtorgba> mScript;
+};
+
+struct CodecState {
+    sp<MediaCodec> mCodec;
+    Vector<sp<ABuffer> > mInBuffers;
+    Vector<sp<ABuffer> > mOutBuffers;
+    bool mSignalledInputEOS;
+    bool mSawOutputEOS;
+    int64_t mNumBuffersDecoded;
+};
+
+struct DecodedFrame {
+    size_t index;
+    size_t offset;
+    size_t size;
+    int64_t presentationTimeUs;
+    uint32_t flags;
+};
+
+enum FilterType {
+    FILTERTYPE_ZERO,
+    FILTERTYPE_INTRINSIC_BLUR,
+    FILTERTYPE_SATURATION,
+    FILTERTYPE_RS_SATURATION,
+    FILTERTYPE_RS_NIGHT_VISION,
+    FILTERTYPE_RS_ARGB_TO_RGBA,
+};
+
+size_t inputFramesSinceFlush = 0;
+void tryCopyDecodedBuffer(
+        List<DecodedFrame> *decodedFrameIndices,
+        CodecState *filterState,
+        CodecState *vidState) {
+    if (decodedFrameIndices->empty()) {
+        return;
+    }
+
+    size_t filterIndex;
+    status_t err = filterState->mCodec->dequeueInputBuffer(
+            &filterIndex, kTimeout);
+    if (err != OK) {
+        return;
+    }
+
+    ++inputFramesSinceFlush;
+
+    DecodedFrame frame = *decodedFrameIndices->begin();
+
+    // only consume a buffer if we are not going to flush, since we expect
+    // the dequeue -> flush -> queue operation to cause an error and
+    // not produce an output frame
+    if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
+        decodedFrameIndices->erase(decodedFrameIndices->begin());
+    }
+    size_t outIndex = frame.index;
+
+    const sp<ABuffer> &srcBuffer =
+        vidState->mOutBuffers.itemAt(outIndex);
+    const sp<ABuffer> &destBuffer =
+        filterState->mInBuffers.itemAt(filterIndex);
+
+    sp<AMessage> srcFormat, destFormat;
+    vidState->mCodec->getOutputFormat(&srcFormat);
+    filterState->mCodec->getInputFormat(&destFormat);
+
+    int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
+    int32_t srcColorFormat, destColorFormat;
+    int32_t destWidth, destHeight, destStride, destSliceHeight;
+    CHECK(srcFormat->findInt32("stride", &srcStride)
+            && srcFormat->findInt32("slice-height", &srcSliceHeight)
+            && srcFormat->findInt32("width", &srcWidth)
+            && srcFormat->findInt32("height", & srcHeight)
+            && srcFormat->findInt32("color-format", &srcColorFormat));
+    CHECK(destFormat->findInt32("stride", &destStride)
+            && destFormat->findInt32("slice-height", &destSliceHeight)
+            && destFormat->findInt32("width", &destWidth)
+            && destFormat->findInt32("height", & destHeight)
+            && destFormat->findInt32("color-format", &destColorFormat));
+
+    CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);
+
+    convertYUV420spToARGB(
+            srcBuffer->data(),
+            srcBuffer->data() + srcStride * srcSliceHeight,
+            srcWidth,
+            srcHeight,
+            destBuffer->data());
+
+    // copy timestamp
+    int64_t timeUs;
+    CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
+    destBuffer->meta()->setInt64("timeUs", timeUs);
+
+    if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
+        inputFramesSinceFlush = 0;
+
+        // check that queueing a buffer that was dequeued before flush
+        // fails with expected error EACCES
+        filterState->mCodec->flush();
+
+        err = filterState->mCodec->queueInputBuffer(
+                filterIndex, 0 /* offset */, destBuffer->size(),
+                timeUs, frame.flags);
+
+        if (err == OK) {
+            ALOGE("FAIL: queue after flush returned OK");
+        } else if (err != -EACCES) {
+            ALOGE("queueInputBuffer after flush returned %d, "
+                    "expected -EACCES (-13)", err);
+        }
+    } else {
+        err = filterState->mCodec->queueInputBuffer(
+                filterIndex, 0 /* offset */, destBuffer->size(),
+                timeUs, frame.flags);
+        CHECK(err == OK);
+
+        err = vidState->mCodec->releaseOutputBuffer(outIndex);
+        CHECK(err == OK);
+    }
+}
+
+size_t outputFramesSinceFlush = 0;
+void tryDrainOutputBuffer(
+        CodecState *filterState,
+        const sp<Surface> &surface, bool renderSurface,
+        bool useTimestamp, int64_t *startTimeRender) {
+    size_t index;
+    size_t offset;
+    size_t size;
+    int64_t presentationTimeUs;
+    uint32_t flags;
+    status_t err = filterState->mCodec->dequeueOutputBuffer(
+            &index, &offset, &size, &presentationTimeUs, &flags,
+            kTimeout);
+
+    if (err != OK) {
+        return;
+    }
+
+    ++outputFramesSinceFlush;
+
+    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
+        filterState->mCodec->flush();
+    }
+
+    if (surface == NULL || !renderSurface) {
+        err = filterState->mCodec->releaseOutputBuffer(index);
+    } else if (useTimestamp) {
+        if (*startTimeRender == -1) {
+            // begin rendering 2 vsyncs after first decode
+            *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
+                    + 33000000 - (presentationTimeUs * 1000);
+        }
+        presentationTimeUs =
+                (presentationTimeUs * 1000) + *startTimeRender;
+        err = filterState->mCodec->renderOutputBufferAndRelease(
+                index, presentationTimeUs);
+    } else {
+        err = filterState->mCodec->renderOutputBufferAndRelease(index);
+    }
+
+    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
+        outputFramesSinceFlush = 0;
+
+        // releasing the buffer dequeued before flush should cause an error
+        // if so, the frame will also be skipped in output stream
+        if (err == OK) {
+            ALOGE("FAIL: release after flush returned OK");
+        } else if (err != -EACCES) {
+            ALOGE("releaseOutputBuffer after flush returned %d, "
+                    "expected -EACCES (-13)", err);
+        }
+    } else {
+        CHECK(err == OK);
+    }
+
+    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+        ALOGV("reached EOS on output.");
+        filterState->mSawOutputEOS = true;
+    }
+}
+
+static int decode(
+        const sp<ALooper> &looper,
+        const char *path,
+        const sp<Surface> &surface,
+        bool renderSurface,
+        bool useTimestamp,
+        FilterType filterType) {
+
+    static int64_t kTimeout = 500ll;
+
+    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
+        fprintf(stderr, "unable to instantiate extractor.\n");
+        return 1;
+    }
+
+    KeyedVector<size_t, CodecState> stateByTrack;
+
+    CodecState *vidState = NULL;
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<AMessage> format;
+        status_t err = extractor->getTrackFormat(i, &format);
+        CHECK(err == OK);
+
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
+        if (!isVideo) {
+            continue;
+        }
+
+        ALOGV("selecting track %zu", i);
+
+        err = extractor->selectTrack(i);
+        CHECK(err == OK);
+
+        CodecState *state =
+            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
+
+        vidState = state;
+
+        state->mNumBuffersDecoded = 0;
+
+        state->mCodec = MediaCodec::CreateByType(
+                looper, mime.c_str(), false /* encoder */);
+
+        CHECK(state->mCodec != NULL);
+
+        err = state->mCodec->configure(
+                format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
+
+        CHECK(err == OK);
+
+        state->mSignalledInputEOS = false;
+        state->mSawOutputEOS = false;
+
+        break;
+    }
+    CHECK(!stateByTrack.isEmpty());
+    CHECK(vidState != NULL);
+    sp<AMessage> vidFormat;
+    vidState->mCodec->getOutputFormat(&vidFormat);
+
+    // set filter to use ARGB8888
+    vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
+    // set app cache directory path
+    vidFormat->setString("cacheDir", "/system/bin");
+
+    // create RenderScript context for RSFilters
+    RSC::sp<RSC::RS> context = new RSC::RS();
+    context->init("/system/bin");
+
+    sp<RenderScriptWrapper::RSFilterCallback> rsFilter;
+
+    // create renderscript wrapper for RSFilters
+    sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
+    rsWrapper->mContext = context.get();
+
+    CodecState *filterState = new CodecState();
+    filterState->mNumBuffersDecoded = 0;
+
+    sp<AMessage> params = new AMessage();
+
+    switch (filterType) {
+        case FILTERTYPE_ZERO:
+        {
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.zerofilter");
+            params->setInt32("invert", kInvert);
+            break;
+        }
+        case FILTERTYPE_INTRINSIC_BLUR:
+        {
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.intrinsicblur");
+            params->setFloat("blur-radius", kBlurRadius);
+            break;
+        }
+        case FILTERTYPE_SATURATION:
+        {
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.saturation");
+            params->setFloat("saturation", kSaturation);
+            break;
+        }
+        case FILTERTYPE_RS_SATURATION:
+        {
+            SaturationRSFilter *satFilter = new SaturationRSFilter;
+            satFilter->init(context);
+            rsFilter = satFilter;
+            rsWrapper->mCallback = rsFilter;
+            vidFormat->setObject("rs-wrapper", rsWrapper);
+
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.RenderScript");
+            break;
+        }
+        case FILTERTYPE_RS_NIGHT_VISION:
+        {
+            NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
+            nightVisionFilter->init(context);
+            rsFilter = nightVisionFilter;
+            rsWrapper->mCallback = rsFilter;
+            vidFormat->setObject("rs-wrapper", rsWrapper);
+
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.RenderScript");
+            break;
+        }
+        case FILTERTYPE_RS_ARGB_TO_RGBA:
+        {
+            ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
+            argbToRgbaFilter->init(context);
+            rsFilter = argbToRgbaFilter;
+            rsWrapper->mCallback = rsFilter;
+            vidFormat->setObject("rs-wrapper", rsWrapper);
+
+            filterState->mCodec = MediaCodec::CreateByComponentName(
+                    looper, "android.filter.RenderScript");
+            break;
+        }
+        default:
+        {
+            LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType");
+            break;
+        }
+    }
+    CHECK(filterState->mCodec != NULL);
+
+    status_t err = filterState->mCodec->configure(
+            vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
+    CHECK(err == OK);
+
+    filterState->mSignalledInputEOS = false;
+    filterState->mSawOutputEOS = false;
+
+    int64_t startTimeUs = ALooper::GetNowUs();
+    int64_t startTimeRender = -1;
+
+    for (size_t i = 0; i < stateByTrack.size(); ++i) {
+        CodecState *state = &stateByTrack.editValueAt(i);
+
+        sp<MediaCodec> codec = state->mCodec;
+
+        CHECK_EQ((status_t)OK, codec->start());
+
+        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
+        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
+
+        ALOGV("got %zu input and %zu output buffers",
+                state->mInBuffers.size(), state->mOutBuffers.size());
+    }
+
+    CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));
+
+    if (kTestFlush) {
+        status_t flushErr = filterState->mCodec->flush();
+        if (flushErr == OK) {
+            ALOGE("FAIL: Flush before start returned OK");
+        } else {
+            ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
+                    flushErr);
+        }
+    }
+
+    CHECK_EQ((status_t)OK, filterState->mCodec->start());
+    CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
+            &filterState->mInBuffers));
+    CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
+            &filterState->mOutBuffers));
+
+    if (kTestFlush) {
+        status_t flushErr = filterState->mCodec->flush();
+        if (flushErr != OK) {
+            ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
+                    flushErr);
+        } else {
+            ALOGV("Flush immediately after start OK");
+        }
+    }
+
+    List<DecodedFrame> decodedFrameIndices;
+
+    // loop until decoder reaches EOS
+    bool sawInputEOS = false;
+    bool sawOutputEOSOnAllTracks = false;
+    while (!sawOutputEOSOnAllTracks) {
+        if (!sawInputEOS) {
+            size_t trackIndex;
+            status_t err = extractor->getSampleTrackIndex(&trackIndex);
+
+            if (err != OK) {
+                ALOGV("saw input eos");
+                sawInputEOS = true;
+            } else {
+                CodecState *state = &stateByTrack.editValueFor(trackIndex);
+
+                size_t index;
+                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+                if (err == OK) {
+                    ALOGV("filling input buffer %zu", index);
+
+                    const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+                    err = extractor->readSampleData(buffer);
+                    CHECK(err == OK);
+
+                    int64_t timeUs;
+                    err = extractor->getSampleTime(&timeUs);
+                    CHECK(err == OK);
+
+                    uint32_t bufferFlags = 0;
+
+                    err = state->mCodec->queueInputBuffer(
+                            index, 0 /* offset */, buffer->size(),
+                            timeUs, bufferFlags);
+
+                    CHECK(err == OK);
+
+                    extractor->advance();
+                } else {
+                    CHECK_EQ(err, -EAGAIN);
+                }
+            }
+        } else {
+            for (size_t i = 0; i < stateByTrack.size(); ++i) {
+                CodecState *state = &stateByTrack.editValueAt(i);
+
+                if (!state->mSignalledInputEOS) {
+                    size_t index;
+                    status_t err =
+                        state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+                    if (err == OK) {
+                        ALOGV("signalling input EOS on track %zu", i);
+
+                        err = state->mCodec->queueInputBuffer(
+                                index, 0 /* offset */, 0 /* size */,
+                                0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);
+
+                        CHECK(err == OK);
+
+                        state->mSignalledInputEOS = true;
+                    } else {
+                        CHECK_EQ(err, -EAGAIN);
+                    }
+                }
+            }
+        }
+
+        sawOutputEOSOnAllTracks = true;
+        for (size_t i = 0; i < stateByTrack.size(); ++i) {
+            CodecState *state = &stateByTrack.editValueAt(i);
+
+            if (state->mSawOutputEOS) {
+                continue;
+            } else {
+                sawOutputEOSOnAllTracks = false;
+            }
+
+            DecodedFrame frame;
+            status_t err = state->mCodec->dequeueOutputBuffer(
+                    &frame.index, &frame.offset, &frame.size,
+                    &frame.presentationTimeUs, &frame.flags, kTimeout);
+
+            if (err == OK) {
+                ALOGV("draining decoded buffer %zu, time = %lld us",
+                        frame.index, (long long)frame.presentationTimeUs);
+
+                ++(state->mNumBuffersDecoded);
+
+                decodedFrameIndices.push_back(frame);
+
+                if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
+                    ALOGV("reached EOS on decoder output.");
+                    state->mSawOutputEOS = true;
+                }
+
+            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
+                CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
+                        &state->mOutBuffers));
+
+                ALOGV("got %zu output buffers", state->mOutBuffers.size());
+            } else if (err == INFO_FORMAT_CHANGED) {
+                sp<AMessage> format;
+                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
+
+                ALOGV("INFO_FORMAT_CHANGED: %s",
+                        format->debugString().c_str());
+            } else {
+                CHECK_EQ(err, -EAGAIN);
+            }
+
+            tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
+
+            tryDrainOutputBuffer(
+                    filterState, surface, renderSurface,
+                    useTimestamp, &startTimeRender);
+        }
+    }
+
+    // after EOS on decoder, let filter reach EOS
+    while (!filterState->mSawOutputEOS) {
+        tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
+
+        tryDrainOutputBuffer(
+                filterState, surface, renderSurface,
+                useTimestamp, &startTimeRender);
+    }
+
+    int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;
+
+    for (size_t i = 0; i < stateByTrack.size(); ++i) {
+        CodecState *state = &stateByTrack.editValueAt(i);
+
+        CHECK_EQ((status_t)OK, state->mCodec->release());
+
+        printf("track %zu: %" PRId64 " frames decoded and filtered, "
+                "%.2f fps.\n", i, state->mNumBuffersDecoded,
+                state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
+    }
+
+    return 0;
+}
+
+}  // namespace android
+
+int main(int argc, char **argv) {
+    using namespace android;
+
+    const char *me = argv[0];
+
+    bool useSurface = false;
+    bool renderSurface = false;
+    bool useTimestamp = false;
+    FilterType filterType = FILTERTYPE_ZERO;
+
+    int res;
+    while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
+        switch (res) {
+            case 'b':
+            {
+                filterType = FILTERTYPE_INTRINSIC_BLUR;
+                break;
+            }
+            case 'c':
+            {
+                filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
+                break;
+            }
+            case 'n':
+            {
+                filterType = FILTERTYPE_RS_NIGHT_VISION;
+                break;
+            }
+            case 'r':
+            {
+                filterType = FILTERTYPE_RS_SATURATION;
+                break;
+            }
+            case 's':
+            {
+                filterType = FILTERTYPE_SATURATION;
+                break;
+            }
+            case 'z':
+            {
+                filterType = FILTERTYPE_ZERO;
+                break;
+            }
+            case 'T':
+            {
+                useTimestamp = true;
+            }
+            // fall through
+            case 'R':
+            {
+                renderSurface = true;
+            }
+            // fall through
+            case 'S':
+            {
+                useSurface = true;
+                break;
+            }
+            case '?':
+            case 'h':
+            default:
+            {
+                usage(me);
+                break;
+            }
+        }
+    }
+
+    argc -= optind;
+    argv += optind;
+
+    if (argc != 1) {
+        usage(me);
+    }
+
+    ProcessState::self()->startThreadPool();
+
+    DataSource::RegisterDefaultSniffers();
+
+    android::sp<ALooper> looper = new ALooper;
+    looper->start();
+
+    android::sp<SurfaceComposerClient> composerClient;
+    android::sp<SurfaceControl> control;
+    android::sp<Surface> surface;
+
+    if (useSurface) {
+        composerClient = new SurfaceComposerClient;
+        CHECK_EQ((status_t)OK, composerClient->initCheck());
+
+        android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+                ISurfaceComposer::eDisplayIdMain));
+        DisplayInfo info;
+        SurfaceComposerClient::getDisplayInfo(display, &info);
+        ssize_t displayWidth = info.w;
+        ssize_t displayHeight = info.h;
+
+        ALOGV("display is %zd x %zd", displayWidth, displayHeight);
+
+        control = composerClient->createSurface(
+                String8("A Surface"), displayWidth, displayHeight,
+                PIXEL_FORMAT_RGBA_8888, 0);
+
+        CHECK(control != NULL);
+        CHECK(control->isValid());
+
+        SurfaceComposerClient::openGlobalTransaction();
+        CHECK_EQ((status_t)OK, control->setLayer(INT_MAX));
+        CHECK_EQ((status_t)OK, control->show());
+        SurfaceComposerClient::closeGlobalTransaction();
+
+        surface = control->getSurface();
+        CHECK(surface != NULL);
+    }
+
+    decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);
+
+    if (useSurface) {
+        composerClient->dispose();
+    }
+
+    looper->stop();
+
+    return 0;
+}
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index f4a33e8..36fa3b5 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -17,6 +17,9 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "muxer"
 #include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 #include <utils/Log.h>
 
 #include <binder/ProcessState.h>
@@ -50,7 +53,6 @@
 using namespace android;
 
 static int muxing(
-        const android::sp<android::ALooper> &looper,
         const char *path,
         bool useAudio,
         bool useVideo,
@@ -72,8 +74,15 @@
     ALOGV("input file %s, output file %s", path, outputFileName);
     ALOGV("useAudio %d, useVideo %d", useAudio, useVideo);
 
-    sp<MediaMuxer> muxer = new MediaMuxer(outputFileName,
+    int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+
+    if (fd < 0) {
+        ALOGE("couldn't open file");
+        return fd;
+    }
+    sp<MediaMuxer> muxer = new MediaMuxer(fd,
                                           MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+    close(fd);
 
     size_t trackCount = extractor->countTracks();
     // Map the extractor's track index to the muxer's track index.
@@ -127,14 +136,19 @@
             }
         }
 
-        ALOGV("selecting track %d", i);
+        ALOGV("selecting track %zu", i);
 
         err = extractor->selectTrack(i);
         CHECK_EQ(err, (status_t)OK);
 
         ssize_t newTrackIndex = muxer->addTrack(format);
-        CHECK_GE(newTrackIndex, 0);
-        trackIndexMap.add(i, newTrackIndex);
+        if (newTrackIndex < 0) {
+            fprintf(stderr, "%s track (%zu) unsupported by muxer\n",
+                    isAudio ? "audio" : "video",
+                    i);
+        } else {
+            trackIndexMap.add(i, newTrackIndex);
+        }
     }
 
     int64_t muxerStartTimeUs = ALooper::GetNowUs();
@@ -153,7 +167,12 @@
             ALOGV("saw input eos, err %d", err);
             sawInputEOS = true;
             break;
+        } else if (trackIndexMap.indexOfKey(trackIndex) < 0) {
+            // ALOGV("skipping input from unsupported track %zu", trackIndex);
+            extractor->advance();
+            continue;
         } else {
+            // ALOGV("reading sample from track index %zu\n", trackIndex);
             err = extractor->readSampleData(newBuffer);
             CHECK_EQ(err, (status_t)OK);
 
@@ -298,7 +317,7 @@
     sp<ALooper> looper = new ALooper;
     looper->start();
 
-    int result = muxing(looper, argv[0], useAudio, useVideo, outputFileName,
+    int result = muxing(argv[0], useAudio, useVideo, outputFileName,
                         enableTrim, trimStartTimeMs, trimEndTimeMs, rotationDegrees);
 
     looper->stop();
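
The hunks above switch MediaMuxer over to a caller-supplied file descriptor; the tool opens the output itself and closes its copy of the descriptor right after constructing the muxer. Condensed into a standalone helper (a hedged sketch built on the post-patch fd-based constructor, not code from this change; createMuxer is a hypothetical name):

    #include <fcntl.h>
    #include <sys/stat.h>
    #include <unistd.h>

    #include <media/stagefright/MediaMuxer.h>

    // Sketch only: open the output, hand the fd to the muxer, then close our
    // copy -- muxer.cpp above does exactly this, so the writer evidently keeps
    // its own reference to the descriptor.
    static android::sp<android::MediaMuxer> createMuxer(const char *outputFileName) {
        int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
                      S_IRUSR | S_IWUSR);
        if (fd < 0) {
            return NULL;
        }
        android::sp<android::MediaMuxer> muxer = new android::MediaMuxer(
                fd, android::MediaMuxer::OUTPUT_FORMAT_MPEG_4);
        close(fd);
        return muxer;
    }

recordvideo.cpp and stagefright.cpp below apply the same open/construct/close pattern to MPEG4Writer and MPEG2TSWriter.
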
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index fdc352e..594c933 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -32,13 +32,13 @@
 
 using namespace android;
 
+static const int32_t kAudioBitRate = 12200;
+#if 0
 static const int32_t kFramerate = 24;  // fps
 static const int32_t kIFramesIntervalSec = 1;
 static const int32_t kVideoBitRate = 512 * 1024;
-static const int32_t kAudioBitRate = 12200;
 static const int64_t kDurationUs = 10000000LL;  // 10 seconds
 
-#if 0
 class DummySource : public MediaSource {
 
 public:
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
index 9f547c7..2ad40bd 100644
--- a/cmds/stagefright/recordvideo.cpp
+++ b/cmds/stagefright/recordvideo.cpp
@@ -17,6 +17,10 @@
 #include "SineSource.h"
 
 #include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
 #include <binder/ProcessState.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/AudioPlayer.h>
@@ -300,7 +304,13 @@
                 client.interface(), enc_meta, true /* createEncoder */, source,
                 0, preferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0);
 
-    sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (fd < 0) {
+        fprintf(stderr, "couldn't open file %s\n", fileName);
+        return 1;
+    }
+    sp<MPEG4Writer> writer = new MPEG4Writer(fd);
+    close(fd);
     writer->addSource(encoder);
     int64_t start = systemTime();
     CHECK_EQ((status_t)OK, writer->start());
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index 0f729a3..0d64d2f 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -38,10 +38,10 @@
 #include <media/stagefright/MediaExtractor.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/NativeWindowWrapper.h>
 #include <media/stagefright/Utils.h>
 
 #include <gui/SurfaceComposerClient.h>
+#include <gui/Surface.h>
 
 #include "include/ESDS.h"
 
@@ -72,7 +72,7 @@
     }
 
     void startAsync() {
-        (new AMessage(kWhatStart, id()))->post();
+        (new AMessage(kWhatStart, this))->post();
     }
 
 protected:
@@ -100,7 +100,7 @@
         if (ctrlc) {
             printf("\n");
             printStatistics();
-            (new AMessage(kWhatStop, id()))->post();
+            (new AMessage(kWhatStop, this))->post();
             ctrlc = false;
         }
         switch (msg->what()) {
@@ -149,13 +149,12 @@
                 mDecodeLooper->registerHandler(mCodec);
 
                 mCodec->setNotificationMessage(
-                        new AMessage(kWhatCodecNotify, id()));
+                        new AMessage(kWhatCodecNotify, this));
 
                 sp<AMessage> format = makeFormat(mSource->getFormat());
 
                 if (mSurface != NULL) {
-                    format->setObject(
-                            "native-window", new NativeWindowWrapper(mSurface));
+                    format->setObject("surface", mSurface);
                 }
 
                 mCodec->initiateSetup(format);
@@ -168,7 +167,7 @@
                 mFinalResult = OK;
                 mSeekState = SEEK_NONE;
 
-                // (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                // (new AMessage(kWhatSeek, this))->post(5000000ll);
                 break;
             }
 
@@ -225,12 +224,12 @@
                     printf((what == CodecBase::kWhatEOS) ? "$\n" : "E\n");
 
                     printStatistics();
-                    (new AMessage(kWhatStop, id()))->post();
+                    (new AMessage(kWhatStop, this))->post();
                 } else if (what == CodecBase::kWhatFlushCompleted) {
                     mSeekState = SEEK_FLUSH_COMPLETED;
                     mCodec->signalResume();
 
-                    (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                    (new AMessage(kWhatSeek, this))->post(5000000ll);
                 } else if (what == CodecBase::kWhatOutputFormatChanged) {
                 } else if (what == CodecBase::kWhatShutdownCompleted) {
                     mDecodeLooper->unregisterHandler(mCodec->id());
@@ -328,14 +327,14 @@
 
             CHECK(size >= 7);
             CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
-            uint8_t profile = ptr[1];
-            uint8_t level = ptr[3];
+            uint8_t profile __unused = ptr[1];
+            uint8_t level __unused = ptr[3];
 
             // There is decodable content out there that fails the following
             // assertion, let's be lenient for now...
             // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
 
-            size_t lengthSize = 1 + (ptr[4] & 3);
+            size_t lengthSize __unused = 1 + (ptr[4] & 3);
 
             // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
             // violates it...
@@ -491,7 +490,7 @@
 
                 if (sizeNeeded > sizeLeft) {
                     if (outBuffer->size() == 0) {
-                        ALOGE("Unable to fit even a single input buffer of size %d.",
+                        ALOGE("Unable to fit even a single input buffer of size %zu.",
                              sizeNeeded);
                     }
                     CHECK_GT(outBuffer->size(), 0u);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 81edcb4..a9c6eda 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -19,6 +19,8 @@
 #include <stdlib.h>
 #include <string.h>
 #include <sys/time.h>
+#include <sys/types.h>
+#include <sys/stat.h>
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "stagefright"
@@ -506,8 +508,13 @@
     sp<MPEG4Writer> writer =
         new MPEG4Writer(gWriteMP4Filename.string());
 #else
+    int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (fd < 0) {
+        fprintf(stderr, "couldn't open file %s\n", gWriteMP4Filename.string());
+        return;
+    }
     sp<MPEG2TSWriter> writer =
-        new MPEG2TSWriter(gWriteMP4Filename.string());
+        new MPEG2TSWriter(fd);
 #endif
 
     // at most one minute.
@@ -958,7 +965,7 @@
     OMXClient client;
     status_t err = client.connect();
 
-    for (int k = 0; k < argc; ++k) {
+    for (int k = 0; k < argc && err == OK; ++k) {
         bool syncInfoPresent = true;
 
         const char *filename = argv[k];
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 0566d14..1a40e53 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -269,7 +269,7 @@
         : mEOS(false) {
     }
 
-    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) {
+    virtual void notify(int msg, int ext1 __unused, int ext2 __unused, const Parcel *obj __unused) {
         Mutex::Autolock autoLock(mLock);
 
         if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) {
@@ -318,7 +318,7 @@
     ssize_t displayWidth = info.w;
     ssize_t displayHeight = info.h;
 
-    ALOGV("display is %d x %d\n", displayWidth, displayHeight);
+    ALOGV("display is %zd x %zd\n", displayWidth, displayHeight);
 
     sp<SurfaceControl> control =
         composerClient->createSurface(
diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp
index 3f62ed7..b90da1b 100644
--- a/drm/common/IDrmManagerService.cpp
+++ b/drm/common/IDrmManagerService.cpp
@@ -34,6 +34,7 @@
 #include "IDrmManagerService.h"
 
 #define INVALID_BUFFER_LENGTH -1
+#define MAX_BINDER_TRANSACTION_SIZE ((1*1024*1024)-(4096*2))
 
 using namespace android;
 
@@ -933,7 +934,12 @@
 
         //Filling DRM info
         const int infoType = data.readInt32();
-        const int bufferSize = data.readInt32();
+        const uint32_t bufferSize = data.readInt32();
+
+        if (bufferSize > data.dataAvail()) {
+            return BAD_VALUE;
+        }
+
         char* buffer = NULL;
         if (0 < bufferSize) {
             buffer = (char *)data.readInplace(bufferSize);
@@ -986,6 +992,9 @@
 
         const int size = data.readInt32();
         for (int index = 0; index < size; ++index) {
+            if (!data.dataAvail()) {
+                break;
+            }
             const String8 key(data.readString8());
             if (key == String8("FileDescriptorKey")) {
                 char buffer[16];
@@ -1035,7 +1044,12 @@
         const int uniqueId = data.readInt32();
 
         //Filling DRM Rights
-        const int bufferSize = data.readInt32();
+        const uint32_t bufferSize = data.readInt32();
+        if (bufferSize > data.dataAvail()) {
+            reply->writeInt32(BAD_VALUE);
+            return DRM_NO_ERROR;
+        }
+
         const DrmBuffer drmBuffer((char *)data.readInplace(bufferSize), bufferSize);
 
         const String8 mimeType(data.readString8());
@@ -1206,10 +1220,13 @@
         const int convertId = data.readInt32();
 
         //Filling input data
-        const int bufferSize = data.readInt32();
+        const uint32_t bufferSize = data.readInt32();
+        if (bufferSize > data.dataAvail()) {
+            return BAD_VALUE;
+        }
         DrmBuffer* inputData = new DrmBuffer((char *)data.readInplace(bufferSize), bufferSize);
 
-        DrmConvertedStatus*    drmConvertedStatus = convertData(uniqueId, convertId, inputData);
+        DrmConvertedStatus* drmConvertedStatus = convertData(uniqueId, convertId, inputData);
 
         if (NULL != drmConvertedStatus) {
         //Filling Drm Converted Status
@@ -1393,7 +1410,12 @@
         const int decryptUnitId = data.readInt32();
 
         //Filling Header info
-        const int bufferSize = data.readInt32();
+        const uint32_t bufferSize = data.readInt32();
+        if (bufferSize > data.dataAvail()) {
+            reply->writeInt32(BAD_VALUE);
+            clearDecryptHandle(&handle);
+            return DRM_NO_ERROR;
+        }
         DrmBuffer* headerInfo = NULL;
         headerInfo = new DrmBuffer((char *)data.readInplace(bufferSize), bufferSize);
 
@@ -1417,9 +1439,17 @@
         readDecryptHandleFromParcelData(&handle, data);
 
         const int decryptUnitId = data.readInt32();
-        const int decBufferSize = data.readInt32();
+        const uint32_t decBufferSize = data.readInt32();
+        const uint32_t encBufferSize = data.readInt32();
 
-        const int encBufferSize = data.readInt32();
+        if (encBufferSize > data.dataAvail() ||
+            decBufferSize > MAX_BINDER_TRANSACTION_SIZE) {
+            reply->writeInt32(BAD_VALUE);
+            reply->writeInt32(0);
+            clearDecryptHandle(&handle);
+            return DRM_NO_ERROR;
+        }
+
         DrmBuffer* encBuffer
             = new DrmBuffer((char *)data.readInplace(encBufferSize), encBufferSize);
 
@@ -1429,8 +1459,10 @@
 
         DrmBuffer* IV = NULL;
         if (0 != data.dataAvail()) {
-            const int ivBufferlength = data.readInt32();
-            IV = new DrmBuffer((char *)data.readInplace(ivBufferlength), ivBufferlength);
+            const uint32_t ivBufferlength = data.readInt32();
+            if (ivBufferlength <= data.dataAvail()) {
+                IV = new DrmBuffer((char *)data.readInplace(ivBufferlength), ivBufferlength);
+            }
         }
 
         const status_t status
@@ -1477,7 +1509,11 @@
         DecryptHandle handle;
         readDecryptHandleFromParcelData(&handle, data);
 
-        const int numBytes = data.readInt32();
+        const uint32_t numBytes = data.readInt32();
+        if (numBytes > MAX_BINDER_TRANSACTION_SIZE) {
+            reply->writeInt32(BAD_VALUE);
+            return DRM_NO_ERROR;
+        }
         char* buffer = new char[numBytes];
 
         const off64_t offset = data.readInt64();
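
Every check added above follows the same shape: the length field read from the Parcel is untrusted, so it is compared against dataAvail() (or capped at MAX_BINDER_TRANSACTION_SIZE) before it is passed to readInplace() or used to size an allocation. In isolation the pattern looks like this (a sketch for illustration only; readSizedBlob is a hypothetical helper, not part of the patch):

    #include <binder/Parcel.h>
    #include <utils/Errors.h>

    // Sketch only: reject a claimed length that exceeds what the client
    // actually sent before touching the payload.
    static android::status_t readSizedBlob(const android::Parcel &data,
                                           const char **buffer, uint32_t *size) {
        const uint32_t bufferSize = data.readInt32();
        if (bufferSize > data.dataAvail()) {
            return android::BAD_VALUE;  // length is larger than the remaining data
        }
        *buffer = static_cast<const char *>(data.readInplace(bufferSize));
        *size = bufferSize;
        return (*buffer != NULL || bufferSize == 0) ? android::OK
                                                    : android::BAD_VALUE;
    }
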
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
index 9d15835..6a0b3c0 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
@@ -19,6 +19,7 @@
 #include <fcntl.h>
 #include <limits.h>
 #include <pthread.h>
+#include <stdlib.h>
 #include <string.h>
 #include <sys/stat.h>
 #include <unistd.h>
diff --git a/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp b/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
index ee3189b..eeb64c3 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
@@ -43,10 +43,18 @@
         return android::BAD_VALUE;
     }
 
-    android::sp<Session> session = SessionLibrary::get()->findSession(
-            data, size);
-    *plugin = new CryptoPlugin(session);
-    return android::OK;
+    android::Vector<uint8_t> sessionId;
+    sessionId.appendArray(reinterpret_cast<const uint8_t*>(data), size);
+
+    CryptoPlugin *clearKeyPlugin = new CryptoPlugin(sessionId);
+    android::status_t result = clearKeyPlugin->getInitStatus();
+    if (result == android::OK) {
+        *plugin = clearKeyPlugin;
+    } else {
+        delete clearKeyPlugin;
+        *plugin = NULL;
+    }
+    return result;
 }
 
 } // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
index adad136..53cbf80 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
@@ -19,9 +19,9 @@
 #include <utils/Log.h>
 
 #include <media/stagefright/MediaErrors.h>
-#include <utils/Errors.h>
 
 #include "CryptoPlugin.h"
+#include "SessionLibrary.h"
 
 namespace clearkeydrm {
 
@@ -80,4 +80,18 @@
     }
 }
 
+android::status_t CryptoPlugin::setMediaDrmSession(
+        const android::Vector<uint8_t>& sessionId) {
+    if (!sessionId.size()) {
+        mSession.clear();
+    } else {
+        mSession = SessionLibrary::get()->findSession(sessionId);
+        if (!mSession.get()) {
+            return android::ERROR_DRM_SESSION_NOT_OPENED;
+        }
+    }
+    return android::OK;
+}
+
+
 }  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
index 002d9e0..fd38f28 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
@@ -31,7 +31,10 @@
 
 class CryptoPlugin : public android::CryptoPlugin {
 public:
-    CryptoPlugin(const android::sp<Session>& session) : mSession(session) {}
+    CryptoPlugin(const android::Vector<uint8_t>& sessionId) {
+        mInitStatus = setMediaDrmSession(sessionId);
+    }
+
     virtual ~CryptoPlugin() {}
 
     virtual bool requiresSecureDecoderComponent(const char* mime) const {
@@ -45,10 +48,16 @@
             const SubSample* subSamples, size_t numSubSamples,
             void* dstPtr, android::AString* errorDetailMsg);
 
+    virtual android::status_t setMediaDrmSession(
+            const android::Vector<uint8_t>& sessionId);
+
+    android::status_t getInitStatus() const {return mInitStatus;}
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(CryptoPlugin);
 
     android::sp<Session> mSession;
+    android::status_t mInitStatus;
 };
 
 } // namespace clearkeydrm
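
With these changes the ClearKey CryptoPlugin is constructed from a raw session id and resolves it itself, so callers have to check getInitStatus() before using the plugin, and can later retarget it with setMediaDrmSession(). A hedged sketch of that contract, mirroring CryptoFactory::createPlugin above (makePlugin is a hypothetical helper, not part of the patch):

    #include <utils/Errors.h>
    #include <utils/Vector.h>

    #include "CryptoPlugin.h"  // clearkeydrm::CryptoPlugin

    // Sketch only: construct, then verify the session was actually found.
    static android::status_t makePlugin(const android::Vector<uint8_t> &sessionId,
                                        clearkeydrm::CryptoPlugin **out) {
        clearkeydrm::CryptoPlugin *plugin = new clearkeydrm::CryptoPlugin(sessionId);
        android::status_t err = plugin->getInitStatus();
        if (err != android::OK) {  // e.g. ERROR_DRM_SESSION_NOT_OPENED
            delete plugin;
            *out = NULL;
            return err;
        }
        *out = plugin;
        return android::OK;
    }
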
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index 96fca94..e5ee403 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -37,7 +37,9 @@
 
 status_t DrmPlugin::closeSession(const Vector<uint8_t>& sessionId) {
     sp<Session> session = mSessionLibrary->findSession(sessionId);
-    mSessionLibrary->destroySession(session);
+    if (session.get()) {
+        mSessionLibrary->destroySession(session);
+    }
     return android::OK;
 }
 
@@ -48,14 +50,18 @@
         KeyType keyType,
         const KeyedVector<String8, String8>& optionalParameters,
         Vector<uint8_t>& request,
-        String8& defaultUrl) {
+        String8& defaultUrl,
+        DrmPlugin::KeyRequestType *keyRequestType) {
     UNUSED(optionalParameters);
     if (keyType != kKeyType_Streaming) {
         return android::ERROR_DRM_CANNOT_HANDLE;
     }
-
-    sp<Session> session = mSessionLibrary->findSession(scope);
+    *keyRequestType = DrmPlugin::kKeyRequestType_Initial;
     defaultUrl.clear();
+    sp<Session> session = mSessionLibrary->findSession(scope);
+    if (!session.get()) {
+        return android::ERROR_DRM_SESSION_NOT_OPENED;
+    }
     return session->getKeyRequest(initData, initDataType, &request);
 }
 
@@ -64,6 +70,9 @@
         const Vector<uint8_t>& response,
         Vector<uint8_t>& keySetId) {
     sp<Session> session = mSessionLibrary->findSession(scope);
+    if (!session.get()) {
+        return android::ERROR_DRM_SESSION_NOT_OPENED;
+    }
     status_t res = session->provideKeyResponse(response);
     if (res == android::OK) {
         keySetId.clear();
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
index 6139f1f..ba4aefe 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
@@ -54,7 +54,8 @@
             KeyType keyType,
             const KeyedVector<String8, String8>& optionalParameters,
             Vector<uint8_t>& request,
-            String8& defaultUrl);
+            String8& defaultUrl,
+            DrmPlugin::KeyRequestType *keyRequestType);
 
     virtual status_t provideKeyResponse(
             const Vector<uint8_t>& scope,
diff --git a/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
index d047c53..46d7f77 100644
--- a/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
+++ b/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
@@ -63,13 +63,6 @@
     return mSessions.valueFor(sessionId);
 }
 
-const sp<Session>& SessionLibrary::findSession(
-        const void* data, size_t size) {
-    Vector<uint8_t> sessionId;
-    sessionId.appendArray(reinterpret_cast<const uint8_t*>(data), size);
-    return findSession(sessionId);
-}
-
 void SessionLibrary::destroySession(const sp<Session>& session) {
     Mutex::Autolock lock(mSessionsLock);\
     mSessions.removeItem(session->sessionId());
diff --git a/drm/mediadrm/plugins/clearkey/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/SessionLibrary.h
index 56c8828..199ad64 100644
--- a/drm/mediadrm/plugins/clearkey/SessionLibrary.h
+++ b/drm/mediadrm/plugins/clearkey/SessionLibrary.h
@@ -36,8 +36,6 @@
     const android::sp<Session>& findSession(
             const android::Vector<uint8_t>& sessionId);
 
-    const android::sp<Session>& findSession(const void* data, size_t size);
-
     void destroySession(const android::sp<Session>& session);
 
 private:
@@ -50,7 +48,7 @@
 
     android::Mutex mSessionsLock;
     uint32_t mNextSessionId;
-    android::KeyedVector<android::Vector<uint8_t>, android::sp<Session> >
+    android::DefaultKeyedVector<android::Vector<uint8_t>, android::sp<Session> >
             mSessions;
 };
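
The switch from KeyedVector to DefaultKeyedVector is what makes the new NULL checks above work: DefaultKeyedVector::valueFor() returns a default-constructed value (here a NULL sp<Session>) for an unknown key instead of asserting on the missing key, so findSession() can simply be tested with get(). A minimal sketch of that behavior (illustrative only; the int key and Session stub are stand-ins):

    #include <utils/KeyedVector.h>
    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    struct Session : public android::RefBase {};  // stand-in for the real class

    // Sketch only: a miss returns a NULL sp<Session> rather than aborting.
    static android::sp<Session> lookup(
            const android::DefaultKeyedVector<int, android::sp<Session> > &sessions,
            int id) {
        return sessions.valueFor(id);  // NULL if id is not in the table
    }
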
 
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
index 7eac0a1..851ad2c 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
@@ -56,7 +56,7 @@
         return true;
     }
 
-    status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin)
+    status_t MockDrmFactory::createDrmPlugin(const uint8_t /* uuid */[16], DrmPlugin **plugin)
     {
         *plugin = new MockDrmPlugin();
         return OK;
@@ -68,8 +68,9 @@
         return (!memcmp(uuid, mock_uuid, sizeof(mock_uuid)));
     }
 
-    status_t MockCryptoFactory::createPlugin(const uint8_t uuid[16], const void *data,
-                                             size_t size, CryptoPlugin **plugin)
+    status_t MockCryptoFactory::createPlugin(const uint8_t /* uuid */[16],
+                                             const void * /* data */,
+                                             size_t /* size */, CryptoPlugin **plugin)
     {
         *plugin = new MockCryptoPlugin();
         return OK;
@@ -111,7 +112,8 @@
                                           Vector<uint8_t> const &initData,
                                           String8 const &mimeType, KeyType keyType,
                                           KeyedVector<String8, String8> const &optionalParameters,
-                                          Vector<uint8_t> &request, String8 &defaultUrl)
+                                          Vector<uint8_t> &request, String8 &defaultUrl,
+                                          KeyRequestType *keyRequestType)
     {
         Mutex::Autolock lock(mLock);
         ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s"
@@ -149,6 +151,7 @@
         // Properties used in mock test, set by cts test app returned from mock plugin
         //   byte[] mock-request       -> request
         //   string mock-default-url   -> defaultUrl
+        //   string mock-keyRequestType -> keyRequestType
 
         index = mByteArrayProperties.indexOfKey(String8("mock-request"));
         if (index < 0) {
@@ -165,6 +168,16 @@
         } else {
             defaultUrl = mStringProperties.valueAt(index);
         }
+
+        index = mStringProperties.indexOfKey(String8("mock-keyRequestType"));
+        if (index < 0) {
+            ALOGD("Missing 'mock-keyRequestType' parameter for mock");
+            return BAD_VALUE;
+        } else {
+            *keyRequestType = static_cast<KeyRequestType>(
+                atoi(mStringProperties.valueAt(index).string()));
+        }
+
         return OK;
     }
 
@@ -254,8 +267,8 @@
         return OK;
     }
 
-    status_t MockDrmPlugin::getProvisionRequest(String8 const &certType,
-                                                String8 const &certAuthority,
+    status_t MockDrmPlugin::getProvisionRequest(String8 const & /* certType */,
+                                                String8 const & /* certAuthority */,
                                                 Vector<uint8_t> &request,
                                                 String8 &defaultUrl)
     {
@@ -285,8 +298,8 @@
     }
 
     status_t MockDrmPlugin::provideProvisionResponse(Vector<uint8_t> const &response,
-                                                     Vector<uint8_t> &certificate,
-                                                     Vector<uint8_t> &wrappedKey)
+                                                     Vector<uint8_t> & /* certificate */,
+                                                     Vector<uint8_t> & /* wrappedKey */)
     {
         Mutex::Autolock lock(mLock);
         ALOGD("MockDrmPlugin::provideProvisionResponse(%s)",
@@ -305,7 +318,8 @@
         return OK;
     }
 
-    status_t MockDrmPlugin::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop)
+    status_t MockDrmPlugin::getSecureStop(Vector<uint8_t> const & /* ssid */,
+                                          Vector<uint8_t> & secureStop)
     {
         Mutex::Autolock lock(mLock);
         ALOGD("MockDrmPlugin::getSecureStop()");
@@ -427,6 +441,63 @@
                   pData ? vectorToString(*pData) : "{}");
 
             sendEvent(eventType, extra, pSessionId, pData);
+        } else if (name == "mock-send-expiration-update") {
+            int64_t expiryTimeMS;
+            sscanf(value.string(), "%jd", &expiryTimeMS);
+
+            Vector<uint8_t> const *pSessionId = NULL;
+            ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id"));
+            if (index >= 0) {
+                pSessionId = &mByteArrayProperties[index];
+            }
+
+            ALOGD("sending expiration-update from mock drm plugin: %jd %s",
+                  expiryTimeMS, pSessionId ? vectorToString(*pSessionId) : "{}");
+
+            sendExpirationUpdate(pSessionId, expiryTimeMS);
+        } else if (name == "mock-send-keys-change") {
+            Vector<uint8_t> const *pSessionId = NULL;
+            ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id"));
+            if (index >= 0) {
+                pSessionId = &mByteArrayProperties[index];
+            }
+
+            ALOGD("sending keys-change from mock drm plugin: %s",
+                  pSessionId ? vectorToString(*pSessionId) : "{}");
+
+            Vector<DrmPlugin::KeyStatus> keyStatusList;
+            DrmPlugin::KeyStatus keyStatus;
+            uint8_t keyId1[] = {'k', 'e', 'y', '1'};
+            keyStatus.mKeyId.clear();
+            keyStatus.mKeyId.appendArray(keyId1, sizeof(keyId1));
+            keyStatus.mType = DrmPlugin::kKeyStatusType_Usable;
+            keyStatusList.add(keyStatus);
+
+            uint8_t keyId2[] = {'k', 'e', 'y', '2'};
+            keyStatus.mKeyId.clear();
+            keyStatus.mKeyId.appendArray(keyId2, sizeof(keyId2));
+            keyStatus.mType = DrmPlugin::kKeyStatusType_Expired;
+            keyStatusList.add(keyStatus);
+
+            uint8_t keyId3[] = {'k', 'e', 'y', '3'};
+            keyStatus.mKeyId.clear();
+            keyStatus.mKeyId.appendArray(keyId3, sizeof(keyId3));
+            keyStatus.mType = DrmPlugin::kKeyStatusType_OutputNotAllowed;
+            keyStatusList.add(keyStatus);
+
+            uint8_t keyId4[] = {'k', 'e', 'y', '4'};
+            keyStatus.mKeyId.clear();
+            keyStatus.mKeyId.appendArray(keyId4, sizeof(keyId4));
+            keyStatus.mType = DrmPlugin::kKeyStatusType_StatusPending;
+            keyStatusList.add(keyStatus);
+
+            uint8_t keyId5[] = {'k', 'e', 'y', '5'};
+            keyStatus.mKeyId.clear();
+            keyStatus.mKeyId.appendArray(keyId5, sizeof(keyId5));
+            keyStatus.mType = DrmPlugin::kKeyStatusType_InternalError;
+            keyStatusList.add(keyStatus);
+
+            sendKeysChange(pSessionId, &keyStatusList, true);
         } else {
             mStringProperties.add(name, value);
         }
@@ -728,7 +799,7 @@
     ssize_t
     MockCryptoPlugin::decrypt(bool secure, const uint8_t key[16], const uint8_t iv[16],
                               Mode mode, const void *srcPtr, const SubSample *subSamples,
-                              size_t numSubSamples, void *dstPtr, AString *errorDetailMsg)
+                              size_t numSubSamples, void *dstPtr, AString * /* errorDetailMsg */)
     {
         ALOGD("MockCryptoPlugin::decrypt(secure=%d, key=%s, iv=%s, mode=%d, src=%p, "
               "subSamples=%s, dst=%p)",
@@ -757,7 +828,7 @@
     {
         String8 result;
         for (size_t i = 0; i < numSubSamples; i++) {
-            result.appendFormat("[%zu] {clear:%zu, encrypted:%zu} ", i,
+            result.appendFormat("[%zu] {clear:%u, encrypted:%u} ", i,
                                 subSamples[i].mNumBytesOfClearData,
                                 subSamples[i].mNumBytesOfEncryptedData);
         }
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
index d1d8058..d0f2ddb 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
@@ -62,7 +62,8 @@
                                Vector<uint8_t> const &initData,
                                String8 const &mimeType, KeyType keyType,
                                KeyedVector<String8, String8> const &optionalParameters,
-                               Vector<uint8_t> &request, String8 &defaultUrl);
+                               Vector<uint8_t> &request, String8 &defaultUrl,
+                               KeyRequestType *keyRequestType);
 
         status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
                                     Vector<uint8_t> const &response,
diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h
index 1254d3c..953d711 100644
--- a/include/camera/CameraMetadata.h
+++ b/include/camera/CameraMetadata.h
@@ -56,7 +56,7 @@
      * thread-safety, it simply prevents the camera_metadata_t pointer returned
      * here from being accidentally invalidated by CameraMetadata operations.
      */
-    const camera_metadata_t* getAndLock();
+    const camera_metadata_t* getAndLock() const;
 
     /**
      * Unlock the CameraMetadata for use again. After this unlock, the pointer
@@ -208,7 +208,7 @@
 
   private:
     camera_metadata_t *mBuffer;
-    bool               mLocked;
+    mutable bool       mLocked;
 
     /**
      * Check if tag has a given type
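
Making getAndLock() const (with mLocked now mutable) means read-only inspection of the raw camera_metadata_t no longer requires a non-const CameraMetadata. The usual lock/unlock bracket, as a sketch (illustrative only; entryCountOf is a hypothetical helper):

    #include <camera/CameraMetadata.h>
    #include <system/camera_metadata.h>

    // Sketch only: borrow the raw buffer, read from it, and hand back the
    // same pointer via unlock().
    static size_t entryCountOf(android::CameraMetadata &meta) {
        const camera_metadata_t *raw = meta.getAndLock();
        size_t count = (raw != NULL) ? get_camera_metadata_entry_count(raw) : 0;
        meta.unlock(raw);
        return count;
    }
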
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index c6074fc..ba33ffe 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -108,6 +108,9 @@
      */
     void getSupportedPreviewFormats(Vector<int>& formats) const;
 
+    // Returns true if no keys are present
+    bool isEmpty() const;
+
     // Parameter keys to communicate between camera application and driver.
     // The access (read/write, read only, or write only) is viewed from the
     // perspective of applications, not driver.
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index f7f06bb..1b68b5f 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -25,8 +25,6 @@
 
 class ICamera;
 class ICameraClient;
-class IProCameraUser;
-class IProCameraCallbacks;
 class ICameraServiceListener;
 class ICameraDeviceUser;
 class ICameraDeviceCallbacks;
@@ -44,7 +42,6 @@
         GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION,
         GET_CAMERA_INFO,
         CONNECT,
-        CONNECT_PRO,
         CONNECT_DEVICE,
         ADD_LISTENER,
         REMOVE_LISTENER,
@@ -53,6 +50,8 @@
         GET_LEGACY_PARAMETERS,
         SUPPORTS_CAMERA_API,
         CONNECT_LEGACY,
+        SET_TORCH_MODE,
+        NOTIFY_SYSTEM_EVENT,
     };
 
     enum {
@@ -65,13 +64,34 @@
     };
 
     enum {
+        CAMERA_TYPE_BACKWARD_COMPATIBLE = 0,
+        CAMERA_TYPE_ALL = 1,
+    };
+
+    enum {
         CAMERA_HAL_API_VERSION_UNSPECIFIED = -1
-      };
+    };
+
+    /**
+     * Keep up-to-date with declarations in
+     * frameworks/base/services/core/java/com/android/server/camera/CameraService.java
+     *
+     * These event codes are intended to be used with the notifySystemEvent call.
+     */
+    enum {
+        NO_EVENT = 0,
+        USER_SWITCHED,
+    };
 
 public:
     DECLARE_META_INTERFACE(CameraService);
 
+    // Get the number of cameras that support basic color camera operation
+    // (type CAMERA_TYPE_BACKWARD_COMPATIBLE)
     virtual int32_t  getNumberOfCameras() = 0;
+    // Get the number of cameras of the specified type, one of CAMERA_TYPE_*
+    // enums
+    virtual int32_t  getNumberOfCameras(int cameraType) = 0;
     virtual status_t getCameraInfo(int cameraId,
             /*out*/
             struct CameraInfo* cameraInfo) = 0;
@@ -104,13 +124,6 @@
             /*out*/
             sp<ICamera>& device) = 0;
 
-    virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb,
-            int cameraId,
-            const String16& clientPackageName,
-            int clientUid,
-            /*out*/
-            sp<IProCameraUser>& device) = 0;
-
     virtual status_t connectDevice(
             const sp<ICameraDeviceCallbacks>& cameraCb,
             int cameraId,
@@ -142,6 +155,26 @@
             int clientUid,
             /*out*/
             sp<ICamera>& device) = 0;
+
+    /**
+     * Turn on or off a camera's torch mode. Torch mode will be turned off by
+     * camera service if the latest client binder that turned it on dies.
+     *
+     * return values:
+     * 0:       on a successful operation.
+     * -ENOSYS: the camera device doesn't support this operation. It is returned
+     *          if and only if android.flash.info.available is false.
+     * -EBUSY:  the camera device is already open.
+     * -EINVAL: cameraId is invalid or clientBinder is NULL when enabling
+     *          torch mode.
+     */
+    virtual status_t setTorchMode(const String16& cameraId, bool enabled,
+            const sp<IBinder>& clientBinder) = 0;
+
+    /**
+     * Notify the camera service of a system event.  Should only be called from system_server.
+     */
+    virtual void notifySystemEvent(int32_t eventId, const int32_t* args, size_t length) = 0;
 };
 
 // ----------------------------------------------------------------------------
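
The comments above spell out the setTorchMode() contract (0, -ENOSYS, -EBUSY, -EINVAL) and the notifySystemEvent() hook for system_server. A client-side sketch of the torch call, for illustration only (the "media.camera" service lookup and the turnTorchOn helper are assumptions of the sketch, not part of the patch):

    #include <binder/IInterface.h>
    #include <binder/IServiceManager.h>
    #include <camera/ICameraService.h>
    #include <utils/Errors.h>

    // Sketch only: resolve the camera service and turn the torch on.
    static android::status_t turnTorchOn(const android::String16 &cameraId,
                                         const android::sp<android::IBinder> &clientBinder) {
        android::sp<android::IBinder> binder =
                android::defaultServiceManager()->getService(android::String16("media.camera"));
        if (binder == NULL) {
            return android::NO_INIT;
        }
        android::sp<android::ICameraService> service =
                android::interface_cast<android::ICameraService>(binder);
        // Per the interface comment: 0 on success, -ENOSYS if the flash unit is
        // unavailable, -EBUSY if the camera is open, -EINVAL for a bad id or
        // NULL client binder.
        return service->setTorchMode(cameraId, true /* enabled */, clientBinder);
    }
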
diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h
index 0a0e43a..709ff31 100644
--- a/include/camera/ICameraServiceListener.h
+++ b/include/camera/ICameraServiceListener.h
@@ -66,9 +66,35 @@
         STATUS_UNKNOWN          = 0xFFFFFFFF,
     };
 
+    /**
+     * The torch mode status of a camera.
+     *
+     * Initial status will be transmitted with onTorchStatusChanged immediately
+     * after this listener is added to the service listener list.
+     *
+     * The enums should be set to values matching
+     * include/hardware/camera_common.h
+     */
+    enum TorchStatus {
+        // The camera's torch mode has become not available to use via
+        // setTorchMode().
+        TORCH_STATUS_NOT_AVAILABLE  = TORCH_MODE_STATUS_NOT_AVAILABLE,
+        // The camera's torch mode is off and available to be turned on via
+        // setTorchMode().
+        TORCH_STATUS_AVAILABLE_OFF  = TORCH_MODE_STATUS_AVAILABLE_OFF,
+        // The camera's torch mode is on and available to be turned off via
+        // setTorchMode().
+        TORCH_STATUS_AVAILABLE_ON   = TORCH_MODE_STATUS_AVAILABLE_ON,
+
+        // Use to initialize variables only
+        TORCH_STATUS_UNKNOWN        = 0xFFFFFFFF,
+    };
+
     DECLARE_META_INTERFACE(CameraServiceListener);
 
     virtual void onStatusChanged(Status status, int32_t cameraId) = 0;
+
+    virtual void onTorchStatusChanged(TorchStatus status, const String16& cameraId) = 0;
 };
 
 // ----------------------------------------------------------------------------
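
Listeners now receive torch transitions alongside availability changes, with the initial torch state delivered right after addListener(). A minimal listener sketch (illustrative only; TorchLoggingListener is a hypothetical class):

    #define LOG_TAG "TorchLoggingListener"
    #include <utils/Log.h>
    #include <utils/String8.h>

    #include <camera/ICameraServiceListener.h>

    // Sketch only: log both callbacks defined by the interface.
    class TorchLoggingListener : public android::BnCameraServiceListener {
    public:
        virtual void onStatusChanged(Status status, int32_t cameraId) {
            ALOGI("camera %d status %d", cameraId, status);
        }
        virtual void onTorchStatusChanged(TorchStatus status,
                                          const android::String16 &cameraId) {
            ALOGI("camera %s torch status %d",
                  android::String8(cameraId).string(), status);
        }
    };
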
diff --git a/include/camera/ICameraServiceProxy.h b/include/camera/ICameraServiceProxy.h
new file mode 100644
index 0000000..12a555f
--- /dev/null
+++ b/include/camera/ICameraServiceProxy.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+#define ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class ICameraServiceProxy : public IInterface {
+public:
+    enum {
+        PING_FOR_USER_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+    };
+
+    DECLARE_META_INTERFACE(CameraServiceProxy);
+
+    virtual void pingForUserUpdate() = 0;
+};
+
+class BnCameraServiceProxy: public BnInterface<ICameraServiceProxy>
+{
+public:
+    virtual status_t    onTransact( uint32_t code,
+                                    const Parcel& data,
+                                    Parcel* reply,
+                                    uint32_t flags = 0);
+};
+
+
+
+}; // namespace android
+
+#endif // ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+
+
diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h
deleted file mode 100644
index e8abb89..0000000
--- a/include/camera/IProCameraCallbacks.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H
-#define ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-#include <utils/Timers.h>
-#include <system/camera.h>
-
-struct camera_metadata;
-
-namespace android {
-
-class IProCameraCallbacks : public IInterface
-{
-    /**
-     * Keep up-to-date with IProCameraCallbacks.aidl in frameworks/base
-     */
-public:
-    DECLARE_META_INTERFACE(ProCameraCallbacks);
-
-    virtual void            notifyCallback(int32_t msgType,
-                                           int32_t ext1,
-                                           int32_t ext2) = 0;
-
-    enum LockStatus {
-        LOCK_ACQUIRED,
-        LOCK_RELEASED,
-        LOCK_STOLEN,
-    };
-
-    virtual void            onLockStatusChanged(LockStatus newLockStatus) = 0;
-
-    /** Missing by design: implementation is client-side in ProCamera.cpp **/
-    // virtual void onBufferReceived(int streamId,
-    //                               const CpuConsumer::LockedBufer& buf);
-    virtual void            onResultReceived(int32_t requestId,
-                                             camera_metadata* result) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnProCameraCallbacks : public BnInterface<IProCameraCallbacks>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h
deleted file mode 100644
index 2ccc4d2..0000000
--- a/include/camera/IProCameraUser.h
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_IPROCAMERAUSER_H
-#define ANDROID_HARDWARE_IPROCAMERAUSER_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-#include <utils/String8.h>
-#include <camera/IProCameraCallbacks.h>
-
-struct camera_metadata;
-
-namespace android {
-
-class IProCameraUserClient;
-class IGraphicBufferProducer;
-class Surface;
-
-class IProCameraUser: public IInterface
-{
-    /**
-     * Keep up-to-date with IProCameraUser.aidl in frameworks/base
-     */
-public:
-    DECLARE_META_INTERFACE(ProCameraUser);
-
-    virtual void            disconnect() = 0;
-
-    // connect to the service, given a callbacks listener
-    virtual status_t        connect(const sp<IProCameraCallbacks>& callbacks)
-                                                                            = 0;
-
-    /**
-     * Locking
-     **/
-    virtual status_t        exclusiveTryLock() = 0;
-    virtual status_t        exclusiveLock() = 0;
-    virtual status_t        exclusiveUnlock() = 0;
-
-    virtual bool            hasExclusiveLock() = 0;
-
-    /**
-     * Request Handling
-     **/
-
-    // Note that the callee gets a copy of the metadata.
-    virtual int             submitRequest(struct camera_metadata* metadata,
-                                          bool streaming = false) = 0;
-    virtual status_t        cancelRequest(int requestId) = 0;
-
-    virtual status_t        deleteStream(int streamId) = 0;
-    virtual status_t        createStream(
-                                      int width, int height, int format,
-                                      const sp<IGraphicBufferProducer>& bufferProducer,
-                                      /*out*/
-                                      int* streamId) = 0;
-
-    // Create a request object from a template.
-    virtual status_t        createDefaultRequest(int templateId,
-                                                 /*out*/
-                                                 camera_metadata** request)
-                                                                           = 0;
-
-    // Get static camera metadata
-    virtual status_t        getCameraInfo(int cameraId,
-                                          /*out*/
-                                          camera_metadata** info) = 0;
-
-};
-
-// ----------------------------------------------------------------------------
-
-class BnProCameraUser: public BnInterface<IProCameraUser>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h
deleted file mode 100644
index e9b687a..0000000
--- a/include/camera/ProCamera.h
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_PRO_CAMERA_H
-#define ANDROID_HARDWARE_PRO_CAMERA_H
-
-#include <utils/Timers.h>
-#include <utils/KeyedVector.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <system/camera.h>
-#include <camera/IProCameraCallbacks.h>
-#include <camera/IProCameraUser.h>
-#include <camera/Camera.h>
-#include <camera/CameraMetadata.h>
-#include <camera/ICameraService.h>
-#include <gui/CpuConsumer.h>
-
-#include <gui/Surface.h>
-
-#include <utils/Condition.h>
-#include <utils/Mutex.h>
-
-#include <camera/CameraBase.h>
-
-struct camera_metadata;
-
-namespace android {
-
-// All callbacks on this class are concurrent
-// (they come from separate threads)
-class ProCameraListener : virtual public RefBase
-{
-public:
-    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
-
-    // Lock has been acquired. Write operations now available.
-    virtual void onLockAcquired() = 0;
-    // Lock has been released with exclusiveUnlock.
-    virtual void onLockReleased() = 0;
-    // Lock has been stolen by another client.
-    virtual void onLockStolen() = 0;
-
-    // Lock free.
-    virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2)
-                                                                            = 0;
-    // onFrameAvailable and OnResultReceived can come in with any order,
-    // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them
-
-    /**
-      * A new metadata buffer has been received.
-      * -- Ownership of request passes on to the callee, free with
-      *    free_camera_metadata.
-      */
-    virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0;
-
-    // TODO: make onFrameAvailable pure virtual
-
-    // A new frame buffer has been received for this stream.
-    // -- This callback only fires for createStreamCpu streams
-    // -- A buffer may be obtained by calling cpuConsumer->lockNextBuffer
-    // -- Use buf.timestamp to correlate with result's android.sensor.timestamp
-    // -- The buffer should be accessed with CpuConsumer::lockNextBuffer
-    //      and CpuConsumer::unlockBuffer
-    virtual void onFrameAvailable(int /*streamId*/,
-                                  const sp<CpuConsumer>& /*cpuConsumer*/) {
-    }
-
-};
-
-class ProCamera;
-
-template <>
-struct CameraTraits<ProCamera>
-{
-    typedef ProCameraListener     TCamListener;
-    typedef IProCameraUser        TCamUser;
-    typedef IProCameraCallbacks   TCamCallbacks;
-    typedef status_t (ICameraService::*TCamConnectService)(const sp<IProCameraCallbacks>&,
-                                                           int, const String16&, int,
-                                                           /*out*/
-                                                           sp<IProCameraUser>&);
-    static TCamConnectService     fnConnectService;
-};
-
-
-class ProCamera :
-    public CameraBase<ProCamera>,
-    public BnProCameraCallbacks
-{
-public:
-    /**
-     * Connect a shared camera. By default access is restricted to read only
-     * (Lock free) operations. To be able to submit custom requests a lock needs
-     * to be acquired with exclusive[Try]Lock.
-     */
-    static sp<ProCamera> connect(int cameraId);
-    virtual ~ProCamera();
-
-    /**
-     * Exclusive Locks:
-     * - We may request exclusive access to a camera if no other
-     *   clients are using the camera. This works as a traditional
-     *   client, writing/reading any camera state.
-     * - An application opening the camera (a regular 'Camera') will
-     *   always steal away the exclusive lock from a ProCamera,
-     *   this will call onLockReleased.
-     * - onLockAcquired will be called again once it is possible
-     *   to again exclusively lock the camera.
-     *
-     */
-
-    /**
-     * All exclusiveLock/unlock functions are asynchronous. The remote endpoint
-     * shall not block while waiting to acquire the lock. Instead the lock
-     * notifications will come in asynchronously on the listener.
-     */
-
-    /**
-      * Attempt to acquire the lock instantly (non-blocking)
-      * - If this succeeds, you do not need to wait for onLockAcquired
-      *   but the event will still be fired
-      *
-      * Returns -EBUSY if already locked. 0 on success.
-      */
-    status_t exclusiveTryLock();
-    // always returns 0. wait for onLockAcquired before lock is acquired.
-    status_t exclusiveLock();
-    // release a lock if we have one, or cancel the lock request.
-    status_t exclusiveUnlock();
-
-    // exclusive lock = do whatever we want. no lock = read only.
-    bool hasExclusiveLock();
-
-    /**
-     * < 0 error, >= 0 the request ID. streaming to have the request repeat
-     *    until cancelled.
-     * The request queue is flushed when a lock is released or stolen
-     *    if not locked will return PERMISSION_DENIED
-     */
-    int submitRequest(const struct camera_metadata* metadata,
-                                                        bool streaming = false);
-    // if not locked will return PERMISSION_DENIED, BAD_VALUE if requestId bad
-    status_t cancelRequest(int requestId);
-
-    /**
-     * Ask for a stream to be enabled.
-     * Lock free. Service maintains counter of streams.
-     */
-    status_t requestStream(int streamId);
-// TODO: remove requestStream, its useless.
-
-    /**
-      * Delete a stream.
-      * Lock free.
-      *
-      * NOTE: As a side effect this cancels ALL streaming requests.
-      *
-      * Errors: BAD_VALUE if unknown stream ID.
-      *         PERMISSION_DENIED if the stream wasn't yours
-      */
-    status_t deleteStream(int streamId);
-
-    /**
-      * Create a new HW stream, whose sink will be the window.
-      * Lock free. Service maintains counter of streams.
-      * Errors: -EBUSY if too many streams created
-      */
-    status_t createStream(int width, int height, int format,
-                          const sp<Surface>& surface,
-                          /*out*/
-                          int* streamId);
-
-    /**
-      * Create a new HW stream, whose sink will be the SurfaceTexture.
-      * Lock free. Service maintains counter of streams.
-      * Errors: -EBUSY if too many streams created
-      */
-    status_t createStream(int width, int height, int format,
-                          const sp<IGraphicBufferProducer>& bufferProducer,
-                          /*out*/
-                          int* streamId);
-    status_t createStreamCpu(int width, int height, int format,
-                          int heapCount,
-                          /*out*/
-                          sp<CpuConsumer>* cpuConsumer,
-                          int* streamId);
-    status_t createStreamCpu(int width, int height, int format,
-                          int heapCount,
-                          bool synchronousMode,
-                          /*out*/
-                          sp<CpuConsumer>* cpuConsumer,
-                          int* streamId);
-
-    // Create a request object from a template.
-    status_t createDefaultRequest(int templateId,
-                                 /*out*/
-                                  camera_metadata** request) const;
-
-    // Get static camera metadata
-    camera_metadata* getCameraInfo(int cameraId);
-
-    // Blocks until a frame is available (CPU streams only)
-    // - Obtain the frame data by calling CpuConsumer::lockNextBuffer
-    // - Release the frame data after use with CpuConsumer::unlockBuffer
-    // Return value:
-    // - >0 - number of frames available to be locked
-    // - <0 - error (refer to error codes)
-    // Error codes:
-    // -ETIMEDOUT if it took too long to get a frame
-    int waitForFrameBuffer(int streamId);
-
-    // Blocks until a metadata result is available
-    // - Obtain the metadata by calling consumeFrameMetadata()
-    // Error codes:
-    // -ETIMEDOUT if it took too long to get a frame
-    status_t waitForFrameMetadata();
-
-    // Get the latest metadata. This is destructive.
-    // - Calling this repeatedly will produce empty metadata objects.
-    // - Use waitForFrameMetadata to sync until new data is available.
-    CameraMetadata consumeFrameMetadata();
-
-    // Convenience method to drop frame buffers (CPU streams only)
-    // Return values:
-    //  >=0 - number of frames dropped (up to count)
-    //  <0  - error code
-    // Error codes:
-    //   BAD_VALUE - invalid streamId or count passed
-    int dropFrameBuffer(int streamId, int count);
-
-protected:
-    ////////////////////////////////////////////////////////
-    // IProCameraCallbacks implementation
-    ////////////////////////////////////////////////////////
-    virtual void        notifyCallback(int32_t msgType,
-                                       int32_t ext,
-                                       int32_t ext2);
-
-    virtual void        onLockStatusChanged(
-                                IProCameraCallbacks::LockStatus newLockStatus);
-
-    virtual void        onResultReceived(int32_t requestId,
-                                         camera_metadata* result);
-private:
-    ProCamera(int cameraId);
-
-    class ProFrameListener : public CpuConsumer::FrameAvailableListener {
-    public:
-        ProFrameListener(wp<ProCamera> camera, int streamID) {
-            mCamera = camera;
-            mStreamId = streamID;
-        }
-
-    protected:
-        virtual void onFrameAvailable(const BufferItem& /* item */) {
-            sp<ProCamera> c = mCamera.promote();
-            if (c.get() != NULL) {
-                c->onFrameAvailable(mStreamId);
-            }
-        }
-
-    private:
-        wp<ProCamera> mCamera;
-        int mStreamId;
-    };
-    friend class ProFrameListener;
-
-    struct StreamInfo
-    {
-        StreamInfo(int streamId) {
-            this->streamID = streamId;
-            cpuStream = false;
-            frameReady = 0;
-        }
-
-        StreamInfo() {
-            streamID = -1;
-            cpuStream = false;
-        }
-
-        int  streamID;
-        bool cpuStream;
-        sp<CpuConsumer> cpuConsumer;
-        bool synchronousMode;
-        sp<ProFrameListener> frameAvailableListener;
-        sp<Surface> stc;
-        int frameReady;
-    };
-
-    Condition mWaitCondition;
-    Mutex     mWaitMutex;
-    static const nsecs_t mWaitTimeout = 1000000000; // 1sec
-    KeyedVector<int, StreamInfo> mStreams;
-    bool mMetadataReady;
-    CameraMetadata mLatestMetadata;
-
-    void onFrameAvailable(int streamId);
-
-    StreamInfo& getStreamInfo(int streamId);
-
-    friend class CameraBase;
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/camera2/CaptureRequest.h b/include/camera/camera2/CaptureRequest.h
index e56d61f..eeab217 100644
--- a/include/camera/camera2/CaptureRequest.h
+++ b/include/camera/camera2/CaptureRequest.h
@@ -30,6 +30,7 @@
 
     CameraMetadata          mMetadata;
     Vector<sp<Surface> >    mSurfaceList;
+    bool                    mIsReprocess;
 
     /**
      * Keep impl up-to-date with CaptureRequest.java in frameworks/base
diff --git a/include/camera/camera2/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h
index 670480b..c57b39f 100644
--- a/include/camera/camera2/ICameraDeviceCallbacks.h
+++ b/include/camera/camera2/ICameraDeviceCallbacks.h
@@ -65,6 +65,9 @@
     // One way
     virtual void            onResultReceived(const CameraMetadata& metadata,
                                              const CaptureResultExtras& resultExtras) = 0;
+
+    // One way
+    virtual void            onPrepared(int streamId) = 0;
 };
 
 // ----------------------------------------------------------------------------
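The new onPrepared() callback pairs with the prepare() call added to ICameraDeviceUser below: it fires once buffer preallocation for a stream completes. A minimal client-side sketch, assuming the usual Bn binder stub (BnCameraDeviceCallbacks is not part of this hunk) and omitting the other pure virtuals such as onResultReceived():

class MyDeviceCallbacks : public BnCameraDeviceCallbacks {
public:
    // Invoked asynchronously after ICameraDeviceUser::prepare(streamId) finishes
    // preallocating output buffers for the given stream.
    virtual void onPrepared(int streamId) {
        ALOGV("stream %d is prepared", streamId);
    }
    // ... remaining ICameraDeviceCallbacks pure virtuals omitted in this sketch ...
};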
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index 35488bb..a7bf8ab 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -27,9 +27,9 @@
 
 class ICameraDeviceUserClient;
 class IGraphicBufferProducer;
-class Surface;
 class CaptureRequest;
 class CameraMetadata;
+class OutputConfiguration;
 
 enum {
     NO_IN_FLIGHT_REPEATING_FRAMES = -1,
@@ -97,12 +97,24 @@
      * must be called before any requests can be submitted.
      * <p>
      */
-    virtual status_t        endConfigure() = 0;
+    virtual status_t        endConfigure(bool isConstrainedHighSpeed = false) = 0;
 
     virtual status_t        deleteStream(int streamId) = 0;
-    virtual status_t        createStream(
-            int width, int height, int format,
-            const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+
+    virtual status_t        createStream(const OutputConfiguration& outputConfiguration) = 0;
+
+    /**
+     * Create an input stream of width, height, and format (one of
+     * HAL_PIXEL_FORMAT_*)
+     *
+     * Returns the new stream ID if the return value is non-negative, or a
+     * negative status_t error code otherwise.
+     */
+    virtual status_t        createInputStream(int width, int height, int format) = 0;
+
+    // get the buffer producer of the input stream
+    virtual status_t        getInputBufferProducer(
+            sp<IGraphicBufferProducer> *producer) = 0;
 
     // Create a request object from a template.
     virtual status_t        createDefaultRequest(int templateId,
@@ -121,6 +133,17 @@
      */
     virtual status_t        flush(/*out*/
                                   int64_t* lastFrameNumber = NULL) = 0;
+
+    /**
+     * Preallocate buffers for a given output stream asynchronously.
+     */
+    virtual status_t        prepare(int streamId) = 0;
+
+    /**
+     * Free all unused buffers for a given output stream.
+     */
+    virtual status_t        tearDown(int streamId) = 0;
+
 };
 
 // ----------------------------------------------------------------------------
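Taken together, the reworked interface replaces the old width/height/format createStream() with an OutputConfiguration-based call and adds input-stream, prepare() and tearDown() entry points. A hedged usage sketch follows; the connected ICameraDeviceUser proxy, the Surface, and the assumption that createStream() returns the stream ID as a non-negative value (as documented for createInputStream()) are placeholders rather than guarantees of this patch:

sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
OutputConfiguration config(producer, /*rotation*/ 0);

status_t streamId = device->createStream(config);          // assumed: >= 0 is the stream ID
if (streamId >= 0) {
    device->endConfigure(/*isConstrainedHighSpeed*/ false); // finalize the stream set
    device->prepare(streamId);                              // async; completion via onPrepared()
    // ... submit capture requests ...
    device->tearDown(streamId);                             // free unused buffers when idle
}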
diff --git a/include/camera/camera2/OutputConfiguration.h b/include/camera/camera2/OutputConfiguration.h
new file mode 100644
index 0000000..5bcbe15
--- /dev/null
+++ b/include/camera/camera2/OutputConfiguration.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H
+#define ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H
+
+#include <utils/RefBase.h>
+#include <gui/IGraphicBufferProducer.h>
+
+namespace android {
+
+class Surface;
+
+class OutputConfiguration : public virtual RefBase {
+public:
+
+    static const int INVALID_ROTATION;
+    sp<IGraphicBufferProducer> getGraphicBufferProducer() const;
+    int                        getRotation() const;
+
+    /**
+     * Keep impl up-to-date with OutputConfiguration.java in frameworks/base
+     */
+    status_t                   writeToParcel(Parcel& parcel) const;
+    // getGraphicBufferProducer() returns NULL if an error occurred
+    // getRotation() returns INVALID_ROTATION if an error occurred
+    OutputConfiguration(const Parcel& parcel);
+
+    OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation);
+
+private:
+    sp<IGraphicBufferProducer> mGbp;
+    int                        mRotation;
+
+    // helper function
+    static String16 readMaybeEmptyString16(const Parcel& parcel);
+};
+}; // namespace android
+
+#endif
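OutputConfiguration mirrors OutputConfiguration.java and mostly travels across Binder, so a parcel round-trip is the typical use. A minimal sketch, assuming a valid producer obtained from an app Surface:

sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();  // placeholder source
OutputConfiguration out(gbp, /*rotation*/ 90);

Parcel parcel;
out.writeToParcel(parcel);
parcel.setDataPosition(0);

OutputConfiguration in(parcel);
// On a malformed parcel the accessors degrade as documented above:
//   in.getGraphicBufferProducer() == NULL
//   in.getRotation() == OutputConfiguration::INVALID_ROTATION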
diff --git a/include/media/AVSyncSettings.h b/include/media/AVSyncSettings.h
new file mode 100644
index 0000000..10e3bcc
--- /dev/null
+++ b/include/media/AVSyncSettings.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AV_SYNC_SETTINGS_H
+#define ANDROID_AV_SYNC_SETTINGS_H
+
+namespace android {
+
+enum AVSyncSource : unsigned {
+    // let the system decide the best sync source
+    AVSYNC_SOURCE_DEFAULT = 0,
+    // sync to the system clock
+    AVSYNC_SOURCE_SYSTEM_CLOCK = 1,
+    // sync to the audio track
+    AVSYNC_SOURCE_AUDIO = 2,
+    // sync to the display vsync
+    AVSYNC_SOURCE_VSYNC = 3,
+    AVSYNC_SOURCE_MAX,
+};
+
+enum AVSyncAudioAdjustMode : unsigned {
+    // let the system decide the best audio adjust mode
+    AVSYNC_AUDIO_ADJUST_MODE_DEFAULT = 0,
+    // adjust audio by time stretching
+    AVSYNC_AUDIO_ADJUST_MODE_STRETCH = 1,
+    // adjust audio by resampling
+    AVSYNC_AUDIO_ADJUST_MODE_RESAMPLE = 2,
+    AVSYNC_AUDIO_ADJUST_MODE_MAX,
+};
+
+// max tolerance when adjusting playback speed to desired playback speed
+#define AVSYNC_TOLERANCE_MAX 1.0f
+
+struct AVSyncSettings {
+    AVSyncSource mSource;
+    AVSyncAudioAdjustMode mAudioAdjustMode;
+    float mTolerance;
+    AVSyncSettings()
+        : mSource(AVSYNC_SOURCE_DEFAULT),
+          mAudioAdjustMode(AVSYNC_AUDIO_ADJUST_MODE_DEFAULT),
+          mTolerance(.044f) { }
+};
+
+} // namespace android
+
+// ---------------------------------------------------------------------------
+
+#endif // ANDROID_AV_SYNC_SETTINGS_H
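A short illustration of the new sync settings; the defaults come from the constructor above, and the chosen source and adjust mode are only an example:

AVSyncSettings sync;                          // defaults: DEFAULT source/mode, 0.044 tolerance
sync.mSource = AVSYNC_SOURCE_VSYNC;           // follow the display vsync
sync.mAudioAdjustMode = AVSYNC_AUDIO_ADJUST_MODE_RESAMPLE;
sync.mTolerance = 0.1f;                       // must stay at or below AVSYNC_TOLERANCE_MAX (1.0f)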
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 583695d..5af6c10 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -201,8 +201,12 @@
      */
 
     /* Simple Constructor.
+     *
+     * Parameters:
+     *
+     * opPackageName:      The package name used for app op checks.
      */
-    AudioEffect();
+    AudioEffect(const String16& opPackageName);
 
 
     /* Constructor.
@@ -211,6 +215,7 @@
      *
      * type:  type of effect created: can be null if uuid is specified. This corresponds to
      *        the OpenSL ES interface implemented by this effect.
+     * opPackageName:  The package name used for app op checks.
      * uuid:  Uuid of effect created: can be null if type is specified. This uuid corresponds to
      *        a particular implementation of an effect type.
      * priority:    requested priority for effect control: the priority level corresponds to the
@@ -227,6 +232,7 @@
      */
 
     AudioEffect(const effect_uuid_t *type,
+                const String16& opPackageName,
                 const effect_uuid_t *uuid = NULL,
                   int32_t priority = 0,
                   effect_callback_t cbf = NULL,
@@ -239,6 +245,7 @@
      *      Same as above but with type and uuid specified by character strings
      */
     AudioEffect(const char *typeStr,
+                    const String16& opPackageName,
                     const char *uuidStr = NULL,
                     int32_t priority = 0,
                     effect_callback_t cbf = NULL,
@@ -406,7 +413,9 @@
      void*                   mUserData;          // client context for callback function
      effect_descriptor_t     mDescriptor;        // effect descriptor
      int32_t                 mId;                // system wide unique effect engine instance ID
-     Mutex                   mLock;               // Mutex for mEnabled access
+     Mutex                   mLock;              // Mutex for mEnabled access
+
+     String16                mOpPackageName;     // The package name used for app op checks.
 
      // IEffectClient
      virtual void controlStatusChanged(bool controlGranted);
@@ -420,7 +429,8 @@
 private:
 
      // Implements the IEffectClient interface
-    class EffectClient : public android::BnEffectClient,  public android::IBinder::DeathRecipient
+    class EffectClient :
+        public android::BnEffectClient, public android::IBinder::DeathRecipient
     {
     public:
 
@@ -428,24 +438,39 @@
 
         // IEffectClient
         virtual void controlStatusChanged(bool controlGranted) {
-            mEffect->controlStatusChanged(controlGranted);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->controlStatusChanged(controlGranted);
+            }
         }
         virtual void enableStatusChanged(bool enabled) {
-            mEffect->enableStatusChanged(enabled);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->enableStatusChanged(enabled);
+            }
         }
         virtual void commandExecuted(uint32_t cmdCode,
                                      uint32_t cmdSize,
                                      void *pCmdData,
                                      uint32_t replySize,
                                      void *pReplyData) {
-            mEffect->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->commandExecuted(
+                    cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+            }
         }
 
         // IBinder::DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who) {mEffect->binderDied();}
+        virtual void binderDied(const wp<IBinder>& who) {
+            sp<AudioEffect> effect = mEffect.promote();
+            if (effect != 0) {
+                effect->binderDied();
+            }
+        }
 
     private:
-        AudioEffect *mEffect;
+        wp<AudioEffect> mEffect;
     };
 
     void binderDied();
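Every AudioEffect constructor now carries the caller's package name so the server side can perform app-op checks. A hedged construction sketch; the effect type pointer is a placeholder supplied by the caller, and only the new argument order comes from this header:

String16 opPackageName("com.example.app");    // used for app op checks
const effect_uuid_t *type = &myEffectType;    // hypothetical effect type owned by the caller

AudioEffect effect(type, opPackageName,
                   /*uuid*/ NULL, /*priority*/ 0,
                   /*cbf*/ NULL, /*user*/ NULL);
// effect.initCheck() is assumed to report whether the engine instance was created.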
diff --git a/include/media/AudioIoDescriptor.h b/include/media/AudioIoDescriptor.h
new file mode 100644
index 0000000..c94b738
--- /dev/null
+++ b/include/media/AudioIoDescriptor.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_IO_DESCRIPTOR_H
+#define ANDROID_AUDIO_IO_DESCRIPTOR_H
+
+namespace android {
+
+enum audio_io_config_event {
+    AUDIO_OUTPUT_OPENED,
+    AUDIO_OUTPUT_CLOSED,
+    AUDIO_OUTPUT_CONFIG_CHANGED,
+    AUDIO_INPUT_OPENED,
+    AUDIO_INPUT_CLOSED,
+    AUDIO_INPUT_CONFIG_CHANGED,
+};
+
+// Audio input/output descriptor used to cache input and output configurations in the client
+// process, to avoid frequent calls through IAudioFlinger.
+class AudioIoDescriptor : public RefBase {
+public:
+    AudioIoDescriptor() :
+        mIoHandle(AUDIO_IO_HANDLE_NONE),
+        mSamplingRate(0), mFormat(AUDIO_FORMAT_DEFAULT), mChannelMask(AUDIO_CHANNEL_NONE),
+        mFrameCount(0), mLatency(0)
+    {
+        memset(&mPatch, 0, sizeof(struct audio_patch));
+    }
+
+    virtual ~AudioIoDescriptor() {}
+
+    audio_port_handle_t getDeviceId() {
+        if (mPatch.num_sources != 0 && mPatch.num_sinks != 0) {
+            if (mPatch.sources[0].type == AUDIO_PORT_TYPE_MIX) {
+                // this is an output mix
+                // FIXME: the API only returns the first device in case of multiple device selection
+                return mPatch.sinks[0].id;
+            } else {
+                // this is an input mix
+                return mPatch.sources[0].id;
+            }
+        }
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+
+    audio_io_handle_t mIoHandle;
+    struct audio_patch mPatch;
+    uint32_t mSamplingRate;
+    audio_format_t mFormat;
+    audio_channel_mask_t mChannelMask;
+    size_t mFrameCount;
+    uint32_t mLatency;
+};
+
+
+};  // namespace android
+
+#endif  /*ANDROID_AUDIO_IO_DESCRIPTOR_H*/
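getDeviceId() walks the cached audio_patch: a mix on the source side means the descriptor represents an output (so the sink device is reported), otherwise the source device is reported. A small sketch, with the handle value and patch contents as placeholders that would normally come from an ioConfigChanged() event:

sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
desc->mIoHandle = ioHandle;                   // placeholder audio_io_handle_t
// desc->mPatch would normally be filled in by AudioFlinger via ioConfigChanged()

audio_port_handle_t deviceId = desc->getDeviceId();
if (deviceId == AUDIO_PORT_HANDLE_NONE) {
    // no patch yet: this I/O handle is not attached to a device
}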
diff --git a/include/media/AudioPolicy.h b/include/media/AudioPolicy.h
index a755e1e..feed402 100644
--- a/include/media/AudioPolicy.h
+++ b/include/media/AudioPolicy.h
@@ -38,8 +38,17 @@
 #define MIX_TYPE_PLAYERS 0
 #define MIX_TYPE_RECORDERS 1
 
-#define ROUTE_FLAG_RENDER 0x1
-#define ROUTE_FLAG_LOOP_BACK (0x1 << 1)
+// definition of the different events that can be reported on a dynamic policy from
+//   AudioSystem's implementation of the AudioPolicyClient interface
+// keep in sync with AudioSystem.java
+#define DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE 0
+
+#define MIX_STATE_DISABLED -1
+#define MIX_STATE_IDLE 0
+#define MIX_STATE_MIXING 1
+
+#define MIX_ROUTE_FLAG_RENDER 0x1
+#define MIX_ROUTE_FLAG_LOOP_BACK (0x1 << 1)
 
 #define MAX_MIXES_PER_POLICY 10
 #define MAX_CRITERIA_PER_MIX 20
@@ -61,11 +70,15 @@
 
 class AudioMix {
 public:
+    // flag on an AudioMix indicating the activity on this mix (IDLE, MIXING)
+    //   must be reported through the AudioPolicyClient interface
+    static const uint32_t kCbFlagNotifyActivity = 0x1;
+
     AudioMix() {}
     AudioMix(Vector<AttributeMatchCriterion> criteria, uint32_t mixType, audio_config_t format,
-             uint32_t routeFlags, String8 registrationId) :
+             uint32_t routeFlags, String8 registrationId, uint32_t flags) :
         mCriteria(criteria), mMixType(mixType), mFormat(format),
-        mRouteFlags(routeFlags), mRegistrationId(registrationId) {}
+        mRouteFlags(routeFlags), mRegistrationId(registrationId), mCbFlags(flags) {}
 
     status_t readFromParcel(Parcel *parcel);
     status_t writeToParcel(Parcel *parcel) const;
@@ -75,6 +88,7 @@
     audio_config_t  mFormat;
     uint32_t        mRouteFlags;
     String8         mRegistrationId;
+    uint32_t        mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
 };
 
 }; // namespace android
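With the extra constructor parameter a policy can ask to be told when one of its mixes toggles between MIX_STATE_IDLE and MIX_STATE_MIXING. A hedged example; the criteria vector, config initializer and registration id are placeholders:

Vector<AttributeMatchCriterion> criteria;           // filled in by the policy owner
audio_config_t config = AUDIO_CONFIG_INITIALIZER;   // assumed available from system/audio.h

AudioMix mix(criteria, MIX_TYPE_PLAYERS, config,
             MIX_ROUTE_FLAG_LOOP_BACK,              // capture what is rendered into the mix
             String8("com.example.mix"),            // registration id
             AudioMix::kCbFlagNotifyActivity);      // report IDLE/MIXING transitions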
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index f70d981..c4c7b0e 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -42,8 +42,7 @@
         EVENT_MORE_DATA = 0,        // Request to read available data from buffer.
                                     // If this event is delivered but the callback handler
                                     // does not want to read the available data, the handler must
-                                    // explicitly
-                                    // ignore the event by setting frameCount to zero.
+                                    // explicitly ignore the event by setting frameCount to zero.
         EVENT_OVERRUN = 1,          // Buffer overrun occurred.
         EVENT_MARKER = 2,           // Record head is at the specified marker position
                                     // (See setMarkerPosition()).
@@ -53,7 +52,7 @@
                                     // voluntary invalidation by mediaserver, or mediaserver crash.
     };
 
-    /* Client should declare Buffer on the stack and pass address to obtainBuffer()
+    /* Client should declare a Buffer and pass address to obtainBuffer()
      * and releaseBuffer().  See also callback_t for EVENT_MORE_DATA.
      */
 
@@ -62,20 +61,25 @@
     public:
         // FIXME use m prefix
         size_t      frameCount;     // number of sample frames corresponding to size;
-                                    // on input it is the number of frames available,
-                                    // on output is the number of frames actually drained
-                                    // (currently ignored but will make the primary field in future)
+                                    // on input to obtainBuffer() it is the number of frames desired
+                                    // on output from obtainBuffer() it is the number of available
+                                    //    frames to be read
+                                    // on input to releaseBuffer() it is currently ignored
 
         size_t      size;           // input/output in bytes == frameCount * frameSize
-                                    // on output is the number of bytes actually drained
-                                    // FIXME this is redundant with respect to frameCount,
-                                    // and TRANSFER_OBTAIN mode is broken for 8-bit data
-                                    // since we don't define the frame format
+                                    // on input to obtainBuffer() it is ignored
+                                    // on output from obtainBuffer() it is the number of available
+                                    //    bytes to be read, which is frameCount * frameSize
+                                    // on input to releaseBuffer() it is the number of bytes to
+                                    //    release
+                                    // FIXME This is redundant with respect to frameCount.  Consider
+                                    //    removing size and making frameCount the primary field.
 
         union {
             void*       raw;
             short*      i16;        // signed 16-bit
             int8_t*     i8;         // unsigned 8-bit, offset by 0x80
+                                    // input to obtainBuffer(): unused, output: pointer to buffer
         };
     };
 
@@ -88,8 +92,8 @@
      * user:    Pointer to context for use by the callback receiver.
      * info:    Pointer to optional parameter according to event type:
      *          - EVENT_MORE_DATA: pointer to AudioRecord::Buffer struct. The callback must not read
-     *            more bytes than indicated by 'size' field and update 'size' if fewer bytes are
-     *            consumed.
+     *                             more bytes than indicated by 'size' field and update 'size' if
+     *                             fewer bytes are consumed.
      *          - EVENT_OVERRUN: unused.
      *          - EVENT_MARKER: pointer to const uint32_t containing the marker position in frames.
      *          - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames.
@@ -106,6 +110,7 @@
      *  - BAD_VALUE: unsupported configuration
      * frameCount is guaranteed to be non-zero if status is NO_ERROR,
      * and is undefined otherwise.
+     * FIXME This API assumes a route, and so should be deprecated.
      */
 
      static status_t getMinFrameCount(size_t* frameCount,
@@ -118,14 +123,18 @@
     enum transfer_type {
         TRANSFER_DEFAULT,   // not specified explicitly; determine from the other parameters
         TRANSFER_CALLBACK,  // callback EVENT_MORE_DATA
-        TRANSFER_OBTAIN,    // FIXME deprecated: call obtainBuffer() and releaseBuffer()
+        TRANSFER_OBTAIN,    // call obtainBuffer() and releaseBuffer()
         TRANSFER_SYNC,      // synchronous read()
     };
 
     /* Constructs an uninitialized AudioRecord. No connection with
      * AudioFlinger takes place.  Use set() after this.
+     *
+     * Parameters:
+     *
+     * opPackageName:      The package name used for app ops.
      */
-                        AudioRecord();
+                        AudioRecord(const String16& opPackageName);
 
     /* Creates an AudioRecord object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -138,27 +147,30 @@
      * format:             Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
      *                     16 bits per sample).
      * channelMask:        Channel mask, such that audio_is_input_channel(channelMask) is true.
+     * opPackageName:      The package name used for app ops.
      * frameCount:         Minimum size of track PCM buffer in frames. This defines the
      *                     application's contribution to the
      *                     latency of the track.  The actual size selected by the AudioRecord could
      *                     be larger if the requested size is not compatible with current audio HAL
      *                     latency.  Zero means to use a default value.
      * cbf:                Callback function. If not null, this function is called periodically
-     *                     to consume new data and inform of marker, position updates, etc.
+     *                     to consume new data in TRANSFER_CALLBACK mode
+     *                     and inform of marker, position updates, etc.
      * user:               Context for use by the callback receiver.
      * notificationFrames: The callback function is called each time notificationFrames PCM
      *                     frames are ready in record track output buffer.
      * sessionId:          Not yet supported.
      * transferType:       How data is transferred from AudioRecord.
      * flags:              See comments on audio_input_flags_t in <system/audio.h>
+     * pAttributes:        If not NULL, supersedes inputSource for use case selection.
      * threadCanCallJava:  Not present in parameter list, and so is fixed at false.
-     * pAttributes:        if not NULL, supersedes inputSource for use case selection
      */
 
                         AudioRecord(audio_source_t inputSource,
                                     uint32_t sampleRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
+                                    const String16& opPackageName,
                                     size_t frameCount = 0,
                                     callback_t cbf = NULL,
                                     void* user = NULL,
@@ -166,6 +178,8 @@
                                     int sessionId = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                                    int uid = -1,
+                                    pid_t pid = -1,
                                     const audio_attributes_t* pAttributes = NULL);
 
     /* Terminates the AudioRecord and unregisters it from AudioFlinger.
@@ -177,6 +191,7 @@
 
     /* Initialize an AudioRecord that was created using the AudioRecord() constructor.
      * Don't call set() more than once, or after an AudioRecord() constructor that takes parameters.
+     * set() is not multi-thread safe.
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful initialization
      *  - INVALID_OPERATION: AudioRecord is already initialized or record device is already in use
@@ -201,6 +216,8 @@
                             int sessionId = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
                             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                            int uid = -1,
+                            pid_t pid = -1,
                             const audio_attributes_t* pAttributes = NULL);
 
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
@@ -211,7 +228,7 @@
             status_t    initCheck() const   { return mStatus; }
 
     /* Returns this track's estimated latency in milliseconds.
-     * This includes the latency due to AudioRecord buffer size,
+     * This includes the latency due to AudioRecord buffer size, resampling if applicable,
      * and audio hardware driver.
      */
             uint32_t    latency() const     { return mLatency; }
@@ -243,11 +260,6 @@
      */
             uint32_t    getSampleRate() const   { return mSampleRate; }
 
-    /* Return the notification frame count.
-     * This is approximately how often the callback is invoked, for transfer type TRANSFER_CALLBACK.
-     */
-            size_t      notificationFrames() const  { return mNotificationFramesAct; }
-
     /* Sets marker position. When record reaches the number of frames specified,
      * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
      * with marker == 0 cancels marker notification callback.
@@ -309,7 +321,12 @@
      * Returned value:
      *  handle on audio hardware input
      */
-            audio_io_handle_t    getInput() const;
+// FIXME The only known public caller is frameworks/opt/net/voip/src/jni/rtp/AudioGroup.cpp
+            audio_io_handle_t    getInput() const __attribute__((__deprecated__))
+                                                { return getInputPrivate(); }
+private:
+            audio_io_handle_t    getInputPrivate() const;
+public:
 
     /* Returns the audio session ID associated with this AudioRecord.
      *
@@ -323,10 +340,18 @@
      */
             int    getSessionId() const { return mSessionId; }
 
-    /* Obtains a buffer of up to "audioBuffer->frameCount" full frames.
+    /* Public API for TRANSFER_OBTAIN mode.
+     * Obtains a buffer of up to "audioBuffer->frameCount" full frames.
      * After draining these frames of data, the caller should release them with releaseBuffer().
      * If the track buffer is not empty, obtainBuffer() returns as many contiguous
      * full frames as are available immediately.
+     *
+     * If nonContig is non-NULL, it is an output parameter that will be set to the number of
+     * additional non-contiguous frames that are predicted to be available immediately,
+     * if the client were to release the first frames and then call obtainBuffer() again.
+     * This value is only a prediction, and needs to be confirmed.
+     * It will be set to zero for an error return.
+     *
      * If the track buffer is empty and track is stopped, obtainBuffer() returns WOULD_BLOCK
      * regardless of the value of waitCount.
      * If the track buffer is empty and track is not stopped, obtainBuffer() blocks with a
@@ -336,9 +361,6 @@
      * or return WOULD_BLOCK depending on the value of the "waitCount"
      * parameter.
      *
-     * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications,
-     * which should use read() or callback EVENT_MORE_DATA instead.
-     *
      * Interpretation of waitCount:
      *  +n  limits wait time to n * WAIT_PERIOD_MS,
      *  -1  causes an (almost) infinite wait time,
@@ -347,6 +369,8 @@
      * Buffer fields
      * On entry:
      *  frameCount  number of frames requested
+     *  size        ignored
+     *  raw         ignored
      * After error return:
      *  frameCount  0
      *  size        0
@@ -357,13 +381,58 @@
      *  raw         pointer to the buffer
      */
 
-    /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */
-            status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
-                                __attribute__((__deprecated__));
+            status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
+                                size_t *nonContig = NULL);
+
+            // Explicit Routing
+    /**
+     * TODO Document this method.
+     */
+            status_t setInputDevice(audio_port_handle_t deviceId);
+
+    /**
+     * TODO Document this method.
+     */
+            audio_port_handle_t getInputDevice();
+
+     /* Returns the ID of the audio device actually used by the input to which this AudioRecord
+      * is attached.
+      * A value of AUDIO_PORT_HANDLE_NONE indicates the AudioRecord is not attached to any input.
+      *
+      * Parameters:
+      *  none.
+      */
+     audio_port_handle_t getRoutedDeviceId();
+
+    /* Add an AudioDeviceCallback. The caller will be notified when the audio device
+     * to which this AudioRecord is routed is updated.
+     * Replaces any previously installed callback.
+     * Parameters:
+     *  callback:  The callback interface
+     * Returns NO_ERROR if successful.
+     *         INVALID_OPERATION if the same callback is already installed.
+     *         NO_INIT or PERMISSION_DENIED if AudioFlinger service is not reachable
+     *         BAD_VALUE if the callback is NULL
+     */
+            status_t addAudioDeviceCallback(
+                    const sp<AudioSystem::AudioDeviceCallback>& callback);
+
+    /* remove an AudioDeviceCallback.
+     * Parameters:
+     *  callback:  The callback interface
+     * Returns NO_ERROR if successful.
+     *         INVALID_OPERATION if the callback is not installed
+     *         BAD_VALUE if the callback is NULL
+     */
+            status_t removeAudioDeviceCallback(
+                    const sp<AudioSystem::AudioDeviceCallback>& callback);
 
 private:
     /* If nonContig is non-NULL, it is an output parameter that will be set to the number of
-     * additional non-contiguous frames that are available immediately.
+     * additional non-contiguous frames that are predicted to be available immediately,
+     * if the client were to release the first frames and then call obtainBuffer() again.
+     * This value is only a prediction, and needs to be confirmed.
+     * It will be set to zero for an error return.
      * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(),
      * in case the requested amount of frames is in two or more non-contiguous regions.
      * FIXME requested and elapsed are both relative times.  Consider changing to absolute time.
@@ -372,9 +441,15 @@
                                      struct timespec *elapsed = NULL, size_t *nonContig = NULL);
 public:
 
-    /* Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill. */
-    // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed
-            void        releaseBuffer(Buffer* audioBuffer);
+    /* Public API for TRANSFER_OBTAIN mode.
+     * Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill.
+     *
+     * Buffer fields:
+     *  frameCount  currently ignored, but recommended to be set to the actual number of frames consumed
+     *  size        actual number of bytes consumed, must be multiple of frameSize
+     *  raw         ignored
+     */
+            void        releaseBuffer(const Buffer* audioBuffer);
 
     /* As a convenience we provide a read() interface to the audio buffer.
      * Input parameter 'size' is in byte units.
@@ -386,8 +461,11 @@
      *      WOULD_BLOCK         when obtainBuffer() returns same, or
      *                          AudioRecord was stopped during the read
      *      or any other error code returned by IAudioRecord::start() or restoreRecord_l().
+     * By default, read() only returns once all requested data has been transferred. Set
+     * 'blocking' to false to return immediately with whatever data is currently available,
+     * instead of retrying until the buffer has been filled.
      */
-            ssize_t     read(void* buffer, size_t size);
+            ssize_t     read(void* buffer, size_t size, bool blocking = true);
 
     /* Return the number of input frames lost in the audio driver since the last call of this
      * function.  Audio driver is expected to reset the value to 0 and restart counting upon
@@ -416,6 +494,7 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
+                void        wake();     // wake to handle changed notification conditions.
 
     private:
                 void        pauseInternal(nsecs_t ns = 0LL);
@@ -430,7 +509,9 @@
         bool                mPaused;    // whether thread is requested to pause at next loop entry
         bool                mPausedInt; // whether thread internally requests pause
         nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
-        bool                mIgnoreNextPausedInt;   // whether to ignore next mPausedInt request
+        bool                mIgnoreNextPausedInt;   // skip any internal pause and go immediately
+                                        // to processAudioBuffer() as state may have changed
+                                        // since pause time calculated.
     };
 
             // body of AudioRecordThread::threadLoop()
@@ -445,7 +526,7 @@
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t openRecord_l(size_t epoch);
+            status_t openRecord_l(size_t epoch, const String16& opPackageName);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
@@ -458,7 +539,7 @@
     bool                    mActive;
 
     // for client callback handler
-    callback_t              mCbf;               // callback handler for events, or NULL
+    callback_t              mCbf;                   // callback handler for events, or NULL
     void*                   mUserData;
 
     // for notification APIs
@@ -475,13 +556,15 @@
     bool                    mRetryOnPartialBuffer;  // sleep and retry after partial obtainBuffer()
     uint32_t                mObservedSequence;      // last observed value of mSequence
 
-    uint32_t                mMarkerPosition;    // in wrapping (overflow) frame units
+    uint32_t                mMarkerPosition;        // in wrapping (overflow) frame units
     bool                    mMarkerReached;
-    uint32_t                mNewPosition;       // in frames
-    uint32_t                mUpdatePeriod;      // in frames, zero means no EVENT_NEW_POS
+    uint32_t                mNewPosition;           // in frames
+    uint32_t                mUpdatePeriod;          // in frames, zero means no EVENT_NEW_POS
 
     status_t                mStatus;
 
+    String16                mOpPackageName;         // The package name used for app ops.
+
     size_t                  mFrameCount;            // corresponds to current IAudioRecord, value is
                                                     // reported back by AudioFlinger to the client
     size_t                  mReqFrameCount;         // frame count to request the first or next time
@@ -531,7 +614,14 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioRecord attempt
+    int                     mClientUid;
+    pid_t                   mClientPid;
     audio_attributes_t      mAttributes;
+
+    // For Device Selection API
+    //  a value of AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
+    audio_port_handle_t    mSelectedDeviceId;
+    sp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 };
 
 }; // namespace android
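The re-documented Buffer fields make obtainBuffer()/releaseBuffer() usable again as a public TRANSFER_OBTAIN API. A sketch of a single iteration, assuming 'record' is a started AudioRecord created with TRANSFER_OBTAIN and 'dest'/'framesWanted' are caller-owned placeholders:

AudioRecord::Buffer buffer;
buffer.frameCount = framesWanted;             // on entry: number of frames requested
size_t nonContig = 0;

status_t status = record.obtainBuffer(&buffer, /*waitCount*/ 1, &nonContig);
if (status == NO_ERROR) {
    // buffer.raw now points at buffer.size bytes (frameCount * frameSize) ready to read
    memcpy(dest, buffer.raw, buffer.size);
    // size must be a multiple of frameSize; frameCount is currently ignored on release
    record.releaseBuffer(&buffer);
} else if (status == WOULD_BLOCK) {
    // stopped, or no data arrived within waitCount * WAIT_PERIOD_MS
}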
diff --git a/include/media/AudioResamplerPublic.h b/include/media/AudioResamplerPublic.h
index 97847a0..055f724 100644
--- a/include/media/AudioResamplerPublic.h
+++ b/include/media/AudioResamplerPublic.h
@@ -17,6 +17,11 @@
 #ifndef ANDROID_AUDIO_RESAMPLER_PUBLIC_H
 #define ANDROID_AUDIO_RESAMPLER_PUBLIC_H
 
+#include <stdint.h>
+#include <math.h>
+
+namespace android {
+
 // AUDIO_RESAMPLER_DOWN_RATIO_MAX is the maximum ratio between the original
 // audio sample rate and the target rate when downsampling,
 // as permitted in the audio framework, e.g. AudioTrack and AudioFlinger.
@@ -26,4 +31,147 @@
 // TODO: replace with an API
 #define AUDIO_RESAMPLER_DOWN_RATIO_MAX 256
 
+// AUDIO_RESAMPLER_UP_RATIO_MAX is the maximum suggested ratio between the original
+// audio sample rate and the target rate when upsampling.  It is loosely enforced by
+// the system. One issue with large upsampling ratios is the approximation by
+// an int32_t of the phase increments, making the resulting sample rate inexact.
+#define AUDIO_RESAMPLER_UP_RATIO_MAX 65536
+
+// AUDIO_TIMESTRETCH_SPEED_MIN and AUDIO_TIMESTRETCH_SPEED_MAX define the min and max time stretch
+// speeds supported by the system. These are enforced by the system and values outside this range
+// will result in a runtime error.
+// Depending on the AudioPlaybackRate::mStretchMode, the effective limits might be narrower than
+// the ones specified here
+// AUDIO_TIMESTRETCH_SPEED_MIN_DELTA is the minimum absolute speed difference that might trigger a
+// parameter update
+#define AUDIO_TIMESTRETCH_SPEED_MIN    0.01f
+#define AUDIO_TIMESTRETCH_SPEED_MAX    20.0f
+#define AUDIO_TIMESTRETCH_SPEED_NORMAL 1.0f
+#define AUDIO_TIMESTRETCH_SPEED_MIN_DELTA 0.0001f
+
+// AUDIO_TIMESTRETCH_PITCH_MIN and AUDIO_TIMESTRETCH_PITCH_MAX define the min and max time stretch
+// pitch shifting supported by the system. These are not enforced by the system and values
+// outside this range might result in a pitch different than the one requested.
+// Depending on the AudioPlaybackRate::mStretchMode, the effective limits might be narrower than
+// the ones specified here.
+// AUDIO_TIMESTRETCH_PITCH_MIN_DELTA is the minimum absolute pitch difference that might trigger a
+// parameter update
+#define AUDIO_TIMESTRETCH_PITCH_MIN    0.25f
+#define AUDIO_TIMESTRETCH_PITCH_MAX    4.0f
+#define AUDIO_TIMESTRETCH_PITCH_NORMAL 1.0f
+#define AUDIO_TIMESTRETCH_PITCH_MIN_DELTA 0.0001f
+
+
+// Determines the current algorithm used for stretching
+enum AudioTimestretchStretchMode : int32_t {
+    AUDIO_TIMESTRETCH_STRETCH_DEFAULT            = 0,
+    AUDIO_TIMESTRETCH_STRETCH_SPEECH             = 1,
+    //TODO: add more stretch modes/algorithms
+};
+
+// Limits for AUDIO_TIMESTRETCH_STRETCH_SPEECH mode
+#define TIMESTRETCH_SONIC_SPEED_MIN 0.1f
+#define TIMESTRETCH_SONIC_SPEED_MAX 6.0f
+
+// Determines the behavior of the time stretcher if the current algorithm cannot
+// operate with the current parameters.
+// FALLBACK_CUT_REPEAT: (internal only) for speed <1.0 will truncate frames
+//    for speed > 1.0 will repeat frames
+// FALLBACK_MUTE: will set all processed frames to zero
+// FALLBACK_FAIL:  will stop program execution and log a fatal error
+enum AudioTimestretchFallbackMode : int32_t {
+    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT     = -1,
+    AUDIO_TIMESTRETCH_FALLBACK_DEFAULT        = 0,
+    AUDIO_TIMESTRETCH_FALLBACK_MUTE           = 1,
+    AUDIO_TIMESTRETCH_FALLBACK_FAIL           = 2,
+};
+
+struct AudioPlaybackRate {
+    float mSpeed;
+    float mPitch;
+    enum AudioTimestretchStretchMode  mStretchMode;
+    enum AudioTimestretchFallbackMode mFallbackMode;
+};
+
+static const AudioPlaybackRate AUDIO_PLAYBACK_RATE_DEFAULT = {
+        AUDIO_TIMESTRETCH_SPEED_NORMAL,
+        AUDIO_TIMESTRETCH_PITCH_NORMAL,
+        AUDIO_TIMESTRETCH_STRETCH_DEFAULT,
+        AUDIO_TIMESTRETCH_FALLBACK_DEFAULT
+};
+
+static inline bool isAudioPlaybackRateEqual(const AudioPlaybackRate &pr1,
+        const AudioPlaybackRate &pr2) {
+    return fabs(pr1.mSpeed - pr2.mSpeed) < AUDIO_TIMESTRETCH_SPEED_MIN_DELTA &&
+           fabs(pr1.mPitch - pr2.mPitch) < AUDIO_TIMESTRETCH_PITCH_MIN_DELTA &&
+           pr1.mStretchMode == pr2.mStretchMode &&
+           pr1.mFallbackMode == pr2.mFallbackMode;
+}
+
+static inline bool isAudioPlaybackRateValid(const AudioPlaybackRate &playbackRate) {
+    if (playbackRate.mFallbackMode == AUDIO_TIMESTRETCH_FALLBACK_FAIL &&
+            (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_SPEECH ||
+                    playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_DEFAULT)) {
+        //test sonic specific constraints
+        return playbackRate.mSpeed >= TIMESTRETCH_SONIC_SPEED_MIN &&
+                playbackRate.mSpeed <= TIMESTRETCH_SONIC_SPEED_MAX &&
+                playbackRate.mPitch >= AUDIO_TIMESTRETCH_PITCH_MIN &&
+                playbackRate.mPitch <= AUDIO_TIMESTRETCH_PITCH_MAX;
+    } else {
+        return playbackRate.mSpeed >= AUDIO_TIMESTRETCH_SPEED_MIN &&
+                playbackRate.mSpeed <= AUDIO_TIMESTRETCH_SPEED_MAX &&
+                playbackRate.mPitch >= AUDIO_TIMESTRETCH_PITCH_MIN &&
+                playbackRate.mPitch <= AUDIO_TIMESTRETCH_PITCH_MAX;
+    }
+}
+
+// TODO: Consider putting these inlines into a class scope
+
+// Returns the source frames needed to resample to destination frames.  This is not a precise
+// value and depends on the resampler (and possibly how it handles rounding internally).
+// Nevertheless, this should be an upper bound on the requirements of the resampler.
+// If srcSampleRate and dstSampleRate are equal, then it returns destination frames, which
+// may not be true if the resampler is asynchronous.
+static inline size_t sourceFramesNeeded(
+        uint32_t srcSampleRate, size_t dstFramesRequired, uint32_t dstSampleRate) {
+    // +1 for rounding - always do this even if matched ratio (resampler may use phases not ratio)
+    // +1 for additional sample needed for interpolation
+    return srcSampleRate == dstSampleRate ? dstFramesRequired :
+            size_t((uint64_t)dstFramesRequired * srcSampleRate / dstSampleRate + 1 + 1);
+}
+
+// An upper bound for the number of destination frames possible from srcFrames
+// after sample rate conversion.  This may be used for buffer sizing.
+static inline size_t destinationFramesPossible(size_t srcFrames, uint32_t srcSampleRate,
+        uint32_t dstSampleRate) {
+    if (srcSampleRate == dstSampleRate) {
+        return srcFrames;
+    }
+    uint64_t dstFrames = (uint64_t)srcFrames * dstSampleRate / srcSampleRate;
+    return dstFrames > 2 ? dstFrames - 2 : 0;
+}
+
+static inline size_t sourceFramesNeededWithTimestretch(
+        uint32_t srcSampleRate, size_t dstFramesRequired, uint32_t dstSampleRate,
+        float speed) {
+    // required is the number of input frames the resampler needs
+    size_t required = sourceFramesNeeded(srcSampleRate, dstFramesRequired, dstSampleRate);
+    // to deliver this, the time stretcher requires:
+    return required * (double)speed + 1 + 1; // accounting for rounding dependencies
+}
+
+// Identifies sample rates that we associate with music
+// and thus eligible for better resampling and fast capture.
+// This is somewhat less than 44100 to allow for pitch correction
+// involving resampling as well as asynchronous resampling.
+#define AUDIO_PROCESSING_MUSIC_RATE 40000
+
+static inline bool isMusicRate(uint32_t sampleRate) {
+    return sampleRate >= AUDIO_PROCESSING_MUSIC_RATE;
+}
+
+} // namespace android
+
+// ---------------------------------------------------------------------------
+
 #endif // ANDROID_AUDIO_RESAMPLER_PUBLIC_H
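A worked example of the sizing helpers above, with illustrative numbers (44.1 kHz source, 48 kHz sink, 2x time stretch):

size_t dstFrames = 1024;
size_t srcNeeded = sourceFramesNeeded(44100, dstFrames, 48000);
// = 1024 * 44100 / 48000 + 1 + 1 = 942 source frames (an upper bound)

size_t srcStretched = sourceFramesNeededWithTimestretch(44100, dstFrames, 48000, /*speed*/ 2.0f);
// = 942 * 2.0 + 1 + 1 = 1886 source frames

size_t dstPossible = destinationFramesPossible(srcNeeded, 44100, 48000);
// = 942 * 48000 / 44100 - 2 = 1023, deliberately conservative for buffer sizing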
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 843a354..06116a5 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -19,6 +19,7 @@
 
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
+#include <media/AudioIoDescriptor.h>
 #include <media/IAudioFlingerClient.h>
 #include <media/IAudioPolicyServiceClient.h>
 #include <system/audio.h>
@@ -29,6 +30,7 @@
 namespace android {
 
 typedef void (*audio_error_callback)(status_t err);
+typedef void (*dynamic_policy_callback)(int event, String8 regId, int val);
 
 class IAudioFlinger;
 class IAudioPolicyService;
@@ -89,6 +91,7 @@
     static String8  getParameters(const String8& keys);
 
     static void setErrorCallback(audio_error_callback cb);
+    static void setDynPolicyCallback(dynamic_policy_callback cb);
 
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
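The new dynamic_policy_callback lets native clients mirror what AudioSystem.java receives for dynamic policy mixes. A hedged sketch of a callback matching the typedef, using the event and state constants added to AudioPolicy.h earlier in this patch:

static void onDynamicPolicyEvent(int event, String8 regId, int val) {
    if (event == DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE && val == MIX_STATE_MIXING) {
        ALOGV("mix %s started mixing", regId.string());
    }
}

// Registration, typically done once at client init:
AudioSystem::setDynPolicyCallback(onDynamicPolicyEvent);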
@@ -98,10 +101,13 @@
 
     // Returned samplingRate and frameCount output values are guaranteed
     // to be non-zero if status == NO_ERROR
+    // FIXME This API assumes a route, and so should be deprecated.
     static status_t getOutputSamplingRate(uint32_t* samplingRate,
             audio_stream_type_t stream);
+    // FIXME This API assumes a route, and so should be deprecated.
     static status_t getOutputFrameCount(size_t* frameCount,
             audio_stream_type_t stream);
+    // FIXME This API assumes a route, and so should be deprecated.
     static status_t getOutputLatency(uint32_t* latency,
             audio_stream_type_t stream);
     static status_t getSamplingRate(audio_io_handle_t output,
@@ -110,19 +116,20 @@
     // audio_stream->get_buffer_size()/audio_stream_out_frame_size()
     static status_t getFrameCount(audio_io_handle_t output,
                                   size_t* frameCount);
-    // returns the audio output stream latency in ms. Corresponds to
+    // returns the audio output latency in ms. Corresponds to
     // audio_stream_out->get_latency()
     static status_t getLatency(audio_io_handle_t output,
                                uint32_t* latency);
 
     // return status NO_ERROR implies *buffSize > 0
+    // FIXME This API assumes a route, and so should be deprecated.
     static status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
         audio_channel_mask_t channelMask, size_t* buffSize);
 
     static status_t setVoiceVolume(float volume);
 
     // return the number of audio frames written by AudioFlinger to audio HAL and
-    // audio dsp to DAC since the specified output I/O handle has exited standby.
+    // audio dsp to DAC since the specified output has exited standby.
     // returned status (from utils/Errors.h) can be:
     // - NO_ERROR: successful operation, halFrames and dspFrames point to valid data
     // - INVALID_OPERATION: Not supported on current hardware platform
@@ -151,32 +158,8 @@
     // or no HW sync source is used.
     static audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId);
 
-    // types of io configuration change events received with ioConfigChanged()
-    enum io_config_event {
-        OUTPUT_OPENED,
-        OUTPUT_CLOSED,
-        OUTPUT_CONFIG_CHANGED,
-        INPUT_OPENED,
-        INPUT_CLOSED,
-        INPUT_CONFIG_CHANGED,
-        STREAM_CONFIG_CHANGED,
-        NUM_CONFIG_EVENTS
-    };
-
-    // audio output descriptor used to cache output configurations in client process to avoid
-    // frequent calls through IAudioFlinger
-    class OutputDescriptor {
-    public:
-        OutputDescriptor()
-        : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0)
-            {}
-
-        uint32_t samplingRate;
-        audio_format_t format;
-        audio_channel_mask_t channelMask;
-        size_t frameCount;
-        uint32_t latency;
-    };
+    // Indicates that Java services are ready (scheduling, power management, ...)
+    static status_t systemReady();
 
     // Events used to synchronize actions between audio sessions.
     // For instance SYNC_EVENT_PRESENTATION_COMPLETE can be used to delay recording start until
@@ -201,7 +184,7 @@
     // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions)
     //
     static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state,
-                                                const char *device_address);
+                                             const char *device_address, const char *device_name);
     static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                 const char *device_address);
     static status_t setPhoneState(audio_mode_t state);
@@ -217,14 +200,16 @@
                                         audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                         const audio_offload_info_t *offloadInfo = NULL);
     static status_t getOutputForAttr(const audio_attributes_t *attr,
-                                        audio_io_handle_t *output,
-                                        audio_session_t session,
-                                        audio_stream_type_t *stream,
-                                        uint32_t samplingRate = 0,
-                                        audio_format_t format = AUDIO_FORMAT_DEFAULT,
-                                        audio_channel_mask_t channelMask = AUDIO_CHANNEL_OUT_STEREO,
-                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                                        const audio_offload_info_t *offloadInfo = NULL);
+                                     audio_io_handle_t *output,
+                                     audio_session_t session,
+                                     audio_stream_type_t *stream,
+                                     uid_t uid,
+                                     uint32_t samplingRate = 0,
+                                     audio_format_t format = AUDIO_FORMAT_DEFAULT,
+                                     audio_channel_mask_t channelMask = AUDIO_CHANNEL_OUT_STEREO,
+                                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+                                     const audio_offload_info_t *offloadInfo = NULL);
     static status_t startOutput(audio_io_handle_t output,
                                 audio_stream_type_t stream,
                                 audio_session_t session);
@@ -240,10 +225,12 @@
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
                                     audio_session_t session,
+                                    uid_t uid,
                                     uint32_t samplingRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
-                                    audio_input_flags_t flags);
+                                    audio_input_flags_t flags,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     static status_t startInput(audio_io_handle_t input,
                                audio_session_t session);
@@ -327,6 +314,12 @@
 
     static status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration);
 
+    static status_t startAudioSource(const struct audio_port_config *source,
+                                      const audio_attributes_t *attributes,
+                                      audio_io_handle_t *handle);
+    static status_t stopAudioSource(audio_io_handle_t handle);
+
+
     // ----------------------------------------------------------------------------
 
     class AudioPortCallback : public RefBase
@@ -342,16 +335,42 @@
 
     };
 
-    static void setAudioPortCallback(sp<AudioPortCallback> callBack);
+    static status_t addAudioPortCallback(const sp<AudioPortCallback>& callback);
+    static status_t removeAudioPortCallback(const sp<AudioPortCallback>& callback);
+
+    class AudioDeviceCallback : public RefBase
+    {
+    public:
+
+                AudioDeviceCallback() {}
+        virtual ~AudioDeviceCallback() {}
+
+        virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                         audio_port_handle_t deviceId) = 0;
+    };
+
+    static status_t addAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+                                           audio_io_handle_t audioIo);
+    static status_t removeAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+                                              audio_io_handle_t audioIo);
+
+    static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
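A minimal sketch of the AudioDeviceCallback API declared just above; the I/O handle is a placeholder that would normally come from the AudioTrack or AudioRecord owning the stream:

class MyDeviceCallback : public AudioSystem::AudioDeviceCallback {
public:
    virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                     audio_port_handle_t deviceId) {
        ALOGV("io %d now routed to device %d", audioIo, deviceId);
    }
};

sp<AudioSystem::AudioDeviceCallback> cb = new MyDeviceCallback();
AudioSystem::addAudioDeviceCallback(cb, ioHandle);     // placeholder handle
// ...
AudioSystem::removeAudioDeviceCallback(cb, ioHandle);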
 
 private:
 
     class AudioFlingerClient: public IBinder::DeathRecipient, public BnAudioFlingerClient
     {
     public:
-        AudioFlingerClient() {
+        AudioFlingerClient() :
+            mInBuffSize(0), mInSamplingRate(0),
+            mInFormat(AUDIO_FORMAT_DEFAULT), mInChannelMask(AUDIO_CHANNEL_NONE) {
         }
 
+        void clearIoCache();
+        status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
+                                    audio_channel_mask_t channelMask, size_t* buffSize);
+        sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+
         // DeathRecipient
         virtual void binderDied(const wp<IBinder>& who);
 
@@ -359,7 +378,27 @@
 
         // indicate a change in the configuration of an output or input: keeps the cached
         // values for output/input parameters up-to-date in client process
-        virtual void ioConfigChanged(int event, audio_io_handle_t ioHandle, const void *param2);
+        virtual void ioConfigChanged(audio_io_config_event event,
+                                     const sp<AudioIoDescriptor>& ioDesc);
+
+
+        status_t addAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+                                               audio_io_handle_t audioIo);
+        status_t removeAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+                                           audio_io_handle_t audioIo);
+
+        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+
+    private:
+        Mutex                               mLock;
+        DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> >   mIoDescriptors;
+        DefaultKeyedVector<audio_io_handle_t, Vector < sp<AudioDeviceCallback> > >
+                                                                        mAudioDeviceCallbacks;
+        // cached values for recording getInputBufferSize() queries
+        size_t                              mInBuffSize;    // zero indicates cache is invalid
+        uint32_t                            mInSamplingRate;
+        audio_format_t                      mInFormat;
+        audio_channel_mask_t                mInChannelMask;
     };
 
     class AudioPolicyServiceClient: public IBinder::DeathRecipient,
@@ -369,26 +408,35 @@
         AudioPolicyServiceClient() {
         }
 
+        int addAudioPortCallback(const sp<AudioPortCallback>& callback);
+        int removeAudioPortCallback(const sp<AudioPortCallback>& callback);
+
         // DeathRecipient
         virtual void binderDied(const wp<IBinder>& who);
 
         // IAudioPolicyServiceClient
         virtual void onAudioPortListUpdate();
         virtual void onAudioPatchListUpdate();
+        virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state);
+
+    private:
+        Mutex                               mLock;
+        Vector <sp <AudioPortCallback> >    mAudioPortCallbacks;
     };
 
+    static const sp<AudioFlingerClient> getAudioFlingerClient();
+    static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+
     static sp<AudioFlingerClient> gAudioFlingerClient;
     static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
     friend class AudioFlingerClient;
     friend class AudioPolicyServiceClient;
 
     static Mutex gLock;      // protects gAudioFlinger and gAudioErrorCallback,
-    static Mutex gLockCache; // protects gOutputs, gPrevInSamplingRate, gPrevInFormat,
-                             // gPrevInChannelMask and gInBuffSize
     static Mutex gLockAPS;   // protects gAudioPolicyService and gAudioPolicyServiceClient
-    static Mutex gLockAPC;   // protects gAudioPortCallback
     static sp<IAudioFlinger> gAudioFlinger;
     static audio_error_callback gAudioErrorCallback;
+    static dynamic_policy_callback gDynPolicyCallback;
 
     static size_t gInBuffSize;
     // previous parameters for recording buffer size queries
@@ -397,12 +445,6 @@
     static audio_channel_mask_t gPrevInChannelMask;
 
     static sp<IAudioPolicyService> gAudioPolicyService;
-
-    // list of output descriptors containing cached parameters
-    // (sampling rate, framecount, channel count...)
-    static DefaultKeyedVector<audio_io_handle_t, OutputDescriptor *> gOutputs;
-
-    static sp<AudioPortCallback> gAudioPortCallback;
 };
 
 };  // namespace android
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index fd51b8f..e02f1b7 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -21,6 +21,7 @@
 #include <media/AudioSystem.h>
 #include <media/AudioTimestamp.h>
 #include <media/IAudioTrack.h>
+#include <media/AudioResamplerPublic.h>
 #include <utils/threads.h>
 
 namespace android {
@@ -42,28 +43,38 @@
      */
     enum event_type {
         EVENT_MORE_DATA = 0,        // Request to write more data to buffer.
+                                    // This event only occurs for TRANSFER_CALLBACK.
                                     // If this event is delivered but the callback handler
-                                    // does not want to write more data, the handler must explicitly
+                                    // does not want to write more data, the handler must
                                     // ignore the event by setting frameCount to zero.
-        EVENT_UNDERRUN = 1,         // Buffer underrun occurred.
+                                    // This might occur, for example, if the application is
+                                    // waiting for source data or is at the end of stream.
+                                    //
+                                    // For data filling, it is preferred that the callback
+                                    // does not block and instead returns a short count of
+                                    // the data actually delivered
+                                    // (or 0, if no data is currently available).
+        EVENT_UNDERRUN = 1,         // Buffer underrun occurred. This will not occur for
+                                    // static tracks.
         EVENT_LOOP_END = 2,         // Sample loop end was reached; playback restarted from
-                                    // loop start if loop count was not 0.
+                                    // loop start if loop count was not 0 for a static track.
         EVENT_MARKER = 3,           // Playback head is at the specified marker position
                                     // (See setMarkerPosition()).
         EVENT_NEW_POS = 4,          // Playback head is at a new position
                                     // (See setPositionUpdatePeriod()).
-        EVENT_BUFFER_END = 5,       // Playback head is at the end of the buffer.
-                                    // Not currently used by android.media.AudioTrack.
+        EVENT_BUFFER_END = 5,       // Playback has completed for a static track.
         EVENT_NEW_IAUDIOTRACK = 6,  // IAudioTrack was re-created, either due to re-routing and
                                     // voluntary invalidation by mediaserver, or mediaserver crash.
         EVENT_STREAM_END = 7,       // Sent after all the buffers queued in AF and HW are played
-                                    // back (after stop is called)
+                                    // back (after stop is called) for an offloaded track.
+#if 0   // FIXME not yet implemented
         EVENT_NEW_TIMESTAMP = 8,    // Delivered periodically and when there's a significant change
                                     // in the mapping from frame position to presentation time.
                                     // See AudioTimestamp for the information included with event.
+#endif
     };
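
The TRANSFER_CALLBACK semantics above are easiest to see in a small handler. A minimal sketch, assuming the callback_t signature declared later in this header (void(int event, void* user, void* info)) and a hypothetical non-blocking PcmSource producer:

// Hedged sketch: a TRANSFER_CALLBACK handler matching the event semantics above.
// PcmSource is a hypothetical data producer, not part of this header.
#include <media/AudioTrack.h>

struct PcmSource {
    // Copies up to 'maxBytes' of PCM into 'dst'; returns 0 if nothing is ready yet.
    size_t read(void* dst, size_t maxBytes);
};

static void audioCallback(int event, void* user, void* info) {
    using android::AudioTrack;
    PcmSource* source = static_cast<PcmSource*>(user);
    switch (event) {
    case AudioTrack::EVENT_MORE_DATA: {
        AudioTrack::Buffer* buffer = static_cast<AudioTrack::Buffer*>(info);
        // Preferred: don't block; report a short count (possibly 0) via 'size'.
        buffer->size = source->read(buffer->raw, buffer->size);
        break;
    }
    case AudioTrack::EVENT_UNDERRUN:
        break;  // transient for streaming tracks
    case AudioTrack::EVENT_STREAM_END:
        break;  // offloaded track finished draining after stop()
    default:
        break;
    }
}
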
 
-    /* Client should declare Buffer on the stack and pass address to obtainBuffer()
+    /* Client should declare a Buffer and pass the address to obtainBuffer()
      * and releaseBuffer().  See also callback_t for EVENT_MORE_DATA.
      */
 
@@ -72,22 +83,26 @@
     public:
         // FIXME use m prefix
         size_t      frameCount;   // number of sample frames corresponding to size;
-                                  // on input it is the number of frames desired,
-                                  // on output is the number of frames actually filled
-                                  // (currently ignored, but will make the primary field in future)
+                                  // on input to obtainBuffer() it is the number of frames desired,
+                                  // on output from obtainBuffer() it is the number of available
+                                  //    [empty slots for] frames to be filled
+                                  // on input to releaseBuffer() it is currently ignored
 
         size_t      size;         // input/output in bytes == frameCount * frameSize
-                                  // on input it is unused
-                                  // on output is the number of bytes actually filled
-                                  // FIXME this is redundant with respect to frameCount,
-                                  // and TRANSFER_OBTAIN mode is broken for 8-bit data
-                                  // since we don't define the frame format
+                                  // on input to obtainBuffer() it is ignored
+                                  // on output from obtainBuffer() it is the number of available
+                                  //    [empty slots for] bytes to be filled,
+                                  //    which is frameCount * frameSize
+                                  // on input to releaseBuffer() it is the number of bytes to
+                                  //    release
+                                  // FIXME This is redundant with respect to frameCount.  Consider
+                                  //    removing size and making frameCount the primary field.
 
         union {
             void*       raw;
             short*      i16;      // signed 16-bit
             int8_t*     i8;       // unsigned 8-bit, offset by 0x80
-        };                        // input: unused, output: pointer to buffer
+        };                        // input to obtainBuffer(): unused, output: pointer to buffer
     };
 
     /* As a convenience, if a callback is supplied, a handler thread
@@ -121,6 +136,7 @@
      *  - BAD_VALUE: unsupported configuration
      * frameCount is guaranteed to be non-zero if status is NO_ERROR,
      * and is undefined otherwise.
+     * FIXME This API assumes a route, and so should be deprecated.
      */
 
     static status_t getMinFrameCount(size_t* frameCount,
@@ -132,7 +148,7 @@
     enum transfer_type {
         TRANSFER_DEFAULT,   // not specified explicitly; determine from the other parameters
         TRANSFER_CALLBACK,  // callback EVENT_MORE_DATA
-        TRANSFER_OBTAIN,    // FIXME deprecated: call obtainBuffer() and releaseBuffer()
+        TRANSFER_OBTAIN,    // call obtainBuffer() and releaseBuffer()
         TRANSFER_SYNC,      // synchronous write()
         TRANSFER_SHARED,    // shared memory
     };
@@ -145,18 +161,15 @@
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
      * Unspecified values are set to appropriate default values.
-     * With this constructor, the track is configured for streaming mode.
-     * Data to be rendered is supplied by write() or by the callback EVENT_MORE_DATA.
-     * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is not allowed.
      *
      * Parameters:
      *
      * streamType:         Select the type of audio stream this track is attached to
      *                     (e.g. AUDIO_STREAM_MUSIC).
      * sampleRate:         Data source sampling rate in Hz.
-     * format:             Audio format.  For mixed tracks, any PCM format supported by server is OK
-     *                     or AUDIO_FORMAT_PCM_8_BIT which is handled on client side.  For direct
-     *                     and offloaded tracks, the possible format(s) depends on the output sink.
+     * format:             Audio format. For mixed tracks, any PCM format supported by server is OK.
+     *                     For direct and offloaded tracks, the possible format(s) depends on the
+     *                     output sink.
      * channelMask:        Channel mask, such that audio_is_output_channel(channelMask) is true.
      * frameCount:         Minimum size of track PCM buffer in frames. This defines the
      *                     application's contribution to the
@@ -165,20 +178,32 @@
      *                     configuration.  Zero means to use a default value.
      * flags:              See comments on audio_output_flags_t in <system/audio.h>.
      * cbf:                Callback function. If not null, this function is called periodically
-     *                     to provide new data and inform of marker, position updates, etc.
+     *                     to provide new data in TRANSFER_CALLBACK mode
+     *                     and inform of marker, position updates, etc.
      * user:               Context for use by the callback receiver.
      * notificationFrames: The callback function is called each time notificationFrames PCM
      *                     frames have been consumed from track input buffer.
      *                     This is expressed in units of frames at the initial source sample rate.
      * sessionId:          Specific session ID, or zero to use default.
      * transferType:       How data is transferred to AudioTrack.
+     * offloadInfo:        If not NULL, provides offload parameters for
+     *                     AudioSystem::getOutputForAttr().
+     * uid:                User ID of the app which initially requested this AudioTrack
+     *                     for power management tracking, or -1 for current user ID.
+     * pid:                Process ID of the app which initially requested this AudioTrack
+     *                     for power management tracking, or -1 for current process ID.
+     * pAttributes:        If not NULL, supersedes streamType for use case selection.
+     * doNotReconnect:     If set to true, AudioTrack won't automatically recreate the IAudioTrack
+     *                     binder to AudioFlinger; it will return an error instead.
+     *                     The application is then expected to recreate the track, e.g. based on
+     *                     offload support or a different channel configuration.
      * threadCanCallJava:  Not present in parameter list, and so is fixed at false.
      */
 
                         AudioTrack( audio_stream_type_t streamType,
                                     uint32_t sampleRate,
                                     audio_format_t format,
-                                    audio_channel_mask_t,
+                                    audio_channel_mask_t channelMask,
                                     size_t frameCount    = 0,
                                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                     callback_t cbf       = NULL,
@@ -189,13 +214,15 @@
                                     const audio_offload_info_t *offloadInfo = NULL,
                                     int uid = -1,
                                     pid_t pid = -1,
-                                    const audio_attributes_t* pAttributes = NULL);
+                                    const audio_attributes_t* pAttributes = NULL,
+                                    bool doNotReconnect = false);
 
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
-     * The format must not be 8-bit linear PCM.
      * Data to be rendered is passed in a shared memory buffer
-     * identified by the argument sharedBuffer, which must be non-0.
+     * identified by the argument sharedBuffer, which should be non-0.
+     * If sharedBuffer is zero, this constructor is equivalent to the previous constructor
+     * but without the ability to specify a non-zero value for the frameCount parameter.
      * The memory should be initialized to the desired data before calling start().
      * The write() method is not supported in this case.
      * It is recommended to pass a callback function to be notified of playback end by an
@@ -216,7 +243,8 @@
                                     const audio_offload_info_t *offloadInfo = NULL,
                                     int uid = -1,
                                     pid_t pid = -1,
-                                    const audio_attributes_t* pAttributes = NULL);
+                                    const audio_attributes_t* pAttributes = NULL,
+                                    bool doNotReconnect = false);
 
     /* Terminates the AudioTrack and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioTrack.
@@ -227,6 +255,7 @@
 
     /* Initialize an AudioTrack that was created using the AudioTrack() constructor.
      * Don't call set() more than once, or after the AudioTrack() constructors that take parameters.
+     * set() is not multi-thread safe.
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful initialization
      *  - INVALID_OPERATION: AudioTrack is already initialized
@@ -259,7 +288,8 @@
                             const audio_offload_info_t *offloadInfo = NULL,
                             int uid = -1,
                             pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = NULL);
+                            const audio_attributes_t* pAttributes = NULL,
+                            bool doNotReconnect = false);
 
     /* Result of constructing the AudioTrack. This must be checked for successful initialization
      * before using any AudioTrack API (except for set()), because using
@@ -347,6 +377,26 @@
     /* Return current source sample rate in Hz */
             uint32_t    getSampleRate() const;
 
+    /* Return the original source sample rate in Hz. This corresponds to the sample rate
+     * if playback rate had normal speed and pitch.
+     */
+            uint32_t    getOriginalSampleRate() const;
+
+    /* Set source playback rate for timestretch
+     * 1.0 is normal speed: < 1.0 is slower, > 1.0 is faster
+     * 1.0 is normal pitch: < 1.0 is lower pitch, > 1.0 is higher pitch
+     *
+     * AUDIO_TIMESTRETCH_SPEED_MIN <= speed <= AUDIO_TIMESTRETCH_SPEED_MAX
+     * AUDIO_TIMESTRETCH_PITCH_MIN <= pitch <= AUDIO_TIMESTRETCH_PITCH_MAX
+     *
+     * Speed increases the playback rate of media, but does not alter pitch.
+     * Pitch increases the "tonal frequency" of media, but does not affect the playback rate.
+     */
+            status_t    setPlaybackRate(const AudioPlaybackRate &playbackRate);
+
+    /* Return current playback rate */
+            const AudioPlaybackRate& getPlaybackRate() const;
+
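
A minimal sketch of adjusting timestretch on an existing track; the AudioPlaybackRate field names (mSpeed, mPitch) are assumed from AudioResamplerPublic.h and are not confirmed by this change:

// Hedged sketch: speed up playback by 25% without changing pitch.
// Assumes AudioPlaybackRate exposes float mSpeed / mPitch members; adjust to the
// real field names if they differ.
#include <media/AudioTrack.h>

void speedUp(const android::sp<android::AudioTrack>& track) {
    android::AudioPlaybackRate rate = track->getPlaybackRate();  // copy current settings
    rate.mSpeed = 1.25f;   // 25% faster
    rate.mPitch = 1.0f;    // keep normal pitch
    android::status_t err = track->setPlaybackRate(rate);
    if (err != android::NO_ERROR) {
        // Out-of-range values are rejected; see the AUDIO_TIMESTRETCH_* bounds above.
    }
}
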
     /* Enables looping and sets the start and end points of looping.
      * Only supported for static buffer mode.
      *
@@ -461,7 +511,38 @@
      *  handle on audio hardware output, or AUDIO_IO_HANDLE_NONE if the
      *  track needed to be re-created but that failed
      */
+private:
             audio_io_handle_t    getOutput() const;
+public:
+
+    /* Selects the audio device to use for output of this AudioTrack. A value of
+     * AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
+     *
+     * Parameters:
+     *  deviceId:  The ID of the selected device (as returned by the AudioDevicesManager API).
+     *
+     * Returned value:
+     *  - NO_ERROR: successful operation
+     *    TODO: what else can happen here?
+     */
+            status_t    setOutputDevice(audio_port_handle_t deviceId);
+
+    /* Returns the ID of the audio device selected for this AudioTrack.
+     * A value of AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
+     *
+     * Parameters:
+     *  none.
+     */
+     audio_port_handle_t getOutputDevice();
+
+     /* Returns the ID of the audio device actually used by the output to which this AudioTrack is
+      * attached.
+      * A value of AUDIO_PORT_HANDLE_NONE indicates the audio track is not attached to any output.
+      *
+      * Parameters:
+      *  none.
+      */
+     audio_port_handle_t getRoutedDeviceId();
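
A sketch of how the three routing calls above combine; deviceId is assumed to come from the Java-side device enumeration API referenced above:

// Hedged sketch: explicit output-device selection followed by verification of the
// device actually in use.
#include <media/AudioTrack.h>

void routeAndStart(const android::sp<android::AudioTrack>& track,
                   audio_port_handle_t deviceId) {
    // AUDIO_PORT_HANDLE_NONE would restore default (AudioPolicyManager) routing.
    track->setOutputDevice(deviceId);
    track->start();
    // Once the track is attached to an output, query the device actually in use;
    // it may differ from the request (e.g. if the device was disconnected).
    audio_port_handle_t routed = track->getRoutedDeviceId();
    if (routed != deviceId) {
        // React here, e.g. via an AudioSystem::AudioDeviceCallback (see below).
    }
}
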
 
     /* Returns the unique session ID associated with this track.
      *
@@ -487,10 +568,18 @@
      */
             status_t    attachAuxEffect(int effectId);
 
-    /* Obtains a buffer of up to "audioBuffer->frameCount" empty slots for frames.
+    /* Public API for TRANSFER_OBTAIN mode.
+     * Obtains a buffer of up to "audioBuffer->frameCount" empty slots for frames.
      * After filling these slots with data, the caller should release them with releaseBuffer().
      * If the track buffer is not full, obtainBuffer() returns as many contiguous
      * [empty slots for] frames as are available immediately.
+     *
+     * If nonContig is non-NULL, it is an output parameter that will be set to the number of
+     * additional non-contiguous frames that are predicted to be available immediately,
+     * if the client were to release the first frames and then call obtainBuffer() again.
+     * This value is only a prediction, and needs to be confirmed.
+     * It will be set to zero for an error return.
+     *
      * If the track buffer is full and track is stopped, obtainBuffer() returns WOULD_BLOCK
      * regardless of the value of waitCount.
      * If the track buffer is full and track is not stopped, obtainBuffer() blocks with a
@@ -499,10 +588,6 @@
      * is exhausted, at which point obtainBuffer() will either block
      * or return WOULD_BLOCK depending on the value of the "waitCount"
      * parameter.
-     * Each sample is 16-bit signed PCM.
-     *
-     * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications,
-     * which should use write() or callback EVENT_MORE_DATA instead.
      *
      * Interpretation of waitCount:
      *  +n  limits wait time to n * WAIT_PERIOD_MS,
@@ -511,24 +596,27 @@
      *
      * Buffer fields
      * On entry:
-     *  frameCount  number of frames requested
+     *  frameCount  number of [empty slots for] frames requested
+     *  size        ignored
+     *  raw         ignored
      * After error return:
      *  frameCount  0
      *  size        0
      *  raw         undefined
      * After successful return:
-     *  frameCount  actual number of frames available, <= number requested
+     *  frameCount  actual number of [empty slots for] frames available, <= number requested
      *  size        actual number of bytes available
      *  raw         pointer to the buffer
      */
-
-    /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */
-            status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
-                                __attribute__((__deprecated__));
+            status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
+                                size_t *nonContig = NULL);
 
 private:
     /* If nonContig is non-NULL, it is an output parameter that will be set to the number of
-     * additional non-contiguous frames that are available immediately.
+     * additional non-contiguous frames that are predicted to be available immediately,
+     * if the client were to release the first frames and then call obtainBuffer() again.
+     * This value is only a prediction, and needs to be confirmed.
+     * It will be set to zero for an error return.
      * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(),
      * in case the requested amount of frames is in two or more non-contiguous regions.
      * FIXME requested and elapsed are both relative times.  Consider changing to absolute time.
@@ -537,9 +625,15 @@
                                      struct timespec *elapsed = NULL, size_t *nonContig = NULL);
 public:
 
-    /* Release a filled buffer of "audioBuffer->frameCount" frames for AudioFlinger to process. */
-    // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed
-            void        releaseBuffer(Buffer* audioBuffer);
+    /* Public API for TRANSFER_OBTAIN mode.
+     * Release a filled buffer of frames for AudioFlinger to process.
+     *
+     * Buffer fields:
+     *  frameCount  currently ignored, but recommended to be set to the number of frames filled
+     *  size        actual number of bytes filled, must be multiple of frameSize
+     *  raw         ignored
+     */
+            void        releaseBuffer(const Buffer* audioBuffer);
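
For TRANSFER_OBTAIN mode, the obtainBuffer()/releaseBuffer() contract above reduces to a fill loop like the following sketch; fillPcm() is a hypothetical producer that writes whole frames and returns the number of bytes written:

// Hedged sketch: a TRANSFER_OBTAIN fill loop following the Buffer-field contract
// documented above.
#include <media/AudioTrack.h>

size_t fillPcm(void* dst, size_t maxBytes);  // hypothetical; returns whole frames only

void pump(const android::sp<android::AudioTrack>& track, size_t totalFrames) {
    using namespace android;
    size_t framesLeft = totalFrames;
    while (framesLeft > 0) {
        AudioTrack::Buffer buffer;
        buffer.frameCount = framesLeft;           // [empty slots for] frames requested
        size_t nonContig = 0;
        status_t err = track->obtainBuffer(&buffer, 1 /* waitCount */, &nonContig);
        if (err == WOULD_BLOCK || err == TIMED_OUT) {
            continue;                             // track buffer still full; retry
        }
        if (err != NO_ERROR) {
            break;                                // e.g. DEAD_OBJECT: recreate the track
        }
        buffer.size = fillPcm(buffer.raw, buffer.size);  // bytes actually filled
        track->releaseBuffer(&buffer);            // releases buffer.size / frameSize() frames
        framesLeft -= buffer.size / track->frameSize();
        if (buffer.size == 0) {
            break;                                // no source data in this sketch
        }
    }
}
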
 
     /* As a convenience we provide a write() interface to the audio buffer.
      * Input parameter 'size' is in byte units.
@@ -550,8 +644,14 @@
      *      BAD_VALUE           size is invalid
      *      WOULD_BLOCK         when obtainBuffer() returns same, or
      *                          AudioTrack was stopped during the write
+     *      DEAD_OBJECT         when AudioFlinger dies or the output device changes and
+     *                          the track cannot be automatically restored.
+     *                          The application needs to recreate the AudioTrack
+     *                          because the audio device changed or AudioFlinger died.
+     *                          This typically occurs for direct or offload tracks
+     *                          or if mDoNotReconnect is true.
      *      or any other error code returned by IAudioTrack::start() or restoreTrack_l().
-     * Default behavior is to only return until all data has been transferred. Set 'blocking' to
+     * Default behavior is to only return when all data has been transferred. Set 'blocking' to
      * false for the method to return immediately without waiting to try multiple times to write
      * the full content of the buffer.
      */
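
A sketch of the DEAD_OBJECT recovery that the comment above describes; recreateTrack() is a hypothetical application hook, not part of this API:

// Hedged sketch: a blocking write with DEAD_OBJECT recovery.
#include <functional>
#include <media/AudioTrack.h>

void writeAll(android::sp<android::AudioTrack>& track, const void* data, size_t size,
              const std::function<android::sp<android::AudioTrack>()>& recreateTrack) {
    ssize_t written = track->write(data, size, true /* blocking */);
    if (written == android::DEAD_OBJECT) {
        // AudioFlinger died or the output device changed and the track could not be
        // restored automatically; build a new track and retry once.
        track = recreateTrack();
        if (track != 0 && track->initCheck() == android::NO_ERROR) {
            track->write(data, size, true /* blocking */);
        }
    }
}
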
@@ -559,6 +659,7 @@
 
     /*
      * Dumps the state of an audio track.
+     * Not a general-purpose API; intended only for use by media player service to dump its tracks.
      */
             status_t    dump(int fd, const Vector<String16>& args) const;
 
@@ -589,19 +690,45 @@
      *                     overall hardware latency to physical output. In WOULD_BLOCK cases,
      *                     one might poll again, or use getPosition(), or use 0 position and
      *                     current time for the timestamp.
+     *         DEAD_OBJECT if AudioFlinger dies or the output device changes and
+     *                     the track cannot be automatically restored.
+     *                     The application needs to recreate the AudioTrack
+     *                     because the audio device changed or AudioFlinger died.
+     *                     This typically occurs for direct or offload tracks
+     *                     or if mDoNotReconnect is true.
      *         INVALID_OPERATION  if called on a FastTrack, wrong state, or some other error.
      *
      * The timestamp parameter is undefined on return, if status is not NO_ERROR.
      */
             status_t    getTimestamp(AudioTimestamp& timestamp);
 
+    /* Add an AudioDeviceCallback. The caller will be notified when the audio device to which this
+     * AudioTrack is routed is updated.
+     * Replaces any previously installed callback.
+     * Parameters:
+     *  callback:  The callback interface
+     * Returns NO_ERROR if successful.
+     *         INVALID_OPERATION if the same callback is already installed.
+     *         NO_INIT or PERMISSION_DENIED if the AudioFlinger service is not reachable
+     *         BAD_VALUE if the callback is NULL
+     */
+            status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+
+    /* Remove an AudioDeviceCallback.
+     * Parameters:
+     *  callback:  The callback interface
+     * Returns NO_ERROR if successful.
+     *         INVALID_OPERATION if the callback is not installed
+     *         BAD_VALUE if the callback is NULL
+     */
+            status_t removeAudioDeviceCallback(
+                    const sp<AudioSystem::AudioDeviceCallback>& callback);
+
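
A sketch of wiring up the routing-change notification described above; the logging is illustrative only:

// Hedged sketch: observing routing changes on an AudioTrack through the
// AudioSystem::AudioDeviceCallback interface declared in AudioSystem.h.
#define LOG_TAG "RoutingObserver"
#include <utils/Log.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>

class RoutingObserver : public android::AudioSystem::AudioDeviceCallback {
public:
    virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                     audio_port_handle_t deviceId) {
        // Invoked when the device feeding this track's output changes.
        ALOGI("output %d now routed to device %d", audioIo, deviceId);
    }
};

void observe(const android::sp<android::AudioTrack>& track) {
    android::sp<RoutingObserver> observer = new RoutingObserver();
    // Returns INVALID_OPERATION if this exact callback was already added.
    track->addAudioDeviceCallback(observer);
    // ... later, before the observer goes away:
    track->removeAudioDeviceCallback(observer);
}
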
 protected:
     /* copying audio tracks is not allowed */
                         AudioTrack(const AudioTrack& other);
             AudioTrack& operator = (const AudioTrack& other);
 
-            void        setAttributesFromStreamType(audio_stream_type_t streamType);
-
     /* a small internal class to handle the callback */
     class AudioTrackThread : public Thread
     {
@@ -614,6 +741,7 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
+                void        wake();     // wake to handle changed notification conditions.
 
     private:
                 void        pauseInternal(nsecs_t ns = 0LL);
@@ -628,7 +756,9 @@
         bool                mPaused;    // whether thread is requested to pause at next loop entry
         bool                mPausedInt; // whether thread internally requests pause
         nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
-        bool                mIgnoreNextPausedInt;   // whether to ignore next mPausedInt request
+        bool                mIgnoreNextPausedInt;   // skip any internal pause and go immediately
+                                        // to processAudioBuffer(), as the state may have changed
+                                        // since the pause time was calculated.
     };
 
             // body of AudioTrackThread::threadLoop()
@@ -641,10 +771,6 @@
             static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3;
             nsecs_t processAudioBuffer();
 
-            bool     isOffloaded() const;
-            bool     isDirect() const;
-            bool     isOffloadedOrDirect() const;
-
             // caller must hold lock on mLock for all _l methods
 
             status_t createTrack_l();
@@ -657,6 +783,10 @@
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreTrack_l(const char *from);
 
+            bool     isOffloaded() const;
+            bool     isDirect() const;
+            bool     isOffloadedOrDirect() const;
+
             bool     isOffloaded_l() const
                 { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; }
 
@@ -670,6 +800,9 @@
             // increment mPosition by the delta of mServer, and return new value of mPosition
             uint32_t updateAndGetPosition_l();
 
+            // check whether the sample rate and speed are compatible with the AudioTrack
+            bool     isSampleRateSpeedAllowed_l(uint32_t sampleRate, float speed) const;
+
     // Next 4 fields may be changed if IAudioTrack is re-created, but always != 0
     sp<IAudioTrack>         mAudioTrack;
     sp<IMemory>             mCblkMemory;
@@ -680,12 +813,21 @@
 
     float                   mVolume[2];
     float                   mSendLevel;
-    mutable uint32_t        mSampleRate;            // mutable because getSampleRate() can update it.
+    mutable uint32_t        mSampleRate;            // mutable because getSampleRate() can update it
+    uint32_t                mOriginalSampleRate;
+    AudioPlaybackRate       mPlaybackRate;
     size_t                  mFrameCount;            // corresponds to current IAudioTrack, value is
                                                     // reported back by AudioFlinger to the client
     size_t                  mReqFrameCount;         // frame count to request the first or next time
                                                     // a new IAudioTrack is needed, non-decreasing
 
+    // The following AudioFlinger server-side values are cached in createTrack_l().
+    // These values can be used for informational purposes until the track is invalidated,
+    // whereupon restoreTrack_l() calls createTrack_l() to update the values.
+    uint32_t                mAfLatency;             // AudioFlinger latency in ms
+    size_t                  mAfFrameCount;          // AudioFlinger frame count
+    uint32_t                mAfSampleRate;          // AudioFlinger sample rate
+
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
     audio_stream_type_t     mStreamType;            // mStreamType == AUDIO_STREAM_DEFAULT implies
@@ -698,10 +840,7 @@
     const audio_offload_info_t* mOffloadInfo;
     audio_attributes_t      mAttributes;
 
-    // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data.  For 8-bit PCM data, it's
-    // twice as large as mFrameSize because data is expanded to 16-bit before it's stored in buffer.
-    size_t                  mFrameSize;             // app-level frame size
-    size_t                  mFrameSizeAF;           // AudioFlinger frame size
+    size_t                  mFrameSize;             // frame size in bytes
 
     status_t                mStatus;
 
@@ -732,17 +871,25 @@
     bool                    mRefreshRemaining;      // processAudioBuffer() should refresh
                                                     // mRemainingFrames and mRetryOnPartialBuffer
 
+                                                    // used for static track cbf and restoration
+    int32_t                 mLoopCount;             // last setLoop loopCount; zero means disabled
+    uint32_t                mLoopStart;             // last setLoop loopStart
+    uint32_t                mLoopEnd;               // last setLoop loopEnd
+    int32_t                 mLoopCountNotified;     // the last loopCount notified by callback.
+                                                    // mLoopCountNotified counts down, matching
+                                                    // the remaining loop count for static track
+                                                    // playback.
+
     // These are private to processAudioBuffer(), and are not protected by a lock
     uint32_t                mRemainingFrames;       // number of frames to request in obtainBuffer()
     bool                    mRetryOnPartialBuffer;  // sleep and retry after partial obtainBuffer()
     uint32_t                mObservedSequence;      // last observed value of mSequence
 
-    uint32_t                mLoopPeriod;            // in frames, zero means looping is disabled
-
     uint32_t                mMarkerPosition;        // in wrapping (overflow) frame units
     bool                    mMarkerReached;
     uint32_t                mNewPosition;           // in frames
     uint32_t                mUpdatePeriod;          // in frames, zero means no EVENT_NEW_POS
+
     uint32_t                mServer;                // in frames, last known mProxy->getPosition()
                                                     // which is count of frames consumed by server,
                                                     // reset by new IAudioTrack,
@@ -758,10 +905,17 @@
     int64_t                 mStartUs;               // the start time after flush or stop.
                                                     // only used for offloaded and direct tracks.
 
+    bool                    mPreviousTimestampValid;// true if mPreviousTimestamp is valid
+    bool                    mTimestampStartupGlitchReported; // reduce log spam
+    bool                    mRetrogradeMotionReported; // reduce log spam
+    AudioTimestamp          mPreviousTimestamp;     // used to detect retrograde motion
+
     audio_output_flags_t    mFlags;
         // const after set(), except for bits AUDIO_OUTPUT_FLAG_FAST and AUDIO_OUTPUT_FLAG_OFFLOAD.
         // mLock must be held to read or write those bits reliably.
 
+    bool                    mDoNotReconnect;
+
     int                     mSessionId;
     int                     mAuxEffectId;
 
@@ -783,6 +937,10 @@
     bool                    mInUnderrun;            // whether track is currently in underrun state
     uint32_t                mPausedPosition;
 
+    // For the Device Selection API
+    //  a value of AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
+    audio_port_handle_t     mSelectedDeviceId;
+
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
     public:
@@ -797,6 +955,8 @@
     uint32_t                mSequence;              // incremented for each new IAudioTrack attempt
     int                     mClientUid;
     pid_t                   mClientPid;
+
+    sp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 };
 
 class TimedAudioTrack : public AudioTrack
diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h
index b1ed7b0..64a3212 100644
--- a/include/media/EffectsFactoryApi.h
+++ b/include/media/EffectsFactoryApi.h
@@ -171,6 +171,8 @@
 ////////////////////////////////////////////////////////////////////////////////
 int EffectIsNullUuid(const effect_uuid_t *pEffectUuid);
 
+int EffectDumpEffects(int fd);
+
 #if __cplusplus
 }  // extern "C"
 #endif
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 31a14f0..5051aff 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -85,15 +85,19 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
+                                const String16& callingPackage,
                                 size_t *pFrameCount,
                                 track_flags_t *flags,
                                 pid_t tid,  // -1 means unused, otherwise must be valid non-0
+                                int clientUid,
                                 int *sessionId,
                                 size_t *notificationFrames,
                                 sp<IMemory>& cblk,
                                 sp<IMemory>& buffers,   // return value 0 means it follows cblk
                                 status_t *status) = 0;
 
+    // FIXME Surprisingly, sampleRate/format/frameCount/latency don't work for input handles
+
     /* query the audio hardware state. This state never changes,
      * and therefore can be cached.
      */
@@ -142,6 +146,7 @@
     virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0;
 
     // retrieve the audio recording buffer size
+    // FIXME This API assumes a route, and so should be deprecated.
     virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
             audio_channel_mask_t channelMask) const = 0;
 
@@ -195,6 +200,7 @@
                                     // AudioFlinger doesn't take over handle reference from client
                                     audio_io_handle_t output,
                                     int sessionId,
+                                    const String16& callingPackage,
                                     status_t *status,
                                     int *id,
                                     int *enabled) = 0;
@@ -237,6 +243,9 @@
 
     /* Get the HW synchronization source used for an audio session */
     virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) = 0;
+
+    /* Indicate JAVA services are ready (scheduling, power management ...) */
+    virtual status_t systemReady() = 0;
 };
 
 
diff --git a/include/media/IAudioFlingerClient.h b/include/media/IAudioFlingerClient.h
index 75a9971..0080bc9 100644
--- a/include/media/IAudioFlingerClient.h
+++ b/include/media/IAudioFlingerClient.h
@@ -22,6 +22,7 @@
 #include <binder/IInterface.h>
 #include <utils/KeyedVector.h>
 #include <system/audio.h>
+#include <media/AudioIoDescriptor.h>
 
 namespace android {
 
@@ -33,7 +34,8 @@
     DECLARE_META_INTERFACE(AudioFlingerClient);
 
     // Notifies a change of audio input/output configuration.
-    virtual void ioConfigChanged(int event, audio_io_handle_t ioHandle, const void *param2) = 0;
+    virtual void ioConfigChanged(audio_io_config_event event,
+                                 const sp<AudioIoDescriptor>& ioDesc) = 0;
 
 };
 
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index c98c475..6b93f6f 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -44,7 +44,8 @@
     //
     virtual status_t setDeviceConnectionState(audio_devices_t device,
                                               audio_policy_dev_state_t state,
-                                              const char *device_address) = 0;
+                                              const char *device_address,
+                                              const char *device_name) = 0;
     virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                   const char *device_address) = 0;
     virtual status_t setPhoneState(audio_mode_t state) = 0;
@@ -61,10 +62,12 @@
                                         audio_io_handle_t *output,
                                         audio_session_t session,
                                         audio_stream_type_t *stream,
+                                        uid_t uid,
                                         uint32_t samplingRate = 0,
                                         audio_format_t format = AUDIO_FORMAT_DEFAULT,
                                         audio_channel_mask_t channelMask = 0,
                                         audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                                        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                                         const audio_offload_info_t *offloadInfo = NULL) = 0;
     virtual status_t startOutput(audio_io_handle_t output,
                                  audio_stream_type_t stream,
@@ -76,12 +79,14 @@
                                audio_stream_type_t stream,
                                audio_session_t session) = 0;
     virtual status_t  getInputForAttr(const audio_attributes_t *attr,
-                                      audio_io_handle_t *input,
-                                      audio_session_t session,
-                                      uint32_t samplingRate,
-                                      audio_format_t format,
-                                      audio_channel_mask_t channelMask,
-                                      audio_input_flags_t flags) = 0;
+                              audio_io_handle_t *input,
+                              audio_session_t session,
+                              uid_t uid,
+                              uint32_t samplingRate,
+                              audio_format_t format,
+                              audio_channel_mask_t channelMask,
+                              audio_input_flags_t flags,
+                              audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE) = 0;
     virtual status_t startInput(audio_io_handle_t input,
                                 audio_session_t session) = 0;
     virtual status_t stopInput(audio_io_handle_t input,
@@ -144,6 +149,8 @@
 
     virtual void registerClient(const sp<IAudioPolicyServiceClient>& client) = 0;
 
+    virtual void setAudioPortCallbacksEnabled(bool enabled) = 0;
+
     virtual status_t acquireSoundTriggerSession(audio_session_t *session,
                                            audio_io_handle_t *ioHandle,
                                            audio_devices_t *device) = 0;
@@ -153,6 +160,11 @@
     virtual audio_mode_t getPhoneState() = 0;
 
     virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration) = 0;
+
+    virtual status_t startAudioSource(const struct audio_port_config *source,
+                                      const audio_attributes_t *attributes,
+                                      audio_io_handle_t *handle) = 0;
+    virtual status_t stopAudioSource(audio_io_handle_t handle) = 0;
 };
 
 
diff --git a/include/media/IAudioPolicyServiceClient.h b/include/media/IAudioPolicyServiceClient.h
index 59df046..a7f2cc3 100644
--- a/include/media/IAudioPolicyServiceClient.h
+++ b/include/media/IAudioPolicyServiceClient.h
@@ -35,6 +35,8 @@
     virtual void onAudioPortListUpdate() = 0;
     // Notifies a change of audio patch configuration.
     virtual void onAudioPatchListUpdate() = 0;
+    // Notifies a change in the mixing state of a specific mix in a dynamic audio policy
+    virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state) = 0;
 };
 
 
diff --git a/include/media/ICrypto.h b/include/media/ICrypto.h
index 07742ca..ea316de 100644
--- a/include/media/ICrypto.h
+++ b/include/media/ICrypto.h
@@ -25,6 +25,7 @@
 namespace android {
 
 struct AString;
+class IMemory;
 
 struct ICrypto : public IInterface {
     DECLARE_META_INTERFACE(Crypto);
@@ -43,12 +44,14 @@
 
     virtual void notifyResolution(uint32_t width, uint32_t height) = 0;
 
+    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId) = 0;
+
     virtual ssize_t decrypt(
             bool secure,
             const uint8_t key[16],
             const uint8_t iv[16],
             CryptoPlugin::Mode mode,
-            const void *srcPtr,
+            const sp<IMemory> &sharedBuffer, size_t offset,
             const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
             void *dstPtr,
             AString *errorDetailMsg) = 0;
@@ -61,6 +64,9 @@
     virtual status_t onTransact(
             uint32_t code, const Parcel &data, Parcel *reply,
             uint32_t flags = 0);
+private:
+    void readVector(const Parcel &data, Vector<uint8_t> &vector) const;
+    void writeVector(Parcel *reply, Vector<uint8_t> const &vector) const;
 };
 
 }  // namespace android
diff --git a/include/media/IDataSource.h b/include/media/IDataSource.h
new file mode 100644
index 0000000..07e46f7
--- /dev/null
+++ b/include/media/IDataSource.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IDATASOURCE_H
+#define ANDROID_IDATASOURCE_H
+
+#include <binder/IInterface.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+
+namespace android {
+
+class IMemory;
+
+// A binder interface for implementing a stagefright DataSource remotely.
+class IDataSource : public IInterface {
+public:
+    DECLARE_META_INTERFACE(DataSource);
+
+    // Get the memory that readAt writes into.
+    virtual sp<IMemory> getIMemory() = 0;
+    // Read up to |size| bytes into the memory returned by getIMemory(). Returns
+    // the number of bytes read, or -1 on error. |size| must not be larger than
+    // the buffer.
+    virtual ssize_t readAt(off64_t offset, size_t size) = 0;
+    // Get the size, or -1 if the size is unknown.
+    virtual status_t getSize(off64_t* size) = 0;
+    // This should be called before deleting |this|. The other methods may
+    // return errors if they're called after calling close().
+    virtual void close() = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IDataSource);
+};
+
+// ----------------------------------------------------------------------------
+
+class BnDataSource : public BnInterface<IDataSource> {
+public:
+    virtual status_t onTransact(uint32_t code,
+                                const Parcel& data,
+                                Parcel* reply,
+                                uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IDATASOURCE_H
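
A concrete remote data source then only has to implement the four virtuals; a minimal in-memory sketch (the backing storage and buffer size here are illustrative, not part of the interface):

// Hedged sketch: an in-memory BnDataSource. readAt() copies into the shared IMemory
// returned by getIMemory(), following the contract documented above.
#include <string.h>
#include <vector>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/IDataSource.h>

namespace android {

class VectorDataSource : public BnDataSource {
public:
    explicit VectorDataSource(const std::vector<uint8_t>& data)
        : mData(data),
          mMemory(new MemoryBase(
                  new MemoryHeapBase(kBufferSize, 0, "VectorDataSource"),
                  0, kBufferSize)) {}

    virtual sp<IMemory> getIMemory() { return mMemory; }

    virtual ssize_t readAt(off64_t offset, size_t size) {
        if (mMemory == NULL || size > kBufferSize) {
            return -1;                                   // request larger than the buffer
        }
        if (offset < 0 || (size_t)offset >= mData.size()) {
            return 0;                                    // end of data
        }
        size_t toCopy = mData.size() - (size_t)offset;
        if (toCopy > size) {
            toCopy = size;
        }
        memcpy(mMemory->pointer(), &mData[(size_t)offset], toCopy);
        return (ssize_t)toCopy;
    }

    virtual status_t getSize(off64_t* size) {
        *size = (off64_t)mData.size();
        return OK;
    }

    virtual void close() { mMemory.clear(); }

private:
    enum { kBufferSize = 64 * 1024 };
    std::vector<uint8_t> mData;
    sp<IMemory> mMemory;
};

}  // namespace android
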
diff --git a/include/media/IDrm.h b/include/media/IDrm.h
index affcbd7..9449beb6 100644
--- a/include/media/IDrm.h
+++ b/include/media/IDrm.h
@@ -47,7 +47,8 @@
                       Vector<uint8_t> const &initData,
                       String8 const &mimeType, DrmPlugin::KeyType keyType,
                       KeyedVector<String8, String8> const &optionalParameters,
-                      Vector<uint8_t> &request, String8 &defaultUrl) = 0;
+                      Vector<uint8_t> &request, String8 &defaultUrl,
+                      DrmPlugin::KeyRequestType *keyRequestType) = 0;
 
     virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
                                         Vector<uint8_t> const &response,
diff --git a/include/media/IMediaCodecList.h b/include/media/IMediaCodecList.h
index e93ea8b..12b52d7 100644
--- a/include/media/IMediaCodecList.h
+++ b/include/media/IMediaCodecList.h
@@ -21,6 +21,8 @@
 #include <binder/IInterface.h>
 #include <binder/Parcel.h>
 
+#include <media/stagefright/foundation/AMessage.h>
+
 namespace android {
 
 struct MediaCodecInfo;
@@ -33,6 +35,8 @@
     virtual size_t countCodecs() const = 0;
     virtual sp<MediaCodecInfo> getCodecInfo(size_t index) const = 0;
 
+    virtual const sp<AMessage> getGlobalSettings() const = 0;
+
     virtual ssize_t findCodecByType(
             const char *type, bool encoder, size_t startIndex = 0) const = 0;
 
diff --git a/include/media/IMediaMetadataRetriever.h b/include/media/IMediaMetadataRetriever.h
index 2529800..c90f254 100644
--- a/include/media/IMediaMetadataRetriever.h
+++ b/include/media/IMediaMetadataRetriever.h
@@ -26,6 +26,7 @@
 
 namespace android {
 
+class IDataSource;
 struct IMediaHTTPService;
 
 class IMediaMetadataRetriever: public IInterface
@@ -40,6 +41,7 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t        setDataSource(const sp<IDataSource>& dataSource) = 0;
     virtual sp<IMemory>     getFrameAtTime(int64_t timeUs, int option) = 0;
     virtual sp<IMemory>     extractAlbumArt() = 0;
     virtual const char*     extractMetadata(int keyCode) = 0;
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index db62cd5..0fd8933 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -31,9 +31,12 @@
 
 class Parcel;
 class Surface;
-class IStreamSource;
+class IDataSource;
+struct IStreamSource;
 class IGraphicBufferProducer;
 struct IMediaHTTPService;
+struct AudioPlaybackRate;
+struct AVSyncSettings;
 
 class IMediaPlayer: public IInterface
 {
@@ -49,6 +52,7 @@
 
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
+    virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
     virtual status_t        prepareAsync() = 0;
@@ -56,6 +60,11 @@
     virtual status_t        stop() = 0;
     virtual status_t        pause() = 0;
     virtual status_t        isPlaying(bool* state) = 0;
+    virtual status_t        setPlaybackSettings(const AudioPlaybackRate& rate) = 0;
+    virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) = 0;
+    virtual status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) = 0;
+    virtual status_t        getSyncSettings(AVSyncSettings* sync /* nonnull */,
+                                            float* videoFps /* nonnull */) = 0;
     virtual status_t        seekTo(int msec) = 0;
     virtual status_t        getCurrentPosition(int* msec) = 0;
     virtual status_t        getDuration(int* msec) = 0;
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 67b599a..a316ce2 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -47,9 +47,10 @@
 public:
     DECLARE_META_INTERFACE(MediaPlayerService);
 
-    virtual sp<IMediaRecorder> createMediaRecorder() = 0;
+    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName) = 0;
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
-    virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) = 0;
+    virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0)
+            = 0;
 
     virtual sp<IOMX>            getOMX() = 0;
     virtual sp<ICrypto>         makeCrypto() = 0;
@@ -64,8 +65,8 @@
     // display client when display connection, disconnection or errors occur.
     // The assumption is that at most one remote display will be connected to the
     // provided interface at a time.
-    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const sp<IRemoteDisplayClient>& client,
-            const String8& iface) = 0;
+    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName,
+            const sp<IRemoteDisplayClient>& client, const String8& iface) = 0;
 
     // codecs and audio devices usage tracking for the battery app
     enum BatteryDataBits {
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 3e67550..77ed5d3 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -26,6 +26,7 @@
 class ICamera;
 class ICameraRecordingProxy;
 class IMediaRecorderClient;
+class IGraphicBufferConsumer;
 class IGraphicBufferProducer;
 
 class IMediaRecorder: public IInterface
@@ -41,7 +42,6 @@
     virtual status_t setOutputFormat(int of) = 0;
     virtual status_t setVideoEncoder(int ve) = 0;
     virtual status_t setAudioEncoder(int ae) = 0;
-    virtual status_t setOutputFile(const char* path) = 0;
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t setVideoSize(int width, int height) = 0;
     virtual status_t setVideoFrameRate(int frames_per_second) = 0;
@@ -56,6 +56,7 @@
     virtual status_t init() = 0;
     virtual status_t close() = 0;
     virtual status_t release() = 0;
+    virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
 };
 
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 627f23b..3d29e4a 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -20,10 +20,15 @@
 
 #include <binder/IInterface.h>
 #include <gui/IGraphicBufferProducer.h>
+#include <gui/IGraphicBufferConsumer.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/List.h>
 #include <utils/String8.h>
 
+#include <list>
+
+#include <media/hardware/MetadataBufferType.h>
+
 #include <OMX_Core.h>
 #include <OMX_Video.h>
 
@@ -80,14 +85,16 @@
     virtual status_t getState(
             node_id node, OMX_STATETYPE* state) = 0;
 
+    // This will set *type to previous metadata buffer type on OMX error (not on binder error), and
+    // new metadata buffer type on success.
     virtual status_t storeMetaDataInBuffers(
-            node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+            node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type = NULL) = 0;
 
     virtual status_t prepareForAdaptivePlayback(
             node_id node, OMX_U32 portIndex, OMX_BOOL enable,
             OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
 
-   virtual status_t configureVideoTunnelMode(
+    virtual status_t configureVideoTunnelMode(
             node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
             OMX_U32 audioHwSync, native_handle_t **sidebandHandle) = 0;
 
@@ -97,9 +104,10 @@
     virtual status_t getGraphicBufferUsage(
             node_id node, OMX_U32 port_index, OMX_U32* usage) = 0;
 
+    // Use |params| as an OMX buffer, but limit the size of the OMX buffer to |allottedSize|.
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
-            buffer_id *buffer) = 0;
+            buffer_id *buffer, OMX_U32 allottedSize) = 0;
 
     virtual status_t useGraphicBuffer(
             node_id node, OMX_U32 port_index,
@@ -109,9 +117,23 @@
             node_id node, OMX_U32 port_index,
             const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
 
+    // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
+    // well as on success.
     virtual status_t createInputSurface(
             node_id node, OMX_U32 port_index,
-            sp<IGraphicBufferProducer> *bufferProducer) = 0;
+            sp<IGraphicBufferProducer> *bufferProducer,
+            MetadataBufferType *type = NULL) = 0;
+
+    virtual status_t createPersistentInputSurface(
+            sp<IGraphicBufferProducer> *bufferProducer,
+            sp<IGraphicBufferConsumer> *bufferConsumer) = 0;
+
+    // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
+    // well as on success.
+    virtual status_t setInputSurface(
+            node_id node, OMX_U32 port_index,
+            const sp<IGraphicBufferConsumer> &bufferConsumer,
+            MetadataBufferType *type) = 0;
 
     virtual status_t signalEndOfInputStream(node_id node) = 0;
 
@@ -123,20 +145,32 @@
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data) = 0;
 
+    // Allocate an OMX buffer of size |allottedSize|. Use |params| as the backup buffer, which
+    // may be larger.
     virtual status_t allocateBufferWithBackup(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
-            buffer_id *buffer) = 0;
+            buffer_id *buffer, OMX_U32 allottedSize) = 0;
 
     virtual status_t freeBuffer(
             node_id node, OMX_U32 port_index, buffer_id buffer) = 0;
 
-    virtual status_t fillBuffer(node_id node, buffer_id buffer) = 0;
+    enum {
+        kFenceTimeoutMs = 1000
+    };
+    // Calls OMX_FillBuffer on buffer, and passes |fenceFd| to component if it supports
+    // fences. Otherwise, it waits on |fenceFd| before calling OMX_FillBuffer.
+    // Takes ownership of |fenceFd| even if this call fails.
+    virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd = -1) = 0;
 
+    // Calls OMX_EmptyBuffer on buffer (after updating buffer header with |range_offset|,
+    // |range_length|, |flags| and |timestamp|). Passes |fenceFd| to component if it
+    // supports fences. Otherwise, it waits on |fenceFd| before calling OMX_EmptyBuffer.
+    // Takes ownership of |fenceFd| even if this call fails.
     virtual status_t emptyBuffer(
             node_id node,
             buffer_id buffer,
             OMX_U32 range_offset, OMX_U32 range_length,
-            OMX_U32 flags, OMX_TICKS timestamp) = 0;
+            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd = -1) = 0;
 
     virtual status_t getExtensionIndex(
             node_id node,
@@ -147,6 +181,7 @@
         INTERNAL_OPTION_SUSPEND,  // data is a bool
         INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,  // data is an int64_t
         INTERNAL_OPTION_MAX_TIMESTAMP_GAP, // data is int64_t
+        INTERNAL_OPTION_MAX_FPS, // data is float
         INTERNAL_OPTION_START_TIME, // data is an int64_t
         INTERNAL_OPTION_TIME_LAPSE, // data is an int64_t[2]
     };
@@ -163,10 +198,11 @@
         EVENT,
         EMPTY_BUFFER_DONE,
         FILL_BUFFER_DONE,
-
+        FRAME_RENDERED,
     } type;
 
     IOMX::node_id node;
+    int fenceFd; // used for EMPTY_BUFFER_DONE and FILL_BUFFER_DONE; client must close this
 
     union {
         // if type == EVENT
@@ -190,6 +226,11 @@
             OMX_TICKS timestamp;
         } extended_buffer_data;
 
+        // if type == FRAME_RENDERED
+        struct {
+            OMX_TICKS timestamp;
+            OMX_S64 nanoTime;
+        } render_data;
     } u;
 };
 
@@ -197,7 +238,8 @@
 public:
     DECLARE_META_INTERFACE(OMXObserver);
 
-    virtual void onMessage(const omx_message &msg) = 0;
+    // Handle (list of) messages.
+    virtual void onMessages(const std::list<omx_message> &messages) = 0;
 };
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -223,4 +265,15 @@
 
 }  // namespace android
 
+inline static const char *asString(android::MetadataBufferType i, const char *def = "??") {
+    using namespace android;
+    switch (i) {
+        case kMetadataBufferTypeCameraSource:   return "CameraSource";
+        case kMetadataBufferTypeGrallocSource:  return "GrallocSource";
+        case kMetadataBufferTypeANWBuffer:      return "ANWBuffer";
+        case kMetadataBufferTypeInvalid:        return "Invalid";
+        default:                                return def;
+    }
+}
+
 #endif  // ANDROID_IOMX_H_
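
A minimal caller-side sketch of the new metadata plumbing above, assuming an already
connected IOMX node (the helper name, port index and error handling are illustrative,
not part of this patch). It shows how the MetadataBufferType out-parameter of
storeMetaDataInBuffers() pairs with the asString() helper added at the end of IOMX.h:

    #define LOG_TAG "IOMXMetadataExample"
    #include <media/IOMX.h>
    #include <utils/Log.h>

    // Enable metadata buffers on an input port and log the resulting mode.
    // On an OMX error, |type| still reflects the previous mode, as documented above.
    static android::status_t enableInputMetadataMode(
            const android::sp<android::IOMX> &omx, android::IOMX::node_id node) {
        using namespace android;
        MetadataBufferType type = kMetadataBufferTypeInvalid;
        status_t err = omx->storeMetaDataInBuffers(
                node, 0 /* port_index, illustrative */, OMX_TRUE, &type);
        ALOGI("storeMetaDataInBuffers returned %d, metadata buffer type is %s",
                err, asString(type));
        return err;
    }
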
diff --git a/include/media/IResourceManagerClient.h b/include/media/IResourceManagerClient.h
new file mode 100644
index 0000000..aa0cd88
--- /dev/null
+++ b/include/media/IResourceManagerClient.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IRESOURCEMANAGERCLIENT_H
+#define ANDROID_IRESOURCEMANAGERCLIENT_H
+
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class IResourceManagerClient: public IInterface
+{
+public:
+    DECLARE_META_INTERFACE(ResourceManagerClient);
+
+    virtual bool reclaimResource() = 0;
+    virtual String8 getName() = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnResourceManagerClient: public BnInterface<IResourceManagerClient>
+{
+public:
+    virtual status_t onTransact(uint32_t code,
+                                const Parcel &data,
+                                Parcel *reply,
+                                uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IRESOURCEMANAGERCLIENT_H
diff --git a/include/media/IResourceManagerService.h b/include/media/IResourceManagerService.h
new file mode 100644
index 0000000..1e4f6de
--- /dev/null
+++ b/include/media/IResourceManagerService.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IRESOURCEMANAGERSERVICE_H
+#define ANDROID_IRESOURCEMANAGERSERVICE_H
+
+#include <utils/Errors.h>  // for status_t
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+#include <media/IResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+
+namespace android {
+
+class IResourceManagerService: public IInterface
+{
+public:
+    DECLARE_META_INTERFACE(ResourceManagerService);
+
+    virtual void config(const Vector<MediaResourcePolicy> &policies) = 0;
+
+    virtual void addResource(
+            int pid,
+            int64_t clientId,
+            const sp<IResourceManagerClient> client,
+            const Vector<MediaResource> &resources) = 0;
+
+    virtual void removeResource(int pid, int64_t clientId) = 0;
+
+    virtual bool reclaimResource(
+            int callingPid,
+            const Vector<MediaResource> &resources) = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnResourceManagerService: public BnInterface<IResourceManagerService>
+{
+public:
+    virtual status_t onTransact(uint32_t code,
+                                const Parcel &data,
+                                Parcel *reply,
+                                uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IRESOURCEMANAGERSERVICE_H
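
The two new binder interfaces above are easiest to read together. The sketch below is
hypothetical (the names MyCodecClient and registerCodec, and the resource kind and count,
are illustrative); it shows how a codec owner might implement IResourceManagerClient and
register a resource with IResourceManagerService, using the MediaResource type introduced
later in this patch:

    #include <media/IResourceManagerClient.h>
    #include <media/IResourceManagerService.h>
    #include <media/MediaResource.h>
    #include <utils/Vector.h>

    namespace android {

    // Hypothetical client: asked by the service to give up its resources.
    struct MyCodecClient : public BnResourceManagerClient {
        virtual bool reclaimResource() {
            // Release the underlying codec here; return true if something was freed.
            return true;
        }
        virtual String8 getName() {
            return String8("MyCodecClient");
        }
    };

    // Register one non-secure codec resource for |pid|/|clientId| with the service.
    static void registerCodec(const sp<IResourceManagerService> &service,
            int pid, int64_t clientId, const sp<IResourceManagerClient> &client) {
        Vector<MediaResource> resources;
        resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
        service->addResource(pid, clientId, client, resources);
    }

    }  // namespace android
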
diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h
index 677119b..4a6aafd 100644
--- a/include/media/IStreamSource.h
+++ b/include/media/IStreamSource.h
@@ -23,7 +23,7 @@
 namespace android {
 
 struct AMessage;
-struct IMemory;
+class IMemory;
 struct IStreamListener;
 
 struct IStreamSource : public IInterface {
@@ -81,6 +81,13 @@
     // with the next PTS occuring in the stream. The value is of type int64_t.
     static const char *const kKeyMediaTimeUs;
 
+    // Optionally signalled as part of a discontinuity that includes
+    // DISCONTINUITY_TIME. It indicates the media time (in us) of a recent
+    // sample from the same content, and is used as a hint for the parser to
+    // handle PTS wraparound. This is required when a new parser is created
+    // to continue parsing content from the same timeline.
+    static const char *const kKeyRecentMediaTimeUs;
+
     virtual void issueCommand(
             Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0;
 };
diff --git a/include/media/MediaCodecInfo.h b/include/media/MediaCodecInfo.h
index cd56adb..4067b47 100644
--- a/include/media/MediaCodecInfo.h
+++ b/include/media/MediaCodecInfo.h
@@ -32,9 +32,11 @@
 namespace android {
 
 struct AMessage;
-struct Parcel;
+class Parcel;
 struct CodecCapabilities;
 
+typedef KeyedVector<AString, AString> CodecSettings;
+
 struct MediaCodecInfo : public RefBase {
     struct ProfileLevel {
         uint32_t mProfile;
@@ -104,6 +106,7 @@
     MediaCodecInfo(AString name, bool encoder, const char *mime);
     void addQuirk(const char *name);
     status_t addMime(const char *mime);
+    status_t updateMime(const char *mime);
     status_t initializeCapabilities(const CodecCapabilities &caps);
     void addDetail(const AString &key, const AString &value);
     void addFeature(const AString &key, int32_t value);
@@ -114,6 +117,7 @@
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodecInfo);
 
     friend class MediaCodecList;
+    friend class MediaCodecListOverridesTest;
 };
 
 }  // namespace android
diff --git a/include/media/MediaMetadataRetrieverInterface.h b/include/media/MediaMetadataRetrieverInterface.h
index 38dbb20..bce6ee3 100644
--- a/include/media/MediaMetadataRetrieverInterface.h
+++ b/include/media/MediaMetadataRetrieverInterface.h
@@ -25,6 +25,7 @@
 
 namespace android {
 
+class DataSource;
 struct IMediaHTTPService;
 
 // Abstract base class
@@ -40,6 +41,7 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t setDataSource(const sp<DataSource>& source) = 0;
     virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) = 0;
     virtual MediaAlbumArt* extractAlbumArt() = 0;
     virtual const char* extractMetadata(int keyCode) = 0;
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 4a6bf28..de82554 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -26,8 +26,10 @@
 #include <utils/RefBase.h>
 
 #include <media/mediaplayer.h>
+#include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTimestamp.h>
+#include <media/AVSyncSettings.h>
 #include <media/Metadata.h>
 
 // Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
@@ -36,6 +38,7 @@
 
 namespace android {
 
+class DataSource;
 class Parcel;
 class Surface;
 class IGraphicBufferProducer;
@@ -110,20 +113,35 @@
                 AudioCallback cb = NULL,
                 void *cookie = NULL,
                 audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                const audio_offload_info_t *offloadInfo = NULL) = 0;
+                const audio_offload_info_t *offloadInfo = NULL,
+                bool doNotReconnect = false,
+                uint32_t suggestedFrameCount = 0) = 0;
 
         virtual status_t    start() = 0;
-        virtual ssize_t     write(const void* buffer, size_t size) = 0;
+
+        /* Input parameter |size| is the number of bytes of data stored in |buffer|.
+         * Data is copied over and actual number of bytes written (>= 0)
+         * is returned, or no data is copied and a negative status code
+         * is returned (even when |blocking| is true).
+         * When |blocking| is false, AudioSink will immediately return after
+         * part of or full |buffer| is copied over.
+         * When |blocking| is true, AudioSink will wait to copy the entire
+         * buffer, unless an error occurs or the copy operation is
+         * prematurely stopped.
+         */
+        virtual ssize_t     write(const void* buffer, size_t size, bool blocking = true) = 0;
+
         virtual void        stop() = 0;
         virtual void        flush() = 0;
         virtual void        pause() = 0;
         virtual void        close() = 0;
 
-        virtual status_t    setPlaybackRatePermille(int32_t rate) { return INVALID_OPERATION; }
+        virtual status_t    setPlaybackRate(const AudioPlaybackRate& rate) = 0;
+        virtual status_t    getPlaybackRate(AudioPlaybackRate* rate /* nonnull */) = 0;
         virtual bool        needsTrailingPadding() { return true; }
 
-        virtual status_t    setParameters(const String8& keyValuePairs) { return NO_ERROR; };
-        virtual String8     getParameters(const String8& keys) { return String8::empty(); };
+        virtual status_t    setParameters(const String8& /* keyValuePairs */) { return NO_ERROR; }
+        virtual String8     getParameters(const String8& /* keys */) { return String8::empty(); }
     };
 
                         MediaPlayerBase() : mCookie(0), mNotify(0) {}
@@ -131,7 +149,7 @@
     virtual status_t    initCheck() = 0;
     virtual bool        hardwareOutput() = 0;
 
-    virtual status_t    setUID(uid_t uid) {
+    virtual status_t    setUID(uid_t /* uid */) {
         return INVALID_OPERATION;
     }
 
@@ -142,7 +160,11 @@
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
 
-    virtual status_t    setDataSource(const sp<IStreamSource> &source) {
+    virtual status_t    setDataSource(const sp<IStreamSource>& /* source */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setDataSource(const sp<DataSource>& /* source */) {
         return INVALID_OPERATION;
     }
 
@@ -156,6 +178,31 @@
     virtual status_t    stop() = 0;
     virtual status_t    pause() = 0;
     virtual bool        isPlaying() = 0;
+    virtual status_t    setPlaybackSettings(const AudioPlaybackRate& rate) {
+        // by default, players only support setting rate to the default
+        if (!isAudioPlaybackRateEqual(rate, AUDIO_PLAYBACK_RATE_DEFAULT)) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) {
+        *rate = AUDIO_PLAYBACK_RATE_DEFAULT;
+        return OK;
+    }
+    virtual status_t    setSyncSettings(const AVSyncSettings& sync, float /* videoFps */) {
+        // By default, players only support setting sync source to default; all other sync
+        // settings are ignored. There is no requirement for getters to return set values.
+        if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getSyncSettings(
+                                AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) {
+        *sync = AVSyncSettings();
+        *videoFps = -1.f;
+        return OK;
+    }
     virtual status_t    seekTo(int msec) = 0;
     virtual status_t    getCurrentPosition(int *msec) = 0;
     virtual status_t    getDuration(int *msec) = 0;
@@ -166,13 +213,13 @@
     virtual status_t    getParameter(int key, Parcel *reply) = 0;
 
     // default no-op implementation of optional extensions
-    virtual status_t setRetransmitEndpoint(const struct sockaddr_in* endpoint) {
+    virtual status_t setRetransmitEndpoint(const struct sockaddr_in* /* endpoint */) {
         return INVALID_OPERATION;
     }
-    virtual status_t getRetransmitEndpoint(struct sockaddr_in* endpoint) {
+    virtual status_t getRetransmitEndpoint(struct sockaddr_in* /* endpoint */) {
         return INVALID_OPERATION;
     }
-    virtual status_t setNextPlayer(const sp<MediaPlayerBase>& next) {
+    virtual status_t setNextPlayer(const sp<MediaPlayerBase>& /* next */) {
         return OK;
     }
 
@@ -192,8 +239,8 @@
     //            the known metadata should be returned.
     // @param[inout] records Parcel where the player appends its metadata.
     // @return OK if the call was successful.
-    virtual status_t    getMetadata(const media::Metadata::Filter& ids,
-                                    Parcel *records) {
+    virtual status_t    getMetadata(const media::Metadata::Filter& /* ids */,
+                                    Parcel* /* records */) {
         return INVALID_OPERATION;
     };
 
@@ -216,7 +263,7 @@
         if (notifyCB) notifyCB(cookie, msg, ext1, ext2, obj);
     }
 
-    virtual status_t dump(int fd, const Vector<String16> &args) const {
+    virtual status_t dump(int /* fd */, const Vector<String16>& /* args */) const {
         return INVALID_OPERATION;
     }
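
The blocking/non-blocking contract documented on AudioSink::write() above is easy to get
wrong, so here is a small caller-side sketch (the name writeNonBlocking and the
give-up-on-zero policy are illustrative, not part of the interface): it retries partial
writes and stops on either a negative status or a zero-byte write, at which point a real
player would wait for the sink callback before retrying the remainder.

    #include <stdint.h>
    #include <sys/types.h>
    #include <media/MediaPlayerInterface.h>

    // Push up to |size| bytes through a non-blocking AudioSink::write().
    // Returns the number of bytes accepted, or a negative status code.
    static ssize_t writeNonBlocking(
            const android::sp<android::MediaPlayerBase::AudioSink> &sink,
            const uint8_t *data, size_t size) {
        size_t offset = 0;
        while (offset < size) {
            ssize_t n = sink->write(data + offset, size - offset, false /* blocking */);
            if (n < 0) {
                return n;       // error: nothing was copied by this call
            }
            if (n == 0) {
                break;          // sink is full for now; caller retries the rest later
            }
            offset += static_cast<size_t>(n);
        }
        return static_cast<ssize_t>(offset);
    }
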
 
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index f061d22..e02918f 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -58,24 +58,6 @@
     CAMCORDER_QUALITY_HIGH_SPEED_LIST_END = 2005,
 };
 
-/**
- * Set CIF as default maximum import and export resolution of video editor.
- * The maximum import and export resolutions are platform specific,
- * which should be defined in media_profiles.xml.
- * Set default maximum prefetch YUV frames to 6, which means video editor can
- * queue up to 6 YUV frames in the video encoder source.
- * This value is used to limit the amount of memory used by video editor
- * engine when the encoder consumes YUV frames at a lower speed
- * than video editor engine produces.
- */
-enum videoeditor_capability {
-    VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH = 352,
-    VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT = 288,
-    VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH = 352,
-    VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT = 288,
-    VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES = 6
-};
-
 enum video_decoder {
     VIDEO_DECODER_WMV,
 };
@@ -148,32 +130,6 @@
     int getVideoEncoderParamByName(const char *name, video_encoder codec) const;
 
     /**
-     * Returns the value for the given param name for the video editor cap
-     * param or -1 if error.
-     * Supported param name are:
-     * videoeditor.input.width.max - max input video frame width
-     * videoeditor.input.height.max - max input video frame height
-     * videoeditor.output.width.max - max output video frame width
-     * videoeditor.output.height.max - max output video frame height
-     * maxPrefetchYUVFrames - max prefetch YUV frames in video editor engine. This value is used
-     * to limit the memory consumption.
-     */
-    int getVideoEditorCapParamByName(const char *name) const;
-
-    /**
-     * Returns the value for the given param name for the video editor export codec format
-     * param or -1 if error.
-     * Supported param name are:
-     * videoeditor.export.profile - export video profile
-     * videoeditor.export.level - export video level
-     * Supported param codec are:
-     * 1 for h263
-     * 2 for h264
-     * 3 for mpeg4
-     */
-    int getVideoEditorExportParamByName(const char *name, int codec) const;
-
-    /**
      * Returns the audio encoders supported.
      */
     Vector<audio_encoder> getAudioEncoders() const;
@@ -221,7 +177,7 @@
 
     MediaProfiles& operator=(const MediaProfiles&);  // Don't call me
     MediaProfiles(const MediaProfiles&);             // Don't call me
-    MediaProfiles() { mVideoEditorCap = NULL; }        // Dummy default constructor
+    MediaProfiles() {}                               // Dummy default constructor
     ~MediaProfiles();                                // Don't delete me
 
     struct VideoCodec {
@@ -366,31 +322,6 @@
         int mCameraId;
         Vector<int> mLevels;
     };
-    struct ExportVideoProfile {
-        ExportVideoProfile(int codec, int profile, int level)
-            :mCodec(codec),mProfile(profile),mLevel(level) {}
-        ~ExportVideoProfile() {}
-        int mCodec;
-        int mProfile;
-        int mLevel;
-    };
-    struct VideoEditorCap {
-        VideoEditorCap(int inFrameWidth, int inFrameHeight,
-            int outFrameWidth, int outFrameHeight, int frames)
-            : mMaxInputFrameWidth(inFrameWidth),
-              mMaxInputFrameHeight(inFrameHeight),
-              mMaxOutputFrameWidth(outFrameWidth),
-              mMaxOutputFrameHeight(outFrameHeight),
-              mMaxPrefetchYUVFrames(frames) {}
-
-        ~VideoEditorCap() {}
-
-        int mMaxInputFrameWidth;
-        int mMaxInputFrameHeight;
-        int mMaxOutputFrameWidth;
-        int mMaxOutputFrameHeight;
-        int mMaxPrefetchYUVFrames;
-    };
 
     int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
     void initRequiredProfileRefs(const Vector<int>& cameraIds);
@@ -403,7 +334,6 @@
     static void logAudioEncoderCap(const AudioEncoderCap& cap);
     static void logVideoDecoderCap(const VideoDecoderCap& cap);
     static void logAudioDecoderCap(const AudioDecoderCap& cap);
-    static void logVideoEditorCap(const VideoEditorCap& cap);
 
     // If the xml configuration file does exist, use the settings
     // from the xml
@@ -415,9 +345,6 @@
     static VideoDecoderCap* createVideoDecoderCap(const char **atts);
     static VideoEncoderCap* createVideoEncoderCap(const char **atts);
     static AudioEncoderCap* createAudioEncoderCap(const char **atts);
-    static VideoEditorCap* createVideoEditorCap(
-                const char **atts, MediaProfiles *profiles);
-    static ExportVideoProfile* createExportVideoProfile(const char **atts);
 
     static CamcorderProfile* createCamcorderProfile(
                 int cameraId, const char **atts, Vector<int>& cameraIds);
@@ -461,8 +388,6 @@
     static void createDefaultEncoderOutputFileFormats(MediaProfiles *profiles);
     static void createDefaultImageEncodingQualityLevels(MediaProfiles *profiles);
     static void createDefaultImageDecodingMaxMemory(MediaProfiles *profiles);
-    static void createDefaultVideoEditorCap(MediaProfiles *profiles);
-    static void createDefaultExportVideoProfiles(MediaProfiles *profiles);
 
     static VideoEncoderCap* createDefaultH263VideoEncoderCap();
     static VideoEncoderCap* createDefaultM4vVideoEncoderCap();
@@ -520,8 +445,6 @@
 
     RequiredProfiles *mRequiredProfileRefs;
     Vector<int>              mCameraIds;
-    VideoEditorCap* mVideoEditorCap;
-    Vector<ExportVideoProfile*> mVideoEditorExportProfiles;
 };
 
 }; // namespace android
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index d7ac302..d6cc4bb 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -26,10 +26,12 @@
 
 class ICameraRecordingProxy;
 class Surface;
+class IGraphicBufferConsumer;
 class IGraphicBufferProducer;
 
 struct MediaRecorderBase {
-    MediaRecorderBase() {}
+    MediaRecorderBase(const String16 &opPackageName)
+        : mOpPackageName(opPackageName) {}
     virtual ~MediaRecorderBase() {}
 
     virtual status_t init() = 0;
@@ -43,7 +45,6 @@
     virtual status_t setCamera(const sp<ICamera>& camera,
                                const sp<ICameraRecordingProxy>& proxy) = 0;
     virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface) = 0;
-    virtual status_t setOutputFile(const char *path) = 0;
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;}
     virtual status_t setParameters(const String8& params) = 0;
@@ -56,8 +57,13 @@
     virtual status_t reset() = 0;
     virtual status_t getMaxAmplitude(int *max) = 0;
     virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
+    virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
 
+
+protected:
+    String16 mOpPackageName;
+
 private:
     MediaRecorderBase(const MediaRecorderBase &);
     MediaRecorderBase &operator=(const MediaRecorderBase &);
diff --git a/include/media/MediaResource.h b/include/media/MediaResource.h
new file mode 100644
index 0000000..20f2cad
--- /dev/null
+++ b/include/media/MediaResource.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_MEDIA_RESOURCE_H
+#define ANDROID_MEDIA_RESOURCE_H
+
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+
+namespace android {
+
+extern const char kResourceSecureCodec[];
+extern const char kResourceNonSecureCodec[];
+extern const char kResourceAudioCodec[];
+extern const char kResourceVideoCodec[];
+extern const char kResourceGraphicMemory[];
+
+class MediaResource {
+public:
+    MediaResource();
+    MediaResource(String8 type, uint64_t value);
+    MediaResource(String8 type, String8 subType, uint64_t value);
+
+    void readFromParcel(const Parcel &parcel);
+    void writeToParcel(Parcel *parcel) const;
+
+    String8 toString() const;
+
+    bool operator==(const MediaResource &other) const;
+    bool operator!=(const MediaResource &other) const;
+
+    String8 mType;
+    String8 mSubType;
+    uint64_t mValue;
+};
+
+}; // namespace android
+
+#endif  // ANDROID_MEDIA_RESOURCE_H
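
A quick sketch of the MediaResource value type declared above (the resource kind, value
and helper name are illustrative): construct it, serialize it through a Parcel, and
compare the round-tripped copy with the new equality operator.

    #define LOG_TAG "MediaResourceExample"
    #include <binder/Parcel.h>
    #include <media/MediaResource.h>
    #include <utils/Log.h>

    void mediaResourceRoundTrip() {
        using namespace android;
        MediaResource res(String8(kResourceGraphicMemory), 8 * 1024 * 1024 /* value */);
        ALOGI("registering %s", res.toString().string());

        Parcel parcel;
        res.writeToParcel(&parcel);
        parcel.setDataPosition(0);

        MediaResource copy;
        copy.readFromParcel(parcel);
        ALOGI("round trip %s", (copy == res) ? "matches" : "differs");
    }
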
diff --git a/include/media/MediaResourcePolicy.h b/include/media/MediaResourcePolicy.h
new file mode 100644
index 0000000..9bc2eec
--- /dev/null
+++ b/include/media/MediaResourcePolicy.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_MEDIA_RESOURCE_POLICY_H
+#define ANDROID_MEDIA_RESOURCE_POLICY_H
+
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+
+namespace android {
+
+extern const char kPolicySupportsMultipleSecureCodecs[];
+extern const char kPolicySupportsSecureWithNonSecureCodec[];
+
+class MediaResourcePolicy {
+public:
+    MediaResourcePolicy();
+    MediaResourcePolicy(String8 type, String8 value);
+
+    void readFromParcel(const Parcel &parcel);
+    void writeToParcel(Parcel *parcel) const;
+
+    String8 toString() const;
+
+    String8 mType;
+    String8 mValue;
+};
+
+}; // namespace android
+
+#endif  // ANDROID_MEDIA_RESOURCE_POLICY_H
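
A companion sketch for MediaResourcePolicy, feeding a policy table to
IResourceManagerService::config(). The "true"/"false" string values and the helper name
are assumptions about how a caller would use this header, not something the header itself
specifies.

    #include <media/IResourceManagerService.h>
    #include <media/MediaResourcePolicy.h>
    #include <utils/Vector.h>

    // Describe what the platform allows; the service consults these policies
    // when deciding which clients to reclaim.
    static void configurePolicies(
            const android::sp<android::IResourceManagerService> &service) {
        using namespace android;
        Vector<MediaResourcePolicy> policies;
        policies.push_back(MediaResourcePolicy(
                String8(kPolicySupportsMultipleSecureCodecs), String8("true")));
        policies.push_back(MediaResourcePolicy(
                String8(kPolicySupportsSecureWithNonSecureCodec), String8("false")));
        service->config(policies);
    }
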
diff --git a/include/media/RingBuffer.h b/include/media/RingBuffer.h
new file mode 100644
index 0000000..df7c00e
--- /dev/null
+++ b/include/media/RingBuffer.h
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_SERVICE_UTILS_RING_BUFFER_H
+#define ANDROID_SERVICE_UTILS_RING_BUFFER_H
+
+#include <utils/Log.h>
+#include <cutils/compiler.h>
+
+#include <iterator>
+#include <utility>
+#include <vector>
+
+namespace android {
+
+/**
+ * A RingBuffer class that maintains an array of objects that can grow up to a certain size.
+ * Elements added to the RingBuffer are inserted in the logical front of the buffer, and
+ * invalidate all current iterators for that RingBuffer object.
+ */
+template <class T>
+class RingBuffer final {
+public:
+
+    /**
+     * Construct a RingBuffer that can grow up to the given length.
+     */
+    RingBuffer(size_t length);
+
+    /**
+     * Forward iterator to this class.  Implements a std::forward_iterator.
+     */
+    class iterator : public std::iterator<std::forward_iterator_tag, T> {
+    public:
+        iterator(T* ptr, size_t size, size_t pos, size_t ctr);
+
+        iterator& operator++();
+
+        iterator operator++(int);
+
+        bool operator==(const iterator& rhs);
+
+        bool operator!=(const iterator& rhs);
+
+        T& operator*();
+
+        T* operator->();
+
+    private:
+        T* mPtr;
+        size_t mSize;
+        size_t mPos;
+        size_t mCtr;
+    };
+
+    /**
+     * Constant forward iterator to this class.  Implements an std:forward_iterator.
+     */
+    class const_iterator : public std::iterator<std::forward_iterator_tag, T> {
+    public:
+        const_iterator(const T* ptr, size_t size, size_t pos, size_t ctr);
+
+        const_iterator& operator++();
+
+        const_iterator operator++(int);
+
+        bool operator==(const const_iterator& rhs);
+
+        bool operator!=(const const_iterator& rhs);
+
+        const T& operator*();
+
+        const T* operator->();
+
+    private:
+        const T* mPtr;
+        size_t mSize;
+        size_t mPos;
+        size_t mCtr;
+    };
+
+    /**
+     * Adds item to the front of this RingBuffer.  If the RingBuffer is at its maximum length,
+     * this will result in the last element being replaced (this is done using the element's
+     * assignment operator).
+     *
+     * All current iterators are invalidated.
+     */
+    void add(const T& item);
+
+    /**
+     * Moves item to the front of this RingBuffer.  Following a call to this, item should no
+     * longer be used.  If the RingBuffer is at its maximum length, this will result in the
+     * last element being replaced (this is done using the element's assignment operator).
+     *
+     * All current iterators are invalidated.
+     */
+    void add(T&& item);
+
+    /**
+     * Construct item in-place in the front of this RingBuffer using the given arguments.  If
+     * the RingBuffer is at its maximum length, this will result in the last element being
+     * replaced (this is done using the element's assignment operator).
+     *
+     * All current iterators are invalidated.
+     */
+    template <class... Args>
+    void emplace(Args&&... args);
+
+    /**
+     * Get an iterator to the front of this RingBuffer.
+     */
+    iterator begin();
+
+    /**
+     * Get an iterator to the end of this RingBuffer.
+     */
+    iterator end();
+
+    /**
+     * Get a const_iterator to the front of this RingBuffer.
+     */
+    const_iterator begin() const;
+
+    /**
+     * Get a const_iterator to the end of this RingBuffer.
+     */
+    const_iterator end() const;
+
+    /**
+     * Return a reference to the element at a given index.  If the index is out of range for
+     * this ringbuffer, [0, size), the behavior for this is undefined.
+     */
+    T& operator[](size_t index);
+
+    /**
+     * Return a const reference to the element at the given index.  If the index is outside
+     * the valid range [0, size) for this RingBuffer, the behavior is undefined.
+     */
+    const T& operator[](size_t index) const;
+
+    /**
+     * Return the current size of this RingBuffer.
+     */
+    size_t size() const;
+
+    /**
+     * Remove all elements from this RingBuffer and set the size to 0.
+     */
+    void clear();
+
+private:
+    size_t mFrontIdx;
+    size_t mMaxBufferSize;
+    std::vector<T> mBuffer;
+}; // class RingBuffer
+
+
+template <class T>
+RingBuffer<T>::RingBuffer(size_t length) : mFrontIdx{0}, mMaxBufferSize{length} {}
+
+template <class T>
+RingBuffer<T>::iterator::iterator(T* ptr, size_t size, size_t pos, size_t ctr) :
+        mPtr{ptr}, mSize{size}, mPos{pos}, mCtr{ctr} {}
+
+template <class T>
+typename RingBuffer<T>::iterator& RingBuffer<T>::iterator::operator++() {
+    ++mCtr;
+
+    if (CC_UNLIKELY(mCtr == mSize)) {
+        mPos = mSize;
+        return *this;
+    }
+
+    mPos = ((CC_UNLIKELY(mPos == 0)) ? mSize - 1 : mPos - 1);
+    return *this;
+}
+
+template <class T>
+typename RingBuffer<T>::iterator RingBuffer<T>::iterator::operator++(int) {
+    iterator tmp{mPtr, mSize, mPos, mCtr};
+    ++(*this);
+    return tmp;
+}
+
+template <class T>
+bool RingBuffer<T>::iterator::operator==(const iterator& rhs) {
+    return (mPtr + mPos) == (rhs.mPtr + rhs.mPos);
+}
+
+template <class T>
+bool RingBuffer<T>::iterator::operator!=(const iterator& rhs) {
+    return (mPtr + mPos) != (rhs.mPtr + rhs.mPos);
+}
+
+template <class T>
+T& RingBuffer<T>::iterator::operator*() {
+    return *(mPtr + mPos);
+}
+
+template <class T>
+T* RingBuffer<T>::iterator::operator->() {
+    return mPtr + mPos;
+}
+
+template <class T>
+RingBuffer<T>::const_iterator::const_iterator(const T* ptr, size_t size, size_t pos, size_t ctr) :
+        mPtr{ptr}, mSize{size}, mPos{pos}, mCtr{ctr} {}
+
+template <class T>
+typename RingBuffer<T>::const_iterator& RingBuffer<T>::const_iterator::operator++() {
+    ++mCtr;
+
+    if (CC_UNLIKELY(mCtr == mSize)) {
+        mPos = mSize;
+        return *this;
+    }
+
+    mPos = ((CC_UNLIKELY(mPos == 0)) ? mSize - 1 : mPos - 1);
+    return *this;
+}
+
+template <class T>
+typename RingBuffer<T>::const_iterator RingBuffer<T>::const_iterator::operator++(int) {
+    const_iterator tmp{mPtr, mSize, mPos, mCtr};
+    ++(*this);
+    return tmp;
+}
+
+template <class T>
+bool RingBuffer<T>::const_iterator::operator==(const const_iterator& rhs) {
+    return (mPtr + mPos) == (rhs.mPtr + rhs.mPos);
+}
+
+template <class T>
+bool RingBuffer<T>::const_iterator::operator!=(const const_iterator& rhs) {
+    return (mPtr + mPos) != (rhs.mPtr + rhs.mPos);
+}
+
+template <class T>
+const T& RingBuffer<T>::const_iterator::operator*() {
+    return *(mPtr + mPos);
+}
+
+template <class T>
+const T* RingBuffer<T>::const_iterator::operator->() {
+    return mPtr + mPos;
+}
+
+template <class T>
+void RingBuffer<T>::add(const T& item) {
+    if (mBuffer.size() < mMaxBufferSize) {
+        mBuffer.push_back(item);
+        mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+        return;
+    }
+
+    mBuffer[mFrontIdx] = item;
+    mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+}
+
+template <class T>
+void RingBuffer<T>::add(T&& item) {
+    if (mBuffer.size() != mMaxBufferSize) {
+        mBuffer.push_back(std::forward<T>(item));
+        mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+        return;
+    }
+
+    // Only works for types with a move assignment operator
+    mBuffer[mFrontIdx] = std::forward<T>(item);
+    mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+}
+
+template <class T>
+template <class... Args>
+void RingBuffer<T>::emplace(Args&&... args) {
+    if (mBuffer.size() != mMaxBufferSize) {
+        mBuffer.emplace_back(std::forward<Args>(args)...);
+        mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+        return;
+    }
+
+    // Only works for types with a move assignment operator
+    mBuffer[mFrontIdx] = T(std::forward<Args>(args)...);
+    mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize);
+}
+
+template <class T>
+typename RingBuffer<T>::iterator RingBuffer<T>::begin() {
+    size_t tmp = (mBuffer.size() == 0) ? 0 : mBuffer.size() - 1;
+    return iterator(mBuffer.data(), mBuffer.size(), (mFrontIdx == 0) ? tmp : mFrontIdx - 1, 0);
+}
+
+template <class T>
+typename RingBuffer<T>::iterator RingBuffer<T>::end() {
+    size_t s = mBuffer.size();
+    return iterator(mBuffer.data(), s, s, s);
+}
+
+template <class T>
+typename RingBuffer<T>::const_iterator RingBuffer<T>::begin() const {
+    size_t tmp = (mBuffer.size() == 0) ? 0 : mBuffer.size() - 1;
+    return const_iterator(mBuffer.data(), mBuffer.size(),
+            (mFrontIdx == 0) ? tmp : mFrontIdx - 1, 0);
+}
+
+template <class T>
+typename RingBuffer<T>::const_iterator RingBuffer<T>::end() const {
+    size_t s = mBuffer.size();
+    return const_iterator(mBuffer.data(), s, s, s);
+}
+
+template <class T>
+T& RingBuffer<T>::operator[](size_t index) {
+    LOG_ALWAYS_FATAL_IF(index >= mBuffer.size(), "Index %zu out of bounds, size is %zu.",
+            index, mBuffer.size());
+    size_t pos = (index >= mFrontIdx) ?
+            mBuffer.size() - 1 - (index - mFrontIdx) : mFrontIdx - 1 - index;
+    return mBuffer[pos];
+}
+
+template <class T>
+const T& RingBuffer<T>::operator[](size_t index) const {
+    LOG_ALWAYS_FATAL_IF(index >= mBuffer.size(), "Index %zu out of bounds, size is %zu.",
+            index, mBuffer.size());
+    size_t pos = (index >= mFrontIdx) ?
+            mBuffer.size() - 1 - (index - mFrontIdx) : mFrontIdx - 1 - index;
+    return mBuffer[pos];
+}
+
+template <class T>
+size_t RingBuffer<T>::size() const {
+    return mBuffer.size();
+}
+
+template <class T>
+void RingBuffer<T>::clear() {
+    mBuffer.clear();
+    mFrontIdx = 0;
+}
+
+}; // namespace android
+
+#endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H
+
+
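
Since RingBuffer is new in this patch and entirely header-defined above, a short usage
sketch may help: elements are added at the logical front, index 0 and begin() always
refer to the newest element, and once the buffer reaches its capacity the oldest element
is overwritten. The function name is illustrative.

    #define LOG_TAG "RingBufferExample"
    #include <media/RingBuffer.h>
    #include <utils/Log.h>

    void ringBufferExample() {
        android::RingBuffer<int> history(3 /* capacity */);
        for (int i = 1; i <= 5; ++i) {
            history.add(i);             // 4 and 5 overwrite the two oldest entries
        }
        // history now holds 5, 4, 3 when walked from newest to oldest.
        ALOGI("newest = %d, size = %zu", history[0], history.size());
        for (const int &value : history) {
            ALOGI("entry: %d", value);  // prints 5, then 4, then 3
        }
    }
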
diff --git a/include/media/SingleStateQueue.h b/include/media/SingleStateQueue.h
index 04c5fd0..d423962 100644
--- a/include/media/SingleStateQueue.h
+++ b/include/media/SingleStateQueue.h
@@ -21,6 +21,7 @@
 // Non-blocking single-reader / single-writer multi-word atomic load / store
 
 #include <stdint.h>
+#include <cutils/atomic.h>
 
 namespace android {
 
@@ -31,6 +32,12 @@
     class Mutator;
     class Observer;
 
+    enum SSQ_STATUS {
+        SSQ_PENDING, /* = 0 */
+        SSQ_READ,
+        SSQ_DONE,
+    };
+
     struct Shared {
         // needs to be part of a union so don't define constructor or destructor
 
@@ -41,28 +48,56 @@
         void                init() { mAck = 0; mSequence = 0; }
 
         volatile int32_t    mAck;
-#if 0
-        int                 mPad[7];
-        // cache line boundary
-#endif
         volatile int32_t    mSequence;
         T                   mValue;
     };
 
     class Mutator {
     public:
-        Mutator(Shared *shared);
-        /*virtual*/ ~Mutator() { }
+        Mutator(Shared *shared)
+            : mSequence(0), mShared(shared)
+        {
+            // exactly one of Mutator and Observer must initialize, currently it is Observer
+            // shared->init();
+        }
 
         // push new value onto state queue, overwriting previous value;
         // returns a sequence number which can be used with ack()
-        int32_t push(const T& value);
+        int32_t push(const T& value)
+        {
+            Shared *shared = mShared;
+            int32_t sequence = mSequence;
+            sequence++;
+            android_atomic_acquire_store(sequence, &shared->mSequence);
+            shared->mValue = value;
+            sequence++;
+            android_atomic_release_store(sequence, &shared->mSequence);
+            mSequence = sequence;
+            // consider signalling a futex here, if we know that observer is waiting
+            return sequence;
+        }
 
-        // return true if most recent push has been observed
-        bool ack();
+        // returns the status of the last state push.  This may be a stale value.
+        //
+        // SSQ_PENDING, or 0, means it has not been observed
+        // SSQ_READ means it has been read
+        // SSQ_DONE means it has been acted upon, after Observer::done() is called
+        enum SSQ_STATUS ack() const
+        {
+            // in the case of SSQ_DONE, prevent any subtle data-races of subsequent reads
+            // being performed (out-of-order) before the ack read, should the caller be
+            // depending on sequentiality of reads.
+            const int32_t ack = android_atomic_acquire_load(&mShared->mAck);
+            return ack - mSequence & ~1 ? SSQ_PENDING /* seq differ */ :
+                    ack & 1 ? SSQ_DONE : SSQ_READ;
+        }
 
         // return true if a push with specified sequence number or later has been observed
-        bool ack(int32_t sequence);
+        bool ack(int32_t sequence) const
+        {
+            // this relies on 2's complement rollover to detect an ancient sequence number
+            return mShared->mAck - sequence >= 0;
+        }
 
     private:
         int32_t     mSequence;
@@ -71,11 +106,54 @@
 
     class Observer {
     public:
-        Observer(Shared *shared);
-        /*virtual*/ ~Observer() { }
+        Observer(Shared *shared)
+            : mSequence(0), mSeed(1), mShared(shared)
+        {
+            // exactly one of Mutator and Observer must initialize, currently it is Observer
+            shared->init();
+        }
 
         // return true if value has changed
-        bool poll(T& value);
+        bool poll(T& value)
+        {
+            Shared *shared = mShared;
+            int32_t before = shared->mSequence;
+            if (before == mSequence) {
+                return false;
+            }
+            for (int tries = 0; ; ) {
+                const int MAX_TRIES = 5;
+                if (before & 1) {
+                    if (++tries >= MAX_TRIES) {
+                        return false;
+                    }
+                    before = shared->mSequence;
+                } else {
+                    android_memory_barrier();
+                    T temp = shared->mValue;
+                    int32_t after = android_atomic_release_load(&shared->mSequence);
+                    if (after == before) {
+                        value = temp;
+                        shared->mAck = before;
+                        mSequence = before; // mSequence is even after poll success
+                        return true;
+                    }
+                    if (++tries >= MAX_TRIES) {
+                        return false;
+                    }
+                    before = after;
+                }
+            }
+        }
+
+        // (optional) used to indicate to the Mutator that the state that has been polled
+        // has also been acted upon.
+        void done()
+        {
+            const int32_t ack = mShared->mAck + 1;
+            // ensure all previous writes have been performed.
+            android_atomic_release_store(ack, &mShared->mAck); // mSequence is odd after "done"
+        }
 
     private:
         int32_t     mSequence;
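
With push(), ack() and poll()/done() now defined inline above, the single-writer /
single-reader protocol looks like this in use. Position is a made-up payload, and keeping
the Shared block as a local variable is a simplification; in practice it lives in memory
shared between the mutator and observer sides, and the Observer (which initializes the
shared block) must be constructed before the first push.

    #include <media/SingleStateQueue.h>

    struct Position {                  // made-up, trivially copyable payload
        int64_t frames;
        int64_t timeNs;
    };

    void singleStateQueueExample() {
        using namespace android;
        typedef SingleStateQueue<Position> PositionQueue;

        PositionQueue::Shared shared;               // normally placed in shared memory
        PositionQueue::Observer observer(&shared);  // Observer initializes |shared|
        PositionQueue::Mutator mutator(&shared);

        Position update = { 480, 10000000 };
        int32_t seq = mutator.push(update);         // writer publishes the new state

        Position latest;
        if (observer.poll(latest)) {                // reader sees the most recent push
            observer.done();                        // optional: mark it as acted upon
        }
        // true once a push with this (or a later) sequence number has been observed
        bool observed = mutator.ack(seq);
        (void)observed;
    }
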
diff --git a/include/media/StringArray.h b/include/media/StringArray.h
index ae47085..48d98bf 100644
--- a/include/media/StringArray.h
+++ b/include/media/StringArray.h
@@ -16,7 +16,7 @@
 
 //
 // Sortable array of strings.  STL-ish, but STL-free.
-//  
+//
 #ifndef _LIBS_MEDIA_STRING_ARRAY_H
 #define _LIBS_MEDIA_STRING_ARRAY_H
 
diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h
index 6167dd6..186e018 100644
--- a/include/media/Visualizer.h
+++ b/include/media/Visualizer.h
@@ -65,7 +65,8 @@
     /* Constructor.
      * See AudioEffect constructor for details on parameters.
      */
-                        Visualizer(int32_t priority = 0,
+                        Visualizer(const String16& opPackageName,
+                                   int32_t priority = 0,
                                    effect_callback_t cbf = NULL,
                                    void* user = NULL,
                                    int sessionId = 0);
@@ -94,7 +95,8 @@
 
     // install a callback to receive periodic captures. The capture rate is specified in milliHertz
     // and the capture format is according to flags  (see callback_flags).
-    status_t setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate);
+    status_t setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate,
+                                bool force = false);
 
     // set the capture size capture size must be a power of two in the range
     // [VISUALIZER_CAPTURE_SIZE_MAX. VISUALIZER_CAPTURE_SIZE_MIN]
diff --git a/include/media/mediametadataretriever.h b/include/media/mediametadataretriever.h
index b35cf32..f655f35 100644
--- a/include/media/mediametadataretriever.h
+++ b/include/media/mediametadataretriever.h
@@ -25,6 +25,7 @@
 
 namespace android {
 
+class IDataSource;
 struct IMediaHTTPService;
 class IMediaPlayerService;
 class IMediaMetadataRetriever;
@@ -57,6 +58,7 @@
     METADATA_KEY_IS_DRM          = 22,
     METADATA_KEY_LOCATION        = 23,
     METADATA_KEY_VIDEO_ROTATION  = 24,
+    METADATA_KEY_CAPTURE_FRAMERATE = 25,
 
     // Add more here...
 };
@@ -74,6 +76,7 @@
             const KeyedVector<String8, String8> *headers = NULL);
 
     status_t setDataSource(int fd, int64_t offset, int64_t length);
+    status_t setDataSource(const sp<IDataSource>& dataSource);
     sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
     sp<IMemory> extractAlbumArt();
     const char* extractMetadata(int keyCode);
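
A small sketch of reading the new METADATA_KEY_CAPTURE_FRAMERATE key through the existing
fd-based setDataSource() shown above; the helper name and file handling are placeholders.

    #define LOG_TAG "CaptureFpsExample"
    #include <fcntl.h>
    #include <unistd.h>
    #include <media/mediametadataretriever.h>
    #include <utils/Log.h>

    void dumpCaptureFps(const char *path) {
        using namespace android;
        int fd = open(path, O_RDONLY);
        if (fd < 0) {
            return;
        }
        int64_t length = lseek64(fd, 0, SEEK_END);
        sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
        if (retriever->setDataSource(fd, 0 /* offset */, length) == OK) {
            const char *fps = retriever->extractMetadata(METADATA_KEY_CAPTURE_FRAMERATE);
            ALOGI("capture frame rate: %s", fps != NULL ? fps : "not present");
        }
        close(fd);
    }
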
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 5830933..3fe749c 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -20,6 +20,8 @@
 #include <arpa/inet.h>
 
 #include <binder/IMemory.h>
+
+#include <media/AudioResamplerPublic.h>
 #include <media/IMediaPlayerClient.h>
 #include <media/IMediaPlayer.h>
 #include <media/IMediaDeathNotifier.h>
@@ -32,8 +34,9 @@
 
 namespace android {
 
-class Surface;
+struct AVSyncSettings;
 class IGraphicBufferProducer;
+class Surface;
 
 enum media_event_type {
     MEDIA_NOP               = 0, // interface test message
@@ -50,6 +53,7 @@
     MEDIA_ERROR             = 100,
     MEDIA_INFO              = 200,
     MEDIA_SUBTITLE_DATA     = 201,
+    MEDIA_META_DATA         = 202,
 };
 
 // Generic error codes for the media player framework.  Errors are fatal, the
@@ -183,6 +187,7 @@
     MEDIA_TRACK_TYPE_AUDIO = 2,
     MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
     MEDIA_TRACK_TYPE_SUBTITLE = 4,
+    MEDIA_TRACK_TYPE_METADATA = 5,
 };
 
 // ----------------------------------------------------------------------------
@@ -211,6 +216,7 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IStreamSource> &source);
+            status_t        setDataSource(const sp<IDataSource> &source);
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
@@ -220,6 +226,12 @@
             status_t        stop();
             status_t        pause();
             bool            isPlaying();
+            status_t        setPlaybackSettings(const AudioPlaybackRate& rate);
+            status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+            status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint);
+            status_t        getSyncSettings(
+                                    AVSyncSettings* sync /* nonnull */,
+                                    float* videoFps /* nonnull */);
             status_t        getVideoWidth(int *w);
             status_t        getVideoHeight(int *h);
             status_t        seekTo(int msec);
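
Client-side, the new playback-settings calls compose with the existing MediaPlayer flow.
A sketch follows, assuming AudioPlaybackRate exposes an mSpeed field (defined in
AudioResamplerPublic.h, which is not part of this hunk); the function name is
illustrative.

    #include <media/AudioResamplerPublic.h>
    #include <media/mediaplayer.h>

    // Bump playback speed to 1.5x on an already prepared player.
    android::status_t playFaster(const android::sp<android::MediaPlayer> &player) {
        using namespace android;
        AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
        status_t err = player->getPlaybackSettings(&rate);
        if (err != OK) {
            return err;
        }
        rate.mSpeed = 1.5f;             // assumed field name, see note above
        err = player->setPlaybackSettings(rate);
        if (err == OK) {
            err = player->start();
        }
        return err;
    }
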
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index b0a62a7..15ff82d 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -32,6 +32,7 @@
 class ICamera;
 class ICameraRecordingProxy;
 class IGraphicBufferProducer;
+struct PersistentSurface;
 class Surface;
 
 typedef void (*media_completion_f)(status_t status, void *cookie);
@@ -209,7 +210,7 @@
                       public virtual IMediaDeathNotifier
 {
 public:
-    MediaRecorder();
+    MediaRecorder(const String16& opPackageName);
     ~MediaRecorder();
 
     void        died();
@@ -221,7 +222,6 @@
     status_t    setOutputFormat(int of);
     status_t    setVideoEncoder(int ve);
     status_t    setAudioEncoder(int ae);
-    status_t    setOutputFile(const char* path);
     status_t    setOutputFile(int fd, int64_t offset, int64_t length);
     status_t    setVideoSize(int width, int height);
     status_t    setVideoFrameRate(int frames_per_second);
@@ -237,6 +237,7 @@
     status_t    close();
     status_t    release();
     void        notify(int msg, int ext1, int ext2);
+    status_t    setInputSurface(const sp<PersistentSurface>& surface);
     sp<IGraphicBufferProducer>     querySurfaceMediaSourceFromMediaServer();
 
 private:
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
index d422576..d9bbc8d 100644
--- a/include/media/nbaio/NBAIO.h
+++ b/include/media/nbaio/NBAIO.h
@@ -231,7 +231,8 @@
     virtual status_t getTimestamp(AudioTimestamp& timestamp) { return INVALID_OPERATION; }
 
 protected:
-    NBAIO_Sink(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { }
+    NBAIO_Sink(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0)
+            { }
     virtual ~NBAIO_Sink() { }
 
     // Implementations are free to ignore these if they don't need them
@@ -322,7 +323,8 @@
     virtual void    onTimestamp(const AudioTimestamp& timestamp) { }
 
 protected:
-    NBAIO_Source(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { }
+    NBAIO_Source(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0)
+            { }
     virtual ~NBAIO_Source() { }
 
     // Implementations are free to ignore these if they don't need them
diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h
index bcbbc04..1297b51 100644
--- a/include/media/nbaio/NBLog.h
+++ b/include/media/nbaio/NBLog.h
@@ -21,7 +21,7 @@
 
 #include <binder/IMemory.h>
 #include <utils/Mutex.h>
-#include <media/nbaio/roundup.h>
+#include <audio_utils/roundup.h>
 
 namespace android {
 
diff --git a/include/media/stagefright/AACWriter.h b/include/media/stagefright/AACWriter.h
index d22707a..aa60a19 100644
--- a/include/media/stagefright/AACWriter.h
+++ b/include/media/stagefright/AACWriter.h
@@ -24,10 +24,9 @@
 namespace android {
 
 struct MediaSource;
-struct MetaData;
+class MetaData;
 
 struct AACWriter : public MediaWriter {
-    AACWriter(const char *filename);
     AACWriter(int fd);
 
     status_t initCheck() const;
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 7825508..8b5b862 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -20,9 +20,11 @@
 
 #include <stdint.h>
 #include <android/native_window.h>
+#include <media/hardware/MetadataBufferType.h>
 #include <media/IOMX.h>
 #include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <OMX_Audio.h>
 
@@ -44,9 +46,12 @@
     virtual void initiateAllocateComponent(const sp<AMessage> &msg);
     virtual void initiateConfigureComponent(const sp<AMessage> &msg);
     virtual void initiateCreateInputSurface();
+    virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
     virtual void initiateStart();
     virtual void initiateShutdown(bool keepComponentAllocated = false);
 
+    virtual status_t setSurface(const sp<Surface> &surface);
+
     virtual void signalFlush();
     virtual void signalResume();
 
@@ -105,6 +110,10 @@
     enum {
         kWhatSetup                   = 'setu',
         kWhatOMXMessage              = 'omx ',
+        // same as kWhatOMXMessage - but only used with
+        // handleMessage during OMX message-list handling
+        kWhatOMXMessageItem          = 'omxI',
+        kWhatOMXMessageList          = 'omxL',
         kWhatInputBufferFilled       = 'inpF',
         kWhatOutputBufferDrained     = 'outD',
         kWhatShutdown                = 'shut',
@@ -113,12 +122,14 @@
         kWhatDrainDeferredMessages   = 'drai',
         kWhatAllocateComponent       = 'allo',
         kWhatConfigureComponent      = 'conf',
+        kWhatSetSurface              = 'setS',
         kWhatCreateInputSurface      = 'cisf',
+        kWhatSetInputSurface         = 'sisf',
         kWhatSignalEndOfInputStream  = 'eois',
         kWhatStart                   = 'star',
         kWhatRequestIDRFrame         = 'ridr',
         kWhatSetParameters           = 'setP',
-        kWhatSubmitOutputMetaDataBufferIfEOS = 'subm',
+        kWhatSubmitOutputMetadataBufferIfEOS = 'subm',
         kWhatOMXDied                 = 'OMXd',
         kWhatReleaseCodecInstance    = 'relC',
     };
@@ -134,6 +145,12 @@
         kFlagIsGrallocUsageProtected                  = 4,
     };
 
+    enum {
+        kVideoGrallocUsage = (GRALLOC_USAGE_HW_TEXTURE
+                            | GRALLOC_USAGE_HW_COMPOSER
+                            | GRALLOC_USAGE_EXTERNAL_DISP),
+    };
+
     struct BufferInfo {
         enum Status {
             OWNED_BY_US,
@@ -141,16 +158,39 @@
             OWNED_BY_UPSTREAM,
             OWNED_BY_DOWNSTREAM,
             OWNED_BY_NATIVE_WINDOW,
+            UNRECOGNIZED,            // not a tracked buffer
         };
 
+        static inline Status getSafeStatus(BufferInfo *info) {
+            return info == NULL ? UNRECOGNIZED : info->mStatus;
+        }
+
         IOMX::buffer_id mBufferID;
         Status mStatus;
         unsigned mDequeuedAt;
 
         sp<ABuffer> mData;
         sp<GraphicBuffer> mGraphicBuffer;
+        int mFenceFd;
+        FrameRenderTracker::Info *mRenderInfo;
+
+        // The following field and 4 methods are used for debugging only
+        bool mIsReadFence;
+        // Store |fenceFd| and set read/write flag. Log error, if there is already a fence stored.
+        void setReadFence(int fenceFd, const char *dbg);
+        void setWriteFence(int fenceFd, const char *dbg);
+        // Log error, if the current fence is not a read/write fence.
+        void checkReadFence(const char *dbg);
+        void checkWriteFence(const char *dbg);
     };
 
+    static const char *_asString(BufferInfo::Status s);
+    void dumpBuffers(OMX_U32 portIndex);
+
+    // If |fd| is non-negative, waits for fence with |fd| and logs an error if it fails. Returns
+    // the error code or OK on success. If |fd| is negative, it returns OK.
+    status_t waitForFence(int fd, const char *dbg);
+
 #if TRACK_BUFFER_TIMING
     struct BufferStats {
         int64_t mEmptyBufferTimeUs;
@@ -181,10 +221,12 @@
     sp<MemoryDealer> mDealer[2];
 
     sp<ANativeWindow> mNativeWindow;
+    int mNativeWindowUsageBits;
     sp<AMessage> mInputFormat;
     sp<AMessage> mOutputFormat;
     sp<AMessage> mBaseOutputFormat;
 
+    FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
     Vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
@@ -192,8 +234,8 @@
     List<sp<AMessage> > mDeferredQueue;
 
     bool mSentFormat;
+    bool mIsVideo;
     bool mIsEncoder;
-    bool mUseMetadataOnEncoderOutput;
     bool mFatalError;
     bool mShutdownInProgress;
     bool mExplicitShutdown;
@@ -209,12 +251,15 @@
     bool mChannelMaskPresent;
     int32_t mChannelMask;
     unsigned mDequeueCounter;
-    bool mStoreMetaDataInOutputBuffers;
-    int32_t mMetaDataBuffersToSubmit;
+    MetadataBufferType mInputMetadataType;
+    MetadataBufferType mOutputMetadataType;
+    bool mLegacyAdaptiveExperiment;
+    int32_t mMetadataBuffersToSubmit;
     size_t mNumUndequeuedBuffers;
 
     int64_t mRepeatFrameDelayUs;
     int64_t mMaxPtsGapUs;
+    float mMaxFps;
 
     int64_t mTimePerFrameUs;
     int64_t mTimePerCaptureUs;
@@ -228,17 +273,29 @@
     status_t freeBuffersOnPort(OMX_U32 portIndex);
     status_t freeBuffer(OMX_U32 portIndex, size_t i);
 
+    status_t handleSetSurface(const sp<Surface> &surface);
+    status_t setupNativeWindowSizeFormatAndUsage(
+            ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */);
+
     status_t configureOutputBuffersFromNativeWindow(
             OMX_U32 *nBufferCount, OMX_U32 *nBufferSize,
             OMX_U32 *nMinUndequeuedBuffers);
-    status_t allocateOutputMetaDataBuffers();
-    status_t submitOutputMetaDataBuffer();
-    void signalSubmitOutputMetaDataBufferIfEOS_workaround();
+    status_t allocateOutputMetadataBuffers();
+    status_t submitOutputMetadataBuffer();
+    void signalSubmitOutputMetadataBufferIfEOS_workaround();
     status_t allocateOutputBuffersFromNativeWindow();
     status_t cancelBufferToNativeWindow(BufferInfo *info);
     status_t freeOutputBuffersNotOwnedByComponent();
     BufferInfo *dequeueBufferFromNativeWindow();
 
+    inline bool storingMetadataInDecodedBuffers() {
+        return mOutputMetadataType >= 0 && !mIsEncoder;
+    }
+
+    inline bool usingMetadataOnEncoderOutput() {
+        return mOutputMetadataType >= 0 && mIsEncoder;
+    }
+
     BufferInfo *findBufferByID(
             uint32_t portIndex, IOMX::buffer_id bufferID,
             ssize_t *index = NULL);
@@ -299,6 +356,9 @@
     status_t setupRawAudioFormat(
             OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels);
 
+    status_t setPriority(int32_t priority);
+    status_t setOperatingRate(float rateFloat, bool isVideo);
+
     status_t setMinBufferSize(OMX_U32 portIndex, size_t size);
 
     status_t setupMPEG4EncoderParameters(const sp<AMessage> &msg);
@@ -316,8 +376,6 @@
 
     status_t initNativeWindow();
 
-    status_t pushBlankBuffersToNativeWindow();
-
     // Returns true iff all buffers on the given port have status
     // OWNED_BY_US or OWNED_BY_NATIVE_WINDOW.
     bool allYourBuffersAreBelongToUs(OMX_U32 portIndex);
@@ -332,6 +390,23 @@
     void deferMessage(const sp<AMessage> &msg);
     void processDeferredMessages();
 
+    void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+    // called when we have dequeued a buffer |buf| from the native window to track render info.
+    // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
+    // stored.
+    void updateRenderInfoForDequeuedBuffer(
+            ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
+
+    // Checks whether any frames have been rendered up to |until|, and notifies the client
+    // (MediaCodec) of the rendered frames up to the frame pointed to by |until| or the first
+    // unrendered frame. These frames are removed from the render queue.
+    // If |dropIncomplete| is true, unrendered frames up to |until| are also dropped from the
+    // queue, so that all frames rendered up to that point can be notified.
+    // (This effectively clears the render queue up to, and including, |until|.)
+    // If |until| is NULL, or is not in the render queue, this method checks all frames.
+    void notifyOfRenderedFrames(
+            bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
+
     void sendFormatChange(const sp<AMessage> &reply);
     status_t getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify);
 
diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h
index 392f968..b38be55 100644
--- a/include/media/stagefright/AMRWriter.h
+++ b/include/media/stagefright/AMRWriter.h
@@ -26,10 +26,9 @@
 namespace android {
 
 struct MediaSource;
-struct MetaData;
+class MetaData;
 
 struct AMRWriter : public MediaWriter {
-    AMRWriter(const char *filename);
     AMRWriter(int fd);
 
     status_t initCheck() const;
diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h
index 14afb85..e0cd965 100644
--- a/include/media/stagefright/AudioPlayer.h
+++ b/include/media/stagefright/AudioPlayer.h
@@ -25,9 +25,10 @@
 
 namespace android {
 
-class MediaSource;
+struct AudioPlaybackRate;
 class AudioTrack;
-class AwesomePlayer;
+struct AwesomePlayer;
+class MediaSource;
 
 class AudioPlayer : public TimeSource {
 public:
@@ -73,7 +74,8 @@
     bool isSeeking();
     bool reachedEOS(status_t *finalStatus);
 
-    status_t setPlaybackRatePermille(int32_t ratePermille);
+    status_t setPlaybackRate(const AudioPlaybackRate &rate);
+    status_t getPlaybackRate(AudioPlaybackRate *rate /* nonnull */);
 
     void notifyAudioEOS();
 
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index 4c9aaad..3074910 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -35,8 +35,10 @@
     // _not_ a bitmask of audio_channels_t constants.
     AudioSource(
             audio_source_t inputSource,
+            const String16 &opPackageName,
             uint32_t sampleRate,
-            uint32_t channels = 1);
+            uint32_t channels,
+            uint32_t outSampleRate = 0);
 
     status_t initCheck() const;
 
@@ -77,11 +79,13 @@
     status_t mInitCheck;
     bool mStarted;
     int32_t mSampleRate;
+    int32_t mOutSampleRate;
 
     bool mTrackMaxAmplitude;
     int64_t mStartTimeUs;
     int16_t mMaxAmplitude;
     int64_t mPrevSampleTimeUs;
+    int64_t mFirstSampleTimeUs;
     int64_t mInitialReadTimeUs;
     int64_t mNumFramesReceived;
     int64_t mNumClientOwnedBuffers;
diff --git a/include/media/stagefright/BufferProducerWrapper.h b/include/media/stagefright/BufferProducerWrapper.h
index d8acf30..4caa2c6 100644
--- a/include/media/stagefright/BufferProducerWrapper.h
+++ b/include/media/stagefright/BufferProducerWrapper.h
@@ -19,6 +19,7 @@
 #define BUFFER_PRODUCER_WRAPPER_H_
 
 #include <gui/IGraphicBufferProducer.h>
+#include <media/stagefright/foundation/ABase.h>
 
 namespace android {
 
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index dd0a106..069e897 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -83,7 +83,7 @@
                                           Size videoSize,
                                           int32_t frameRate,
                                           const sp<IGraphicBufferProducer>& surface,
-                                          bool storeMetaDataInVideoBuffers = false);
+                                          bool storeMetaDataInVideoBuffers = true);
 
     virtual ~CameraSource();
 
@@ -149,6 +149,8 @@
     int32_t  mNumInputBuffers;
     int32_t  mVideoFrameRate;
     int32_t  mColorFormat;
+    int32_t  mEncoderFormat;
+    int32_t  mEncoderDataSpace;
     status_t mInitCheck;
 
     sp<Camera>   mCamera;
@@ -188,7 +190,7 @@
     void releaseCamera();
 
 private:
-    friend class CameraSourceListener;
+    friend struct CameraSourceListener;
 
     Mutex mLock;
     Condition mFrameAvailableCondition;
diff --git a/include/media/stagefright/CodecBase.h b/include/media/stagefright/CodecBase.h
index 1bf27a6..bb36052 100644
--- a/include/media/stagefright/CodecBase.h
+++ b/include/media/stagefright/CodecBase.h
@@ -26,6 +26,7 @@
 namespace android {
 
 struct ABuffer;
+struct PersistentSurface;
 
 struct CodecBase : public AHandler {
     enum {
@@ -39,8 +40,10 @@
         kWhatComponentAllocated  = 'cAll',
         kWhatComponentConfigured = 'cCon',
         kWhatInputSurfaceCreated = 'isfc',
+        kWhatInputSurfaceAccepted = 'isfa',
         kWhatSignaledInputEOS    = 'seos',
         kWhatBuffersAllocated    = 'allc',
+        kWhatOutputFramesRendered = 'outR',
     };
 
     virtual void setNotificationMessage(const sp<AMessage> &msg) = 0;
@@ -48,12 +51,16 @@
     virtual void initiateAllocateComponent(const sp<AMessage> &msg) = 0;
     virtual void initiateConfigureComponent(const sp<AMessage> &msg) = 0;
     virtual void initiateCreateInputSurface() = 0;
+    virtual void initiateSetInputSurface(
+            const sp<PersistentSurface> &surface) = 0;
     virtual void initiateStart() = 0;
     virtual void initiateShutdown(bool keepComponentAllocated = false) = 0;
 
     // require an explicit message handler
     virtual void onMessageReceived(const sp<AMessage> &msg) = 0;
 
+    virtual status_t setSurface(const sp<Surface> &surface) { return INVALID_OPERATION; }
+
     virtual void signalFlush() = 0;
     virtual void signalResume() = 0;
 
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index 3630263..dcde36f 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -32,6 +32,7 @@
 
 struct AMessage;
 struct AString;
+class  IDataSource;
 struct IMediaHTTPService;
 class String8;
 struct HTTPBase;
@@ -53,11 +54,15 @@
             HTTPBase *httpSource = NULL);
 
     static sp<DataSource> CreateMediaHTTP(const sp<IMediaHTTPService> &httpService);
+    static sp<DataSource> CreateFromIDataSource(const sp<IDataSource> &source);
 
     DataSource() {}
 
     virtual status_t initCheck() const = 0;
 
+    // Returns the number of bytes read, or -1 on failure. It's not an error if
+    // this returns zero; it just means the given offset is equal to, or
+    // beyond, the end of the source.
     virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
 
     // Convenience methods:
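The readAt() semantics documented above allow short reads, with zero meaning end of source and a negative value meaning error, so callers typically loop. A hedged sketch of such a loop; the helper name is illustrative and not part of this change:

    #include <stdint.h>
    #include <media/stagefright/DataSource.h>

    namespace android {

    // Hypothetical helper: read exactly |size| bytes at |offset|, tolerating
    // short reads. Returns the number of bytes actually read, or a negative
    // error from the underlying source.
    static ssize_t readFully(const sp<DataSource> &source,
                             off64_t offset, void *data, size_t size) {
        size_t total = 0;
        while (total < size) {
            ssize_t n = source->readAt(
                    offset + total, (uint8_t *)data + total, size - total);
            if (n < 0) {
                return n;   // propagate the error
            }
            if (n == 0) {
                break;      // end of source reached
            }
            total += n;
        }
        return (ssize_t)total;
    }

    }  // namespace android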
diff --git a/include/media/stagefright/FrameRenderTracker.h b/include/media/stagefright/FrameRenderTracker.h
new file mode 100644
index 0000000..9333e8f
--- /dev/null
+++ b/include/media/stagefright/FrameRenderTracker.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_RENDER_TRACKER_H_
+
+#define FRAME_RENDER_TRACKER_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+#include <system/window.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <list>
+
+namespace android {
+
+class Fence;
+class GraphicBuffer;
+
+struct FrameRenderTracker : public RefBase {
+    // Tracks the render information about a frame. Frames go through several states while
+    // the render information is tracked:
+    //
+    // 1. queued frame: mMediaTimeUs and mGraphicBuffer are set for the frame. mFence is the
+    // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+    // Key characteristics: mFence is not NULL and mIndex is negative.
+    //
+    // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+    // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTimeNs is still
+    // invalid.
+    //
+    // 3. rendered or dropped frame: mFence is cleared, mRenderTimeNs is set.
+    // Key characteristics: mFence is NULL.
+    //
+    struct Info {
+        // set by client during onFrameQueued or onFrameRendered
+        int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+
+        // -1 if frame is not yet rendered
+        nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+        // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+        ssize_t getIndex() const        { return mIndex; }
+
+        // creates information for a queued frame
+        Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence)
+            : mMediaTimeUs(mediaTimeUs),
+              mRenderTimeNs(-1),
+              mIndex(-1),
+              mGraphicBuffer(graphicBuffer),
+              mFence(fence) {
+        }
+
+        // creates information for a frame rendered on a tunneled surface
+        Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+            : mMediaTimeUs(mediaTimeUs),
+              mRenderTimeNs(renderTimeNs),
+              mIndex(-1),
+              mGraphicBuffer(NULL),
+              mFence(NULL) {
+        }
+
+    private:
+        int64_t mMediaTimeUs;
+        nsecs_t mRenderTimeNs;
+        ssize_t mIndex;         // to be used by client
+        sp<GraphicBuffer> mGraphicBuffer;
+        sp<Fence> mFence;
+
+        friend class FrameRenderTracker;
+    };
+
+    FrameRenderTracker();
+
+    void setComponentName(const AString &componentName);
+
+    // clears all tracked frames, and resets last render time
+    void clear(nsecs_t lastRenderTimeNs);
+
+    // called when |graphicBuffer| corresponding to |mediaTimeUs| is
+    // queued to the output surface using |fence|.
+    void onFrameQueued(
+            int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence);
+
+    // Called when we have dequeued a buffer |buf| from the native window to track render info.
+    // |fenceFd| is the dequeue fence, and |index| is a non-negative buffer ID that the
+    // client can use to track this render info among the dequeued buffers.
+    // Returns a pointer to the tracked info, or NULL if the buffer is not tracked or if
+    // |index| is negative.
+    Info *updateInfoForDequeuedBuffer(ANativeWindowBuffer *buf, int fenceFd, int index);
+
+    // called when tunneled codec signals frame rendered event
+    // returns BAD_VALUE if systemNano is not monotonic. Otherwise, returns OK.
+    status_t onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+    // Checks whether any frames have been rendered up to |until|. If |until| is NULL or not a
+    // tracked info, this method searches the entire render queue.
+    // Returns the list of rendered frames up to the frame pointed to by |until| or the first
+    // unrendered frame, as well as any dropped frames (those with an invalid fence) up to |until|.
+    // These frames are removed from the render queue.
+    // If |dropIncomplete| is true, unrendered frames up to |until| are also dropped from the
+    // queue, so that all frames rendered up to that point can be returned.
+    // (This effectively clears the render queue up to, and including, |until|.)
+    std::list<Info> checkFencesAndGetRenderedFrames(const Info *until, bool dropIncomplete);
+
+    // Stop tracking a queued frame (e.g. if the frame has been discarded). If |info| is NULL or is
+    // not tracked, this method is a no-op. If |index| is specified, all indices larger than |index|
+    // are decremented. This is useful if the untracked frame is deleted from the frame vector.
+    void untrackFrame(const Info *info, ssize_t index = SSIZE_MAX);
+
+    void dumpRenderQueue() const;
+
+    virtual ~FrameRenderTracker();
+
+private:
+
+    // Render information for buffers. Regular surface buffers are queued in the order of
+    // rendering. Tunneled buffers are queued in the order of receipt.
+    std::list<Info> mRenderQueue;
+    nsecs_t mLastRenderTimeNs;
+    AString mComponentName;
+
+    DISALLOW_EVIL_CONSTRUCTORS(FrameRenderTracker);
+};
+
+}  // namespace android
+
+#endif  // FRAME_RENDER_TRACKER_H_
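A minimal sketch of how a client such as ACodec might drive this tracker through the states described in the Info comment; the helper name is illustrative and error handling is omitted:

    #include <list>

    #include <media/stagefright/FrameRenderTracker.h>
    #include <ui/Fence.h>
    #include <ui/GraphicBuffer.h>
    #include <utils/Timers.h>

    namespace android {

    // Hypothetical driver: queue a frame, report a (tunneled) render event,
    // then collect the frames now known to be rendered or dropped.
    static void trackFrameSketch(const sp<FrameRenderTracker> &tracker,
                                 int64_t mediaTimeUs,
                                 const sp<GraphicBuffer> &buffer,
                                 const sp<Fence> &queueFence) {
        // state 1: queued frame (read fence stored, index/render time unset)
        tracker->onFrameQueued(mediaTimeUs, buffer, queueFence);

        // state 3: the codec later learns that the frame was rendered
        tracker->onFrameRendered(mediaTimeUs, systemTime(SYSTEM_TIME_MONOTONIC));

        // Collect rendered/dropped frames; NULL means scan the whole queue.
        std::list<FrameRenderTracker::Info> done =
                tracker->checkFencesAndGetRenderedFrames(
                        NULL /* until */, false /* dropIncomplete */);
        // Each Info in |done| has a valid getRenderTimeNs() if it was rendered.
    }

    }  // namespace android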
diff --git a/include/media/stagefright/MPEG2TSWriter.h b/include/media/stagefright/MPEG2TSWriter.h
index 2e2922e..3d7960b 100644
--- a/include/media/stagefright/MPEG2TSWriter.h
+++ b/include/media/stagefright/MPEG2TSWriter.h
@@ -29,7 +29,6 @@
 
 struct MPEG2TSWriter : public MediaWriter {
     MPEG2TSWriter(int fd);
-    MPEG2TSWriter(const char *filename);
 
     MPEG2TSWriter(
             void *cookie,
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 26ce5f9..a195fe8 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -26,13 +26,13 @@
 
 namespace android {
 
+class AMessage;
 class MediaBuffer;
 class MediaSource;
 class MetaData;
 
 class MPEG4Writer : public MediaWriter {
 public:
-    MPEG4Writer(const char *filename);
     MPEG4Writer(int fd);
 
     // Limitations
@@ -49,6 +49,7 @@
     virtual status_t dump(int fd, const Vector<String16>& args);
 
     void beginBox(const char *fourcc);
+    void beginBox(uint32_t id);
     void writeInt8(int8_t x);
     void writeInt16(int16_t x);
     void writeInt32(int32_t x);
@@ -63,6 +64,7 @@
     int32_t getTimeScale() const { return mTimeScale; }
 
     status_t setGeoData(int latitudex10000, int longitudex10000);
+    status_t setCaptureRate(float captureFps);
     virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
     virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
 
@@ -89,6 +91,7 @@
     off64_t mFreeBoxOffset;
     bool mStreamableFile;
     off64_t mEstimatedMoovBoxSize;
+    off64_t mMoovExtraSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
     int64_t mStartTimestampUs;
@@ -103,6 +106,8 @@
 
     List<off64_t> mBoxes;
 
+    sp<AMessage> mMetaKeys;
+
     void setStartTimestampUs(int64_t timeUs);
     int64_t getStartTimestampUs();  // Not const
     status_t startTracks(MetaData *params);
@@ -196,6 +201,12 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
+
+    void addDeviceMeta();
+    void writeHdlr();
+    void writeKeys();
+    void writeIlst();
+    void writeMetaBox();
     void sendSessionSummary();
     void release();
     status_t reset();
diff --git a/include/media/stagefright/MediaClock.h b/include/media/stagefright/MediaClock.h
new file mode 100644
index 0000000..dd1a809
--- /dev/null
+++ b/include/media/stagefright/MediaClock.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CLOCK_H_
+
+#define MEDIA_CLOCK_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Mutex.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct AMessage;
+
+struct MediaClock : public RefBase {
+    MediaClock();
+
+    void setStartingTimeMedia(int64_t startingTimeMediaUs);
+
+    void clearAnchor();
+    // In paused state, the timestamp of the just-rendered frame must be
+    // used as the anchor time.
+    void updateAnchor(
+            int64_t anchorTimeMediaUs,
+            int64_t anchorTimeRealUs,
+            int64_t maxTimeMediaUs = INT64_MAX);
+
+    void updateMaxTimeMedia(int64_t maxTimeMediaUs);
+
+    void setPlaybackRate(float rate);
+    float getPlaybackRate() const;
+
+    // query media time corresponding to real time |realUs|, and save the
+    // result in |outMediaUs|.
+    status_t getMediaTime(
+            int64_t realUs,
+            int64_t *outMediaUs,
+            bool allowPastMaxTime = false) const;
+    // query real time corresponding to media time |targetMediaUs|.
+    // The result is saved in |outRealUs|.
+    status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const;
+
+protected:
+    virtual ~MediaClock();
+
+private:
+    status_t getMediaTime_l(
+            int64_t realUs,
+            int64_t *outMediaUs,
+            bool allowPastMaxTime) const;
+
+    mutable Mutex mLock;
+
+    int64_t mAnchorTimeMediaUs;
+    int64_t mAnchorTimeRealUs;
+    int64_t mMaxTimeMediaUs;
+    int64_t mStartingTimeMediaUs;
+
+    float mPlaybackRate;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaClock);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_CLOCK_H_
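A minimal sketch of the anchor-then-query pattern this clock supports, assuming illustrative timestamps and a 2x playback rate:

    #include <media/stagefright/MediaClock.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <utils/Errors.h>

    namespace android {

    // Hypothetical example: anchor media time 0 at "now", play at 2x, then ask
    // which media time corresponds to a real time 100 ms later.
    static void mediaClockSketch() {
        sp<MediaClock> clock = new MediaClock();

        clock->setPlaybackRate(2.0f);
        clock->setStartingTimeMedia(0);

        int64_t nowUs = ALooper::GetNowUs();
        clock->updateAnchor(0 /* anchorTimeMediaUs */, nowUs /* anchorTimeRealUs */);

        int64_t mediaUs = 0;
        if (clock->getMediaTime(nowUs + 100000, &mediaUs) == OK) {
            // 100 ms of real time at 2x playback maps to ~200 ms of media time,
            // so mediaUs is approximately 200000 here.
        }
    }

    }  // namespace android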
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index d448097..c10963d 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -20,17 +20,25 @@
 
 #include <gui/IGraphicBufferProducer.h>
 #include <media/hardware/CryptoAPI.h>
+#include <media/MediaResource.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
 
 namespace android {
 
 struct ABuffer;
 struct AMessage;
+struct AReplyToken;
 struct AString;
 struct CodecBase;
-struct ICrypto;
 struct IBatteryStats;
+struct ICrypto;
+class IMemory;
+struct MemoryDealer;
+class IResourceManagerClient;
+class IResourceManagerService;
+struct PersistentSurface;
 struct SoftwareRenderer;
 struct Surface;
 
@@ -50,15 +58,20 @@
         CB_OUTPUT_AVAILABLE = 2,
         CB_ERROR = 3,
         CB_OUTPUT_FORMAT_CHANGED = 4,
+        CB_RESOURCE_RECLAIMED = 5,
     };
 
-    struct BatteryNotifier;
+    static const pid_t kNoPid = -1;
 
     static sp<MediaCodec> CreateByType(
-            const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL);
+            const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL,
+            pid_t pid = kNoPid);
 
     static sp<MediaCodec> CreateByComponentName(
-            const sp<ALooper> &looper, const char *name, status_t *err = NULL);
+            const sp<ALooper> &looper, const char *name, status_t *err = NULL,
+            pid_t pid = kNoPid);
+
+    static sp<PersistentSurface> CreatePersistentInputSurface();
 
     status_t configure(
             const sp<AMessage> &format,
@@ -68,8 +81,12 @@
 
     status_t setCallback(const sp<AMessage> &callback);
 
+    status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
+    status_t setInputSurface(const sp<PersistentSurface> &surface);
+
     status_t start();
 
     // Returns to a state in which the component remains allocated but
@@ -125,6 +142,8 @@
     status_t getOutputFormat(sp<AMessage> *format) const;
     status_t getInputFormat(sp<AMessage> *format) const;
 
+    status_t getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const;
+
     status_t getInputBuffers(Vector<sp<ABuffer> > *buffers) const;
     status_t getOutputBuffers(Vector<sp<ABuffer> > *buffers) const;
 
@@ -132,6 +151,8 @@
     status_t getOutputFormat(size_t index, sp<AMessage> *format);
     status_t getInputBuffer(size_t index, sp<ABuffer> *buffer);
 
+    status_t setSurface(const sp<Surface> &nativeWindow);
+
     status_t requestIDRFrame();
 
     // Notification will be posted once there "is something to do", i.e.
@@ -143,11 +164,22 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    // Create a MediaCodec notification message from a list of rendered or dropped render infos
+    // by adding rendered frame information to a base notification message. Returns the number
+    // of frames that were rendered.
+    static size_t CreateFramesRenderedMessage(
+            std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg);
+
 protected:
     virtual ~MediaCodec();
     virtual void onMessageReceived(const sp<AMessage> &msg);
 
 private:
+    // used by ResourceManagerClient
+    status_t reclaim();
+    friend struct ResourceManagerClient;
+
+private:
     enum State {
         UNINITIALIZED,
         INITIALIZING,
@@ -170,7 +202,9 @@
     enum {
         kWhatInit                           = 'init',
         kWhatConfigure                      = 'conf',
+        kWhatSetSurface                     = 'sSur',
         kWhatCreateInputSurface             = 'cisf',
+        kWhatSetInputSurface                = 'sisf',
         kWhatStart                          = 'strt',
         kWhatStop                           = 'stop',
         kWhatRelease                        = 'rele',
@@ -191,6 +225,7 @@
         kWhatGetName                        = 'getN',
         kWhatSetParameters                  = 'setP',
         kWhatSetCallback                    = 'setC',
+        kWhatSetNotification                = 'setN',
     };
 
     enum {
@@ -206,39 +241,78 @@
         kFlagGatherCodecSpecificData    = 512,
         kFlagIsAsync                    = 1024,
         kFlagIsComponentAllocated       = 2048,
+        kFlagPushBlankBuffersOnShutdown = 4096,
     };
 
     struct BufferInfo {
         uint32_t mBufferID;
         sp<ABuffer> mData;
         sp<ABuffer> mEncryptedData;
+        sp<IMemory> mSharedEncryptedBuffer;
         sp<AMessage> mNotify;
         sp<AMessage> mFormat;
         bool mOwnedByClient;
     };
 
+    struct ResourceManagerServiceProxy : public IBinder::DeathRecipient {
+        ResourceManagerServiceProxy(pid_t pid);
+        ~ResourceManagerServiceProxy();
+
+        void init();
+
+        // implements DeathRecipient
+        virtual void binderDied(const wp<IBinder>& /*who*/);
+
+        void addResource(
+                int64_t clientId,
+                const sp<IResourceManagerClient> client,
+                const Vector<MediaResource> &resources);
+
+        void removeResource(int64_t clientId);
+
+        bool reclaimResource(const Vector<MediaResource> &resources);
+
+    private:
+        Mutex mLock;
+        sp<IResourceManagerService> mService;
+        pid_t mPid;
+    };
+
     State mState;
+    bool mReleasedByResourceManager;
     sp<ALooper> mLooper;
     sp<ALooper> mCodecLooper;
     sp<CodecBase> mCodec;
     AString mComponentName;
-    uint32_t mReplyID;
+    sp<AReplyToken> mReplyID;
     uint32_t mFlags;
     status_t mStickyError;
-    sp<Surface> mNativeWindow;
+    sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
+
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
+    sp<AMessage> mOnFrameRenderedNotification;
+    sp<MemoryDealer> mDealer;
+
+    sp<IResourceManagerClient> mResourceManagerClient;
+    sp<ResourceManagerServiceProxy> mResourceManagerService;
 
     bool mBatteryStatNotified;
     bool mIsVideo;
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
+    int32_t mRotationDegrees;
 
     // initial create parameters
     AString mInitName;
     bool mInitNameIsType;
     bool mInitIsEncoder;
 
+    // configure parameter
+    sp<AMessage> mConfigureMsg;
+
     // Used only to synchronize asynchronous getBufferAndFormat
     // across all the other (synchronous) buffer state change
     // operations, such as de/queueIn/OutputBuffer, start and
@@ -249,10 +323,10 @@
     Vector<BufferInfo> mPortBuffers[2];
 
     int32_t mDequeueInputTimeoutGeneration;
-    uint32_t mDequeueInputReplyID;
+    sp<AReplyToken> mDequeueInputReplyID;
 
     int32_t mDequeueOutputTimeoutGeneration;
-    uint32_t mDequeueOutputReplyID;
+    sp<AReplyToken> mDequeueOutputReplyID;
 
     sp<ICrypto> mCrypto;
 
@@ -261,13 +335,14 @@
     sp<AMessage> mActivityNotify;
 
     bool mHaveInputSurface;
+    bool mHavePendingInputBuffers;
 
-    MediaCodec(const sp<ALooper> &looper);
+    MediaCodec(const sp<ALooper> &looper, pid_t pid);
 
     static status_t PostAndAwaitResponse(
             const sp<AMessage> &msg, sp<AMessage> *response);
 
-    static void PostReplyWithError(int32_t replyID, int32_t err);
+    void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
 
     status_t init(const AString &name, bool nameIsType, bool encoder);
 
@@ -283,15 +358,16 @@
             size_t portIndex, size_t index,
             sp<ABuffer> *buffer, sp<AMessage> *format);
 
-    bool handleDequeueInputBuffer(uint32_t replyID, bool newRequest = false);
-    bool handleDequeueOutputBuffer(uint32_t replyID, bool newRequest = false);
+    bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
+    bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
     void cancelPendingDequeueOperations();
 
     void extractCSD(const sp<AMessage> &format);
     status_t queueCSDInputBuffer(size_t bufferIndex);
 
-    status_t setNativeWindow(
-            const sp<Surface> &surface);
+    status_t handleSetSurface(const sp<Surface> &surface);
+    status_t connectToSurface(const sp<Surface> &surface);
+    status_t disconnectFromSurface();
 
     void postActivityNotificationIfPossible();
 
@@ -306,6 +382,9 @@
     void updateBatteryStat();
     bool isExecuting() const;
 
+    uint64_t getGraphicBufferSize();
+    void addResource(const String8 &type, const String8 &subtype, uint64_t value);
+
     /* called to get the last codec error when the sticky flag is set.
      * if no such codec error is found, returns UNKNOWN_ERROR.
      */
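A small hedged sketch of the dynamic output-surface switch exposed by the setSurface() declaration above; the helper name is illustrative and both arguments are assumed to come from the caller:

    #define LOG_TAG "SetSurfaceSketch"
    #include <gui/Surface.h>
    #include <media/stagefright/MediaCodec.h>
    #include <utils/Log.h>

    namespace android {

    // Hypothetical helper: hand a running video decoder a new output surface.
    static status_t switchOutputSurface(const sp<MediaCodec> &codec,
                                        const sp<Surface> &surface) {
        status_t err = codec->setSurface(surface);
        ALOGW_IF(err != OK, "setSurface failed: %d", err);
        return err;
    }

    }  // namespace android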
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index c2bbe4d..3aaa032 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -32,6 +32,8 @@
 
 namespace android {
 
+extern const char *kMaxEncoderInputBuffers;
+
 struct AMessage;
 
 struct MediaCodecList : public BnMediaCodecList {
@@ -48,9 +50,17 @@
         return mCodecInfos.itemAt(index);
     }
 
+    virtual const sp<AMessage> getGlobalSettings() const;
+
     // to be used by MediaPlayerService alone
     static sp<IMediaCodecList> getLocalInstance();
 
+    // only to be used by getLocalInstance
+    static void *profilerThreadWrapper(void * /*arg*/);
+
+    // only to be used by MediaPlayerService
+    void parseTopLevelXMLFile(const char *path, bool ignore_errors = false);
+
 private:
     class BinderDeathObserver : public IBinder::DeathRecipient {
         void binderDied(const wp<IBinder> &the_late_who __unused);
@@ -60,6 +70,7 @@
 
     enum Section {
         SECTION_TOPLEVEL,
+        SECTION_SETTINGS,
         SECTION_DECODERS,
         SECTION_DECODER,
         SECTION_DECODER_TYPE,
@@ -74,10 +85,14 @@
 
     status_t mInitCheck;
     Section mCurrentSection;
+    bool mUpdate;
     Vector<Section> mPastSections;
     int32_t mDepth;
     AString mHrefBase;
 
+    sp<AMessage> mGlobalSettings;
+    KeyedVector<AString, CodecSettings> mOverrides;
+
     Vector<sp<MediaCodecInfo> > mCodecInfos;
     sp<MediaCodecInfo> mCurrentInfo;
     sp<IOMX> mOMX;
@@ -87,7 +102,6 @@
 
     status_t initCheck() const;
     void parseXMLFile(const char *path);
-    void parseTopLevelXMLFile(const char *path);
 
     static void StartElementHandlerWrapper(
             void *me, const char *name, const char **attrs);
@@ -98,9 +112,12 @@
     void endElementHandler(const char *name);
 
     status_t includeXMLFile(const char **attrs);
+    status_t addSettingFromAttributes(const char **attrs);
     status_t addMediaCodecFromAttributes(bool encoder, const char **attrs);
     void addMediaCodec(bool encoder, const char *name, const char *type = NULL);
 
+    void setCurrentCodecInfo(bool encoder, const char *name, const char *type);
+
     status_t addQuirk(const char **attrs);
     status_t addTypeFromAttributes(const char **attrs);
     status_t addLimit(const char **attrs);
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index 0970b2b..71f58a9 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -23,9 +23,11 @@
 
 namespace android {
 
-class ALooper;
+struct ALooper;
 class AMessage;
+struct AReplyToken;
 class IGraphicBufferProducer;
+class IGraphicBufferConsumer;
 class MediaCodec;
 class MetaData;
 
@@ -40,6 +42,7 @@
             const sp<ALooper> &looper,
             const sp<AMessage> &format,
             const sp<MediaSource> &source,
+            const sp<IGraphicBufferConsumer> &consumer = NULL,
             uint32_t flags = 0);
 
     bool isVideo() const { return mIsVideo; }
@@ -78,6 +81,7 @@
             const sp<ALooper> &looper,
             const sp<AMessage> &outputFormat,
             const sp<MediaSource> &source,
+            const sp<IGraphicBufferConsumer> &consumer,
             uint32_t flags = 0);
 
     status_t onStart(MetaData *params);
@@ -99,13 +103,17 @@
     sp<Puller> mPuller;
     sp<MediaCodec> mEncoder;
     uint32_t mFlags;
-    List<uint32_t> mStopReplyIDQueue;
+    List<sp<AReplyToken>> mStopReplyIDQueue;
     bool mIsVideo;
     bool mStarted;
     bool mStopping;
     bool mDoMoreWorkPending;
+    bool mSetEncoderFormat;
+    int mEncoderFormat;
+    int mEncoderDataSpace;
     sp<AMessage> mEncoderActivityNotify;
     sp<IGraphicBufferProducer> mGraphicBufferProducer;
+    sp<IGraphicBufferConsumer> mGraphicBufferConsumer;
     List<MediaBuffer *> mInputBufferQueue;
     List<size_t> mAvailEncoderInputIndices;
     List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index a0036e0..21eb04a 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -64,6 +64,7 @@
 extern const char *MEDIA_MIMETYPE_TEXT_SUBRIP;
 extern const char *MEDIA_MIMETYPE_TEXT_VTT;
 extern const char *MEDIA_MIMETYPE_TEXT_CEA_608;
+extern const char *MEDIA_MIMETYPE_DATA_TIMED_ID3;
 
 }  // namespace android
 
diff --git a/include/media/stagefright/MediaFilter.h b/include/media/stagefright/MediaFilter.h
new file mode 100644
index 0000000..d0a572c
--- /dev/null
+++ b/include/media/stagefright/MediaFilter.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_FILTER_H_
+#define MEDIA_FILTER_H_
+
+#include <media/stagefright/CodecBase.h>
+
+namespace android {
+
+struct ABuffer;
+struct GraphicBufferListener;
+struct MemoryDealer;
+struct SimpleFilter;
+
+struct MediaFilter : public CodecBase {
+    MediaFilter();
+
+    virtual void setNotificationMessage(const sp<AMessage> &msg);
+
+    virtual void initiateAllocateComponent(const sp<AMessage> &msg);
+    virtual void initiateConfigureComponent(const sp<AMessage> &msg);
+    virtual void initiateCreateInputSurface();
+    virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
+
+    virtual void initiateStart();
+    virtual void initiateShutdown(bool keepComponentAllocated = false);
+
+    virtual void signalFlush();
+    virtual void signalResume();
+
+    virtual void signalRequestIDRFrame();
+    virtual void signalSetParameters(const sp<AMessage> &msg);
+    virtual void signalEndOfInputStream();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    struct PortDescription : public CodecBase::PortDescription {
+        virtual size_t countBuffers();
+        virtual IOMX::buffer_id bufferIDAt(size_t index) const;
+        virtual sp<ABuffer> bufferAt(size_t index) const;
+
+    protected:
+        PortDescription();
+
+    private:
+        friend struct MediaFilter;
+
+        Vector<IOMX::buffer_id> mBufferIDs;
+        Vector<sp<ABuffer> > mBuffers;
+
+        void addBuffer(IOMX::buffer_id id, const sp<ABuffer> &buffer);
+
+        DISALLOW_EVIL_CONSTRUCTORS(PortDescription);
+    };
+
+protected:
+    virtual ~MediaFilter();
+
+private:
+    struct BufferInfo {
+        enum Status {
+            OWNED_BY_US,
+            OWNED_BY_UPSTREAM,
+        };
+
+        IOMX::buffer_id mBufferID;
+        int32_t mGeneration;
+        int32_t mOutputFlags;
+        Status mStatus;
+
+        sp<ABuffer> mData;
+    };
+
+    enum State {
+        UNINITIALIZED,
+        INITIALIZED,
+        CONFIGURED,
+        STARTED,
+    };
+
+    enum {
+        kWhatInputBufferFilled       = 'inpF',
+        kWhatOutputBufferDrained     = 'outD',
+        kWhatShutdown                = 'shut',
+        kWhatFlush                   = 'flus',
+        kWhatResume                  = 'resm',
+        kWhatAllocateComponent       = 'allo',
+        kWhatConfigureComponent      = 'conf',
+        kWhatCreateInputSurface      = 'cisf',
+        kWhatSignalEndOfInputStream  = 'eois',
+        kWhatStart                   = 'star',
+        kWhatSetParameters           = 'setP',
+        kWhatProcessBuffers          = 'proc',
+    };
+
+    enum {
+        kPortIndexInput  = 0,
+        kPortIndexOutput = 1
+    };
+
+    // member variables
+    AString mComponentName;
+    State mState;
+    status_t mInputEOSResult;
+    int32_t mWidth, mHeight;
+    int32_t mStride, mSliceHeight;
+    int32_t mColorFormatIn, mColorFormatOut;
+    size_t mMaxInputSize, mMaxOutputSize;
+    int32_t mGeneration;
+    sp<AMessage> mNotify;
+    sp<AMessage> mInputFormat;
+    sp<AMessage> mOutputFormat;
+
+    sp<MemoryDealer> mDealer[2];
+    Vector<BufferInfo> mBuffers[2];
+    Vector<BufferInfo*> mAvailableInputBuffers;
+    Vector<BufferInfo*> mAvailableOutputBuffers;
+    bool mPortEOS[2];
+
+    sp<SimpleFilter> mFilter;
+    sp<GraphicBufferListener> mGraphicBufferListener;
+
+    // helper functions
+    void signalProcessBuffers();
+    void signalError(status_t error);
+
+    status_t allocateBuffersOnPort(OMX_U32 portIndex);
+    BufferInfo *findBufferByID(
+            uint32_t portIndex, IOMX::buffer_id bufferID,
+            ssize_t *index = NULL);
+    void postFillThisBuffer(BufferInfo *info);
+    void postDrainThisBuffer(BufferInfo *info);
+    void postEOS();
+    void sendFormatChange();
+    void requestFillEmptyInput();
+    void processBuffers();
+
+    void onAllocateComponent(const sp<AMessage> &msg);
+    void onConfigureComponent(const sp<AMessage> &msg);
+    void onStart();
+    void onInputBufferFilled(const sp<AMessage> &msg);
+    void onOutputBufferDrained(const sp<AMessage> &msg);
+    void onShutdown(const sp<AMessage> &msg);
+    void onFlush();
+    void onSetParameters(const sp<AMessage> &msg);
+    void onCreateInputSurface();
+    void onInputFrameAvailable();
+    void onSignalEndOfInputStream();
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaFilter);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_FILTER_H_
diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h
index 9da98d9..fa855a8 100644
--- a/include/media/stagefright/MediaMuxer.h
+++ b/include/media/stagefright/MediaMuxer.h
@@ -29,9 +29,9 @@
 struct ABuffer;
 struct AMessage;
 struct MediaAdapter;
-struct MediaBuffer;
+class MediaBuffer;
 struct MediaSource;
-struct MetaData;
+class MetaData;
 struct MediaWriter;
 
 // MediaMuxer is used to mux multiple tracks into a video. Currently, we only
@@ -50,9 +50,6 @@
         OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
     };
 
-    // Construct the muxer with the output file path.
-    MediaMuxer(const char *path, OutputFormat format);
-
     // Construct the muxer with the file descriptor. Note that the MediaMuxer
     // will close this file at stop().
     MediaMuxer(int fd, OutputFormat format);
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
new file mode 100644
index 0000000..ef8cb23
--- /dev/null
+++ b/include/media/stagefright/MediaSync.h
@@ -0,0 +1,291 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_SYNC_H
+#define MEDIA_SYNC_H
+
+#include <gui/IConsumerListener.h>
+#include <gui/IProducerListener.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AVSyncSettings.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+#include <utils/Condition.h>
+#include <utils/KeyedVector.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+class AudioTrack;
+class BufferItem;
+class Fence;
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+struct MediaClock;
+struct VideoFrameScheduler;
+
+// MediaSync manages media playback and its synchronization to a media clock
+// source. It can also be used for video-only playback.
+//
+// For video playback, it requires an output surface and provides an input
+// surface. It then controls the rendering of input buffers (buffer queued to
+// the input surface) on the output surface to happen at the appropriate time.
+//
+// For audio playback, it requires an audio track and takes updates about
+// rendered audio data so that it can maintain the media clock when the audio
+// track serves as the media clock source. (TODO: move audio rendering from
+// Java to native code.)
+//
+// It can use the audio or video track as the media clock source, as well as an
+// external clock. (TODO: actually support an external clock as the media clock
+// source; use the video track as the media clock source for audio-and-video
+// streams.)
+//
+// In video-only mode, MediaSync will play back every video frame, even if a
+// frame arrives late judging by its timestamp and the previous frame's.
+//
+// The client needs to configure a surface (for output video rendering) and an
+// audio track (for querying audio rendering information) for MediaSync.
+//
+// Then the client needs to obtain a surface from MediaSync and render video
+// frames onto that surface. Internally, the MediaSync will receive those video
+// frames and render them onto the output surface at the appropriate time.
+//
+// The client needs to call updateQueuedAudioData() immediately after it writes
+// audio data to the audio track. That information is used to update the media
+// clock.
+//
+class MediaSync : public AHandler {
+public:
+    // Create an instance of MediaSync.
+    static sp<MediaSync> create();
+
+    // Called when MediaSync is used to render video. It should be called
+    // before createInputSurface().
+    status_t setSurface(const sp<IGraphicBufferProducer> &output);
+
+    // Called when audio track is used as media clock source. It should be
+    // called before updateQueuedAudioData().
+    status_t setAudioTrack(const sp<AudioTrack> &audioTrack);
+
+    // Create a surface for client to render video frames. This is the surface
+    // on which the client should render video frames. Those video frames will
+    // be internally directed to output surface for rendering at appropriate
+    // time.
+    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);
+
+    // Update just-rendered audio data size and the presentation timestamp of
+    // the first frame of that audio data. It should be called immediately
+    // after the client writes audio data into the AudioTrack.
+    // This function assumes a continuous audio stream.
+    // TODO: support gap or backwards updates.
+    status_t updateQueuedAudioData(
+            size_t sizeInBytes, int64_t presentationTimeUs);
+
+    // Set the consumer name of the input queue.
+    void setName(const AString &name);
+
+    // Get the media clock used by the MediaSync so that the client can obtain
+    // corresponding media time or real time via
+    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
+    sp<const MediaClock> getMediaClock();
+
+    // Flush mediasync
+    void flush();
+
+    // Set the video frame rate hint - this is used by the video FrameScheduler
+    status_t setVideoFrameRateHint(float rate);
+
+    // Get the video frame rate measurement from the FrameScheduler
+    // returns -1 if there is no measurement
+    float getVideoFrameRate();
+
+    // Set the sync settings parameters.
+    status_t setSyncSettings(const AVSyncSettings &syncSettings);
+
+    // Gets the sync settings parameters.
+    void getSyncSettings(AVSyncSettings *syncSettings /* nonnull */);
+
+    // Sets the playback rate using playback settings.
+    // This method can be called any time.
+    status_t setPlaybackSettings(const AudioPlaybackRate &rate);
+
+    // Gets the playback rate (playback settings parameters).
+    void getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+
+    // Get the play time for pending audio frames in audio sink.
+    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDrainVideo = 'dVid',
+    };
+
+    // This is a thin wrapper class that lets us listen to
+    // IConsumerListener::onFrameAvailable from mInput.
+    class InputListener : public BnConsumerListener,
+                          public IBinder::DeathRecipient {
+    public:
+        InputListener(const sp<MediaSync> &sync);
+        virtual ~InputListener();
+
+        // From IConsumerListener
+        virtual void onFrameAvailable(const BufferItem &item);
+
+        // From IConsumerListener
+        // We don't care about released buffers because we detach each buffer as
+        // soon as we acquire it. See the comment for onBufferReleased below for
+        // some clarifying notes about the name.
+        virtual void onBuffersReleased() {}
+
+        // From IConsumerListener
+        // We don't care about sideband streams, since we won't relay them.
+        virtual void onSidebandStreamChanged();
+
+        // From IBinder::DeathRecipient
+        virtual void binderDied(const wp<IBinder> &who);
+
+    private:
+        sp<MediaSync> mSync;
+    };
+
+    // This is a thin wrapper class that lets us listen to
+    // IProducerListener::onBufferReleased from mOutput.
+    class OutputListener : public BnProducerListener,
+                           public IBinder::DeathRecipient {
+    public:
+        OutputListener(const sp<MediaSync> &sync, const sp<IGraphicBufferProducer> &output);
+        virtual ~OutputListener();
+
+        // From IProducerListener
+        virtual void onBufferReleased();
+
+        // From IBinder::DeathRecipient
+        virtual void binderDied(const wp<IBinder> &who);
+
+    private:
+        sp<MediaSync> mSync;
+        sp<IGraphicBufferProducer> mOutput;
+    };
+
+    // mIsAbandoned is set to true when the input or output dies.
+    // Once the MediaSync has been abandoned by one side, it will disconnect
+    // from the other side and not attempt to communicate with it further.
+    bool mIsAbandoned;
+
+    mutable Mutex mMutex;
+    Condition mReleaseCondition;
+    size_t mNumOutstandingBuffers;
+    sp<IGraphicBufferConsumer> mInput;
+    sp<IGraphicBufferProducer> mOutput;
+    int mUsageFlagsFromOutput;
+    uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
+    bool mReturnPendingInputFrame;    // set while we are pending before acquiring an input frame
+
+    sp<AudioTrack> mAudioTrack;
+    uint32_t mNativeSampleRateInHz;
+    int64_t mNumFramesWritten;
+    bool mHasAudio;
+
+    int64_t mNextBufferItemMediaUs;
+    List<BufferItem> mBufferItems;
+    sp<VideoFrameScheduler> mFrameScheduler;
+
+    // Keep track of buffers received from |mInput|. This is needed because
+    // it's possible the consumer of |mOutput| could return a different
+    // GraphicBuffer::handle (e.g., due to passing buffers through IPC),
+    // and that could cause problem if the producer of |mInput| only
+    // supports pre-registered buffers.
+    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersFromInput;
+
+    // Keep track of buffers sent to |mOutput|. When a new output surface comes
+    // in, those buffers will be returned to input and old output surface will
+    // be disconnected immediately.
+    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersSentToOutput;
+
+    sp<ALooper> mLooper;
+    float mPlaybackRate;
+
+    AudioPlaybackRate mPlaybackSettings;
+    AVSyncSettings mSyncSettings;
+
+    sp<MediaClock> mMediaClock;
+
+    MediaSync();
+
+    // Must be accessed through RefBase
+    virtual ~MediaSync();
+
+    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
+    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
+    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);
+
+    void onDrainVideo_l();
+
+    // This implements the onFrameAvailable callback from IConsumerListener.
+    // It gets called from an InputListener.
+    // During this callback, we detach the buffer from the input, and queue
+    // it for rendering on the output. This call can block if there are too
+    // many outstanding buffers. If it blocks, it will resume when
+    // onBufferReleasedByOutput releases a buffer back to the input.
+    void onFrameAvailableFromInput();
+
+    // Send |bufferItem| to the output for rendering.
+    void renderOneBufferItem_l(const BufferItem &bufferItem);
+
+    // This implements the onBufferReleased callback from IProducerListener.
+    // It gets called from an OutputListener.
+    // During this callback, we detach the buffer from the output, and release
+    // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
+    void onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output);
+
+    // Return |buffer| back to the input.
+    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);
+
+    // When this is called, the MediaSync disconnects from (i.e., abandons) its
+    // input or output, and signals any waiting onFrameAvailable calls to wake
+    // up. This must be called with mMutex locked.
+    void onAbandoned_l(bool isInput);
+
+    // Set the playback to a desired speed.
+    // This method can be called any time.
+    // |rate| is the ratio between desired speed and the normal one, and should
+    // be non-negative. The meaning of rate values:
+    // 1.0 -- normal playback
+    // 0.0 -- stop or pause
+    // larger than 1.0 -- faster than normal speed
+    // between 0.0 and 1.0 -- slower than normal speed
+    void updatePlaybackRate_l(float rate);
+
+    // apply new sync settings
+    void resync_l();
+
+    // apply playback settings only - without resyncing or updating playback rate
+    status_t setPlaybackSettings_l(const AudioPlaybackRate &rate);
+
+    // helper.
+    bool isPlaying() { return mPlaybackRate != 0.0; }
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
+};
+
+} // namespace android
+
+#endif
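A hedged sketch of wiring MediaSync between a video decoder and the display with an AudioTrack as the clock source; the helper name and argument sources are assumptions, and error handling is abbreviated:

    #include <gui/IGraphicBufferProducer.h>
    #include <media/AudioTrack.h>
    #include <media/stagefright/MediaSync.h>

    namespace android {

    // Hypothetical setup: |output| (e.g. the producer of a SurfaceView) and
    // |audioTrack| are assumed to be supplied by the caller.
    static status_t setUpMediaSyncSketch(
            const sp<IGraphicBufferProducer> &output,
            const sp<AudioTrack> &audioTrack,
            sp<MediaSync> *outSync,
            sp<IGraphicBufferProducer> *outInputSurface) {
        sp<MediaSync> sync = MediaSync::create();

        // The output surface must be set before createInputSurface().
        status_t err = sync->setSurface(output);
        if (err == OK) {
            err = sync->setAudioTrack(audioTrack);
        }
        if (err == OK) {
            // The decoder renders into this input surface; MediaSync forwards
            // each frame to |output| at the appropriate time.
            err = sync->createInputSurface(outInputSurface);
        }
        if (err == OK) {
            // The caller keeps |sync| alive and, after every AudioTrack::write(),
            // calls sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs)
            // so the media clock can advance.
            *outSync = sync;
        }
        return err;
    }

    }  // namespace android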
diff --git a/include/media/stagefright/MediaWriter.h b/include/media/stagefright/MediaWriter.h
index e27ea1d..8e02506 100644
--- a/include/media/stagefright/MediaWriter.h
+++ b/include/media/stagefright/MediaWriter.h
@@ -24,7 +24,7 @@
 namespace android {
 
 struct MediaSource;
-struct MetaData;
+class MetaData;
 
 struct MediaWriter : public RefBase {
     MediaWriter()
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index 3f42790..8d4e15a 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -70,11 +70,15 @@
     kKeyDriftTime         = 'dftT',  // int64_t (usecs)
     kKeyAnchorTime        = 'ancT',  // int64_t (usecs)
     kKeyDuration          = 'dura',  // int64_t (usecs)
-    kKeyColorFormat       = 'colf',
+    kKeyPixelFormat       = 'pixf',  // int32_t
+    kKeyColorFormat       = 'colf',  // int32_t
+    kKeyColorSpace        = 'cols',  // int32_t
     kKeyPlatformPrivate   = 'priv',  // pointer
     kKeyDecoderComponent  = 'decC',  // cstring
     kKeyBufferID          = 'bfID',
     kKeyMaxInputSize      = 'inpS',
+    kKeyMaxWidth          = 'maxW',
+    kKeyMaxHeight         = 'maxH',
     kKeyThumbnailTime     = 'thbT',  // int64_t (usecs)
     kKeyTrackID           = 'trID',
     kKeyIsDRM             = 'idrm',  // int32_t (bool)
@@ -98,6 +102,7 @@
     kKeyCompilation       = 'cpil',  // cstring
     kKeyLocation          = 'loc ',  // cstring
     kKeyTimeScale         = 'tmsl',  // int32_t
+    kKeyCaptureFramerate  = 'capF',  // float (capture fps)
 
     // video profile and level
     kKeyVideoProfile      = 'vprf',  // int32_t
@@ -173,6 +178,9 @@
     kKeyTrackIsDefault    = 'dflt', // bool (int32_t)
     // Similar to MediaFormat.KEY_IS_FORCED_SUBTITLE but pertains to av tracks as well.
     kKeyTrackIsForced     = 'frcd', // bool (int32_t)
+
+    // H264 supplemental enhancement information offsets/sizes
+    kKeySEI               = 'sei ', // raw data
 };
 
 enum {
diff --git a/include/media/stagefright/NativeWindowWrapper.h b/include/media/stagefright/NativeWindowWrapper.h
deleted file mode 100644
index cfeec22..0000000
--- a/include/media/stagefright/NativeWindowWrapper.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef NATIVE_WINDOW_WRAPPER_H_
-
-#define NATIVE_WINDOW_WRAPPER_H_
-
-#include <gui/Surface.h>
-
-namespace android {
-
-// Surface derives from ANativeWindow which derives from multiple
-// base classes, in order to carry it in AMessages, we'll temporarily wrap it
-// into a NativeWindowWrapper.
-
-struct NativeWindowWrapper : RefBase {
-    NativeWindowWrapper(
-            const sp<Surface> &surfaceTextureClient) :
-        mSurfaceTextureClient(surfaceTextureClient) { }
-
-    sp<ANativeWindow> getNativeWindow() const {
-        return mSurfaceTextureClient;
-    }
-
-    sp<Surface> getSurfaceTextureClient() const {
-        return mSurfaceTextureClient;
-    }
-
-private:
-    const sp<Surface> mSurfaceTextureClient;
-
-    DISALLOW_EVIL_CONSTRUCTORS(NativeWindowWrapper);
-};
-
-}  // namespace android
-
-#endif  // NATIVE_WINDOW_WRAPPER_H_
diff --git a/include/media/stagefright/NuMediaExtractor.h b/include/media/stagefright/NuMediaExtractor.h
index 402e7f8..fd74452 100644
--- a/include/media/stagefright/NuMediaExtractor.h
+++ b/include/media/stagefright/NuMediaExtractor.h
@@ -30,12 +30,12 @@
 
 struct ABuffer;
 struct AMessage;
-struct DataSource;
+class DataSource;
 struct IMediaHTTPService;
-struct MediaBuffer;
+class MediaBuffer;
 struct MediaExtractor;
 struct MediaSource;
-struct MetaData;
+class MetaData;
 
 struct NuMediaExtractor : public RefBase {
     enum SampleFlags {
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 84b1b1a..7fabcb3 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -298,7 +298,6 @@
     status_t queueBufferToNativeWindow(BufferInfo *info);
     status_t cancelBufferToNativeWindow(BufferInfo *info);
     BufferInfo* dequeueBufferFromNativeWindow();
-    status_t pushBlankBuffersToNativeWindow();
 
     status_t freeBuffersOnPort(
             OMX_U32 portIndex, bool onlyThoseWeOwn = false);
@@ -347,7 +346,6 @@
 
     status_t configureCodec(const sp<MetaData> &meta);
 
-    status_t applyRotation();
     status_t waitForBufferFilled_l();
 
     int64_t getDecodingTimeUs();
diff --git a/include/media/stagefright/PersistentSurface.h b/include/media/stagefright/PersistentSurface.h
new file mode 100644
index 0000000..a35b9f1
--- /dev/null
+++ b/include/media/stagefright/PersistentSurface.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PERSISTENT_SURFACE_H_
+
+#define PERSISTENT_SURFACE_H_
+
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct PersistentSurface : public RefBase {
+    PersistentSurface(
+            const sp<IGraphicBufferProducer>& bufferProducer,
+            const sp<IGraphicBufferConsumer>& bufferConsumer) :
+        mBufferProducer(bufferProducer),
+        mBufferConsumer(bufferConsumer) { }
+
+    sp<IGraphicBufferProducer> getBufferProducer() const {
+        return mBufferProducer;
+    }
+
+    sp<IGraphicBufferConsumer> getBufferConsumer() const {
+        return mBufferConsumer;
+    }
+
+private:
+    const sp<IGraphicBufferProducer> mBufferProducer;
+    const sp<IGraphicBufferConsumer> mBufferConsumer;
+
+    DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
+};
+
+}  // namespace android
+
+#endif  // PERSISTENT_SURFACE_H_
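
For orientation only, not part of the patch: a minimal sketch of how such a wrapper might be populated, assuming the platform's BufferQueue::createBufferQueue() helper to produce the two ends of a queue.

    #include <gui/BufferQueue.h>
    #include <media/stagefright/PersistentSurface.h>

    namespace android {

    // Create both ends of a BufferQueue and wrap them so they can travel
    // together, e.g. to be handed to an encoder later.
    static sp<PersistentSurface> createPersistentSurface() {
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        return new PersistentSurface(producer, consumer);
    }

    }  // namespace android
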
diff --git a/include/media/stagefright/ProcessInfo.h b/include/media/stagefright/ProcessInfo.h
new file mode 100644
index 0000000..ec0cdff
--- /dev/null
+++ b/include/media/stagefright/ProcessInfo.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PROCESS_INFO_H_
+
+#define PROCESS_INFO_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+
+namespace android {
+
+struct ProcessInfo : public ProcessInfoInterface {
+    ProcessInfo();
+
+    virtual bool getPriority(int pid, int* priority);
+
+protected:
+    virtual ~ProcessInfo();
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
+};
+
+}  // namespace android
+
+#endif  // PROCESS_INFO_H_
diff --git a/services/audiopolicy/AudioPolicyFactory.cpp b/include/media/stagefright/ProcessInfoInterface.h
similarity index 60%
copy from services/audiopolicy/AudioPolicyFactory.cpp
copy to include/media/stagefright/ProcessInfoInterface.h
index 2ae7bc1..222f92d 100644
--- a/services/audiopolicy/AudioPolicyFactory.cpp
+++ b/include/media/stagefright/ProcessInfoInterface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014 The Android Open Source Project
+ * Copyright (C) 2015 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,19 +14,20 @@
  * limitations under the License.
  */
 
-#include "AudioPolicyManager.h"
+#ifndef PROCESS_INFO_INTERFACE_H_
+#define PROCESS_INFO_INTERFACE_H_
+
+#include <utils/RefBase.h>
 
 namespace android {
 
-extern "C" AudioPolicyInterface* createAudioPolicyManager(
-        AudioPolicyClientInterface *clientInterface)
-{
-    return new AudioPolicyManager(clientInterface);
-}
+struct ProcessInfoInterface : public RefBase {
+    virtual bool getPriority(int pid, int* priority) = 0;
 
-extern "C" void destroyAudioPolicyManager(AudioPolicyInterface *interface)
-{
-    delete interface;
-}
+protected:
+    virtual ~ProcessInfoInterface() {}
+};
 
-}; // namespace android
+}  // namespace android
+
+#endif  // PROCESS_INFO_INTERFACE_H_
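
Illustration only, not from the patch: a stand-in implementation of this interface, of the kind a test might inject in place of the /proc-backed ProcessInfo; the reported priority is an arbitrary placeholder.

    #include <media/stagefright/ProcessInfoInterface.h>

    namespace android {

    struct FakeProcessInfo : public ProcessInfoInterface {
        // Report a fixed priority for any positive pid.
        virtual bool getPriority(int pid, int* priority) {
            if (pid <= 0 || priority == NULL) {
                return false;
            }
            *priority = 0;  // placeholder value
            return true;
        }
    };

    }  // namespace android
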
diff --git a/include/media/stagefright/RenderScriptWrapper.h b/include/media/stagefright/RenderScriptWrapper.h
new file mode 100644
index 0000000..b42649e
--- /dev/null
+++ b/include/media/stagefright/RenderScriptWrapper.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RENDERSCRIPT_WRAPPER_H_
+#define RENDERSCRIPT_WRAPPER_H_
+
+#include <RenderScript.h>
+
+namespace android {
+
+struct RenderScriptWrapper : public RefBase {
+public:
+    struct RSFilterCallback : public RefBase {
+    public:
+        // called by RSFilter to process each input buffer
+        virtual status_t processBuffers(
+                RSC::Allocation* inBuffer,
+                RSC::Allocation* outBuffer) = 0;
+
+        virtual status_t handleSetParameters(const sp<AMessage> &msg) = 0;
+    };
+
+    sp<RSFilterCallback> mCallback;
+    RSC::sp<RSC::RS> mContext;
+};
+
+}   // namespace android
+
+#endif  // RENDERSCRIPT_WRAPPER_H_
diff --git a/include/media/stagefright/SurfaceUtils.h b/include/media/stagefright/SurfaceUtils.h
new file mode 100644
index 0000000..c1a9c0a
--- /dev/null
+++ b/include/media/stagefright/SurfaceUtils.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SURFACE_UTILS_H_
+
+#define SURFACE_UTILS_H_
+
+#include <utils/Errors.h>
+
+struct ANativeWindow;
+
+namespace android {
+
+status_t setNativeWindowSizeFormatAndUsage(
+        ANativeWindow *nativeWindow /* nonnull */,
+        int width, int height, int format, int rotation, int usage);
+status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
+
+} // namespace android
+
+#endif  // SURFACE_UTILS_H_
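
As a hedged usage sketch, not part of the patch: how a caller might drive the two helpers declared above; the resolution, pixel format, and gralloc usage bits are placeholders.

    #include <hardware/gralloc.h>
    #include <system/graphics.h>
    #include <gui/Surface.h>
    #include <media/stagefright/SurfaceUtils.h>

    namespace android {

    // Configure the output window, then push blank (black) buffers so stale
    // contents are not displayed before playback starts.
    static status_t prepareOutputWindow(const sp<Surface> &surface) {
        ANativeWindow *window = surface.get();  // Surface is-an ANativeWindow
        status_t err = setNativeWindowSizeFormatAndUsage(
                window, 1280, 720, HAL_PIXEL_FORMAT_YV12,
                0 /* rotation */, GRALLOC_USAGE_HW_TEXTURE);
        if (err != OK) {
            return err;
        }
        return pushBlankBuffersToNativeWindow(window);
    }

    }  // namespace android
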
diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h
index a795c80..5e9d7d4 100644
--- a/include/media/stagefright/Utils.h
+++ b/include/media/stagefright/Utils.h
@@ -41,7 +41,7 @@
 uint64_t ntoh64(uint64_t x);
 uint64_t hton64(uint64_t x);
 
-struct MetaData;
+class MetaData;
 struct AMessage;
 status_t convertMetaDataToMessage(
         const sp<MetaData> &meta, sp<AMessage> *format);
@@ -65,6 +65,26 @@
 
 AString uriDebugString(const AString &uri, bool incognito = false);
 
+struct HLSTime {
+    int32_t mSeq;
+    int64_t mTimeUs;
+    sp<AMessage> mMeta;
+
+    HLSTime(const sp<AMessage> &meta = NULL);
+    int64_t getSegmentTimeUs() const;
+};
+
+bool operator <(const HLSTime &t0, const HLSTime &t1);
+
+// read and write various objects to/from AMessage
+
+void writeToAMessage(sp<AMessage> msg, const AudioPlaybackRate &rate);
+void readFromAMessage(const sp<AMessage> &msg, AudioPlaybackRate *rate /* nonnull */);
+
+void writeToAMessage(sp<AMessage> msg, const AVSyncSettings &sync, float videoFpsHint);
+void readFromAMessage(
+        const sp<AMessage> &msg, AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
 }  // namespace android
 
 #endif  // UTILS_H_
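
For illustration only, not in the patch: a minimal round trip through the new AudioPlaybackRate helpers, assuming AUDIO_PLAYBACK_RATE_DEFAULT from <media/AudioResamplerPublic.h>.

    #include <media/AudioResamplerPublic.h>
    #include <media/stagefright/Utils.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    // Serialize a playback rate into an AMessage and read it back out, as one
    // would when forwarding playback settings across a looper.
    static AudioPlaybackRate roundTripRate(const AudioPlaybackRate &in) {
        sp<AMessage> msg = new AMessage();
        writeToAMessage(msg, in);

        AudioPlaybackRate out = AUDIO_PLAYBACK_RATE_DEFAULT;
        readFromAMessage(msg, &out);
        return out;
    }

    }  // namespace android
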
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/include/media/stagefright/VideoFrameScheduler.h
similarity index 92%
rename from media/libmediaplayerservice/VideoFrameScheduler.h
rename to include/media/stagefright/VideoFrameScheduler.h
index 84b27b4..9d97dfd 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.h
+++ b/include/media/stagefright/VideoFrameScheduler.h
@@ -24,7 +24,7 @@
 
 namespace android {
 
-struct ISurfaceComposer;
+class ISurfaceComposer;
 
 struct VideoFrameScheduler : public RefBase {
     VideoFrameScheduler();
@@ -39,6 +39,9 @@
     // returns the vsync period for the main display
     nsecs_t getVsyncPeriod();
 
+    // returns the current frames-per-second, or 0.f if not primed
+    float getFrameRate();
+
     void release();
 
     static const size_t kHistorySize = 8;
@@ -54,8 +57,9 @@
         void reset(float fps = -1);
         // keep current estimate, but restart phase
         void restart();
-        // returns period
+        // returns period or 0 if not yet primed
         nsecs_t addSample(nsecs_t time);
+        nsecs_t getPeriod() const;
 
     private:
         nsecs_t mPeriod;
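
A sketch for context only, not part of the patch: one way the new getFrameRate() accessor might be used. It assumes the existing VideoFrameScheduler::schedule() entry point, which is not shown in this hunk.

    #include <utils/Timers.h>
    #include <utils/Vector.h>
    #include <media/stagefright/VideoFrameScheduler.h>

    namespace android {

    // Feed render timestamps into the scheduler, then query the detected rate.
    // getFrameRate() returns 0.f until the internal PLL has been primed.
    static float detectFrameRate(
            const sp<VideoFrameScheduler> &scheduler,
            const Vector<nsecs_t> &renderTimesNs) {
        for (size_t i = 0; i < renderTimesNs.size(); ++i) {
            scheduler->schedule(renderTimesNs[i]);  // adjusted time ignored here
        }
        return scheduler->getFrameRate();
    }

    }  // namespace android
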
diff --git a/include/media/stagefright/foundation/ABase.h b/include/media/stagefright/foundation/ABase.h
index 72e3d87..ef1e010 100644
--- a/include/media/stagefright/foundation/ABase.h
+++ b/include/media/stagefright/foundation/ABase.h
@@ -18,7 +18,9 @@
 
 #define A_BASE_H_
 
+#ifndef ARRAY_SIZE
 #define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a)))
+#endif
 
 #define DISALLOW_EVIL_CONSTRUCTORS(name) \
     name(const name &); \
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 1d0e2cb..65f415a 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -24,6 +24,31 @@
 #include <media/stagefright/foundation/AString.h>
 #include <utils/Log.h>
 
+inline static const char *asString(android::status_t i, const char *def = "??") {
+    using namespace android;
+    switch (i) {
+        case NO_ERROR:              return "NO_ERROR";
+        case UNKNOWN_ERROR:         return "UNKNOWN_ERROR";
+        case NO_MEMORY:             return "NO_MEMORY";
+        case INVALID_OPERATION:     return "INVALID_OPERATION";
+        case BAD_VALUE:             return "BAD_VALUE";
+        case BAD_TYPE:              return "BAD_TYPE";
+        case NAME_NOT_FOUND:        return "NAME_NOT_FOUND";
+        case PERMISSION_DENIED:     return "PERMISSION_DENIED";
+        case NO_INIT:               return "NO_INIT";
+        case ALREADY_EXISTS:        return "ALREADY_EXISTS";
+        case DEAD_OBJECT:           return "DEAD_OBJECT";
+        case FAILED_TRANSACTION:    return "FAILED_TRANSACTION";
+        case BAD_INDEX:             return "BAD_INDEX";
+        case NOT_ENOUGH_DATA:       return "NOT_ENOUGH_DATA";
+        case WOULD_BLOCK:           return "WOULD_BLOCK";
+        case TIMED_OUT:             return "TIMED_OUT";
+        case UNKNOWN_TRANSACTION:   return "UNKNOWN_TRANSACTION";
+        case FDS_NOT_ALLOWED:       return "FDS_NOT_ALLOWED";
+        default:                    return def;
+    }
+}
+
 namespace android {
 
 #define LITERAL_TO_STRING_INTERNAL(x)    #x
@@ -92,7 +117,7 @@
 
     };
 
-    // parse the property or string to get the debug level for a component name
+    // parse the property or string to get a long-type level for a component name
     // string format is:
     // <level>[:<glob>][,<level>[:<glob>]...]
     // - <level> is 0-5 corresponding to ADebug::Level
@@ -100,14 +125,38 @@
     //   matches all components
     // - string is read left-to-right, and the last matching level is returned, or
     //   the def if no terms matched
+    static long GetLevelFromSettingsString(
+            const char *name, const char *value, long def);
+    static long GetLevelFromProperty(
+            const char *name, const char *value, long def);
+
+    // same for ADebug::Level - performs clamping to valid debug ranges
     static Level GetDebugLevelFromProperty(
             const char *name, const char *propertyName, Level def = kDebugNone);
-    static Level GetDebugLevelFromString(
-            const char *name, const char *value, Level def = kDebugNone);
 
     // remove redundant segments of a codec name, and return a newly allocated
     // string suitable for debugging
     static char *GetDebugName(const char *name);
+
+    inline static bool isExperimentEnabled(
+            const char *name __unused /* nonnull */, bool allow __unused = true) {
+#ifdef ENABLE_STAGEFRIGHT_EXPERIMENTS
+        if (!strcmp(name, "legacy-adaptive")) {
+            return getExperimentFlag(allow, name, 2, 1); // every other day
+        } else if (!strcmp(name, "legacy-setsurface")) {
+            return getExperimentFlag(allow, name, 3, 1); // every third day
+        } else {
+            ALOGE("unknown experiment '%s' (disabled)", name);
+        }
+#endif
+        return false;
+    }
+
+private:
+    // pass in allow, so we can print in the log if the experiment is disabled
+    static bool getExperimentFlag(
+            bool allow, const char *name, uint64_t modulo, uint64_t limit,
+            uint64_t plus = 0, uint64_t timeDivisor = 24 * 60 * 60 /* 1 day */);
 };
 
 }  // namespace android
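
A small example, not part of the patch, of the new asString() helper; the log tag is arbitrary.

    #define LOG_TAG "StatusExample"
    #include <media/stagefright/foundation/ADebug.h>
    #include <utils/Log.h>

    // Log a status_t both numerically and by name ("??" for unknown codes).
    static void logStatus(const char *what, android::status_t err) {
        ALOGI("%s returned %d (%s)", what, err, asString(err));
    }
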
diff --git a/include/media/stagefright/foundation/AHandler.h b/include/media/stagefright/foundation/AHandler.h
index 41ade77..fe02a86 100644
--- a/include/media/stagefright/foundation/AHandler.h
+++ b/include/media/stagefright/foundation/AHandler.h
@@ -29,6 +29,7 @@
 struct AHandler : public RefBase {
     AHandler()
         : mID(0),
+          mVerboseStats(false),
           mMessageCounter(0) {
     }
 
@@ -36,23 +37,40 @@
         return mID;
     }
 
-    sp<ALooper> looper();
+    sp<ALooper> looper() const {
+        return mLooper.promote();
+    }
+
+    wp<ALooper> getLooper() const {
+        return mLooper;
+    }
+
+    wp<AHandler> getHandler() const {
+        // allow getting a weak reference to a const handler
+        return const_cast<AHandler *>(this);
+    }
 
 protected:
     virtual void onMessageReceived(const sp<AMessage> &msg) = 0;
 
 private:
-    friend struct ALooperRoster;
+    friend struct AMessage;      // deliverMessage()
+    friend struct ALooperRoster; // setID()
 
     ALooper::handler_id mID;
+    wp<ALooper> mLooper;
 
-    void setID(ALooper::handler_id id) {
+    inline void setID(ALooper::handler_id id, wp<ALooper> looper) {
         mID = id;
+        mLooper = looper;
     }
 
+    bool mVerboseStats;
     uint32_t mMessageCounter;
     KeyedVector<uint32_t, uint32_t> mMessages;
 
+    void deliverMessage(const sp<AMessage> &msg);
+
     DISALLOW_EVIL_CONSTRUCTORS(AHandler);
 };
 
diff --git a/include/media/stagefright/foundation/ALooper.h b/include/media/stagefright/foundation/ALooper.h
index 70e0c5e..09c469b 100644
--- a/include/media/stagefright/foundation/ALooper.h
+++ b/include/media/stagefright/foundation/ALooper.h
@@ -30,6 +30,7 @@
 
 struct AHandler;
 struct AMessage;
+struct AReplyToken;
 
 struct ALooper : public RefBase {
     typedef int32_t event_id;
@@ -53,11 +54,15 @@
 
     static int64_t GetNowUs();
 
+    const char *getName() const {
+        return mName.c_str();
+    }
+
 protected:
     virtual ~ALooper();
 
 private:
-    friend struct ALooperRoster;
+    friend struct AMessage;       // post()
 
     struct Event {
         int64_t mWhenUs;
@@ -75,12 +80,32 @@
     sp<LooperThread> mThread;
     bool mRunningLocally;
 
+    // use a separate lock for reply handling, as it is always on another thread
+    // use a central lock, however, to avoid creating a mutex for each reply
+    Mutex mRepliesLock;
+    Condition mRepliesCondition;
+
+    // START --- methods used only by AMessage
+
+    // posts a message on this looper with the given timeout
     void post(const sp<AMessage> &msg, int64_t delayUs);
+
+    // creates a reply token to be used with this looper
+    sp<AReplyToken> createReplyToken();
+    // waits for a response for the reply token.  If status is OK, the response
+    // is stored into the supplied variable.  Otherwise, it is unchanged.
+    status_t awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage> *response);
+    // posts a reply for a reply token.  If the reply could be successfully posted,
+    // it returns OK. Otherwise, it returns an error value.
+    status_t postReply(const sp<AReplyToken> &replyToken, const sp<AMessage> &msg);
+
+    // END --- methods used only by AMessage
+
     bool loop();
 
     DISALLOW_EVIL_CONSTRUCTORS(ALooper);
 };
 
-}  // namespace android
+} // namespace android
 
 #endif  // A_LOOPER_H_
diff --git a/include/media/stagefright/foundation/ALooperRoster.h b/include/media/stagefright/foundation/ALooperRoster.h
index a0be8eb..9912455 100644
--- a/include/media/stagefright/foundation/ALooperRoster.h
+++ b/include/media/stagefright/foundation/ALooperRoster.h
@@ -33,16 +33,6 @@
     void unregisterHandler(ALooper::handler_id handlerID);
     void unregisterStaleHandlers();
 
-    status_t postMessage(const sp<AMessage> &msg, int64_t delayUs = 0);
-    void deliverMessage(const sp<AMessage> &msg);
-
-    status_t postAndAwaitResponse(
-            const sp<AMessage> &msg, sp<AMessage> *response);
-
-    void postReply(uint32_t replyID, const sp<AMessage> &reply);
-
-    sp<ALooper> findLooper(ALooper::handler_id handlerID);
-
     void dump(int fd, const Vector<String16>& args);
 
 private:
@@ -54,10 +44,6 @@
     Mutex mLock;
     KeyedVector<ALooper::handler_id, HandlerInfo> mHandlers;
     ALooper::handler_id mNextHandlerID;
-    uint32_t mNextReplyID;
-    Condition mRepliesCondition;
-
-    KeyedVector<uint32_t, sp<AMessage> > mReplies;
 
     DISALLOW_EVIL_CONSTRUCTORS(ALooperRoster);
 };
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index a9e235b..83b9444 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -26,11 +26,41 @@
 namespace android {
 
 struct ABuffer;
+struct AHandler;
 struct AString;
-struct Parcel;
+class Parcel;
+
+struct AReplyToken : public RefBase {
+    AReplyToken(const sp<ALooper> &looper)
+        : mLooper(looper),
+          mReplied(false) {
+    }
+
+private:
+    friend struct AMessage;
+    friend struct ALooper;
+    wp<ALooper> mLooper;
+    sp<AMessage> mReply;
+    bool mReplied;
+
+    sp<ALooper> getLooper() const {
+        return mLooper.promote();
+    }
+    // if reply is not set, returns false; otherwise, it retrieves the reply and returns true
+    bool retrieveReply(sp<AMessage> *reply) {
+        if (mReplied) {
+            *reply = mReply;
+            mReply.clear();
+        }
+        return mReplied;
+    }
+    // sets the reply for this token. returns OK or error
+    status_t setReply(const sp<AMessage> &reply);
+};
 
 struct AMessage : public RefBase {
-    AMessage(uint32_t what = 0, ALooper::handler_id target = 0);
+    AMessage();
+    AMessage(uint32_t what, const sp<const AHandler> &handler);
 
     static sp<AMessage> FromParcel(const Parcel &parcel);
     void writeToParcel(Parcel *parcel) const;
@@ -38,8 +68,7 @@
     void setWhat(uint32_t what);
     uint32_t what() const;
 
-    void setTarget(ALooper::handler_id target);
-    ALooper::handler_id target() const;
+    void setTarget(const sp<const AHandler> &handler);
 
     void clear();
 
@@ -76,18 +105,22 @@
             const char *name,
             int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const;
 
-    void post(int64_t delayUs = 0);
+    status_t post(int64_t delayUs = 0);
 
     // Posts the message to its target and waits for a response (or error)
     // before returning.
     status_t postAndAwaitResponse(sp<AMessage> *response);
 
     // If this returns true, the sender of this message is synchronously
-    // awaiting a response, the "replyID" can be used to send the response
-    // via "postReply" below.
-    bool senderAwaitsResponse(uint32_t *replyID) const;
+    // awaiting a response and the reply token is consumed from the message
+    // and stored into replyID. The reply token must be used to send the response
+    // using "postReply" below.
+    bool senderAwaitsResponse(sp<AReplyToken> *replyID);
 
-    void postReply(uint32_t replyID);
+    // Posts the message as a response to a reply token.  A reply token can
+    // only be used once. Returns OK if the response could be posted; otherwise,
+    // an error.
+    status_t postReply(const sp<AReplyToken> &replyID);
 
     // Performs a deep-copy of "this", contained messages are in turn "dup'ed".
     // Warning: RefBase items, i.e. "objects" are _not_ copied but only have
@@ -117,9 +150,16 @@
     virtual ~AMessage();
 
 private:
+    friend struct ALooper; // deliver()
+
     uint32_t mWhat;
+
+    // used only for debugging
     ALooper::handler_id mTarget;
 
+    wp<AHandler> mHandler;
+    wp<ALooper> mLooper;
+
     struct Rect {
         int32_t mLeft, mTop, mRight, mBottom;
     };
@@ -157,6 +197,8 @@
 
     size_t findItemIndex(const char *name, size_t len) const;
 
+    void deliver();
+
     DISALLOW_EVIL_CONSTRUCTORS(AMessage);
 };
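
To make the new flow concrete, a sketch that is not part of the patch: the caller posts and waits, the handler consumes the reply token and posts its response. It assumes the looper has already been started; EchoHandler and the "answer" field are invented for illustration.

    #include <utils/Errors.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    struct EchoHandler : public AHandler {
        enum { kWhatQuery = 'quer' };  // arbitrary message id for this sketch

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            if (msg->what() == kWhatQuery) {
                sp<AReplyToken> replyID;
                if (!msg->senderAwaitsResponse(&replyID)) {
                    return;  // fire-and-forget post; nothing to reply to
                }
                sp<AMessage> response = new AMessage();
                response->setInt32("answer", 42);
                response->postReply(replyID);  // the token is single-use
            }
        }
    };

    // Caller side: blocks until EchoHandler posts its reply (or an error occurs).
    static status_t query(const sp<ALooper> &looper, int32_t *answer) {
        sp<EchoHandler> handler = new EchoHandler();
        looper->registerHandler(handler);
        sp<AMessage> msg = new AMessage(EchoHandler::kWhatQuery, handler);
        sp<AMessage> response;
        status_t err = msg->postAndAwaitResponse(&response);
        if (err == OK && !response->findInt32("answer", answer)) {
            err = BAD_VALUE;
        }
        return err;
    }

    }  // namespace android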
 
diff --git a/include/media/stagefright/foundation/AString.h b/include/media/stagefright/foundation/AString.h
index 822dbb3..2f6d532 100644
--- a/include/media/stagefright/foundation/AString.h
+++ b/include/media/stagefright/foundation/AString.h
@@ -24,7 +24,7 @@
 namespace android {
 
 class String8;
-struct Parcel;
+class Parcel;
 
 struct AString {
     AString();
diff --git a/include/media/stagefright/timedtext/TimedTextDriver.h b/include/media/stagefright/timedtext/TimedTextDriver.h
index 37ef674..6f7c693 100644
--- a/include/media/stagefright/timedtext/TimedTextDriver.h
+++ b/include/media/stagefright/timedtext/TimedTextDriver.h
@@ -24,7 +24,7 @@
 
 namespace android {
 
-class ALooper;
+struct ALooper;
 struct IMediaHTTPService;
 class MediaPlayerBase;
 class MediaSource;
diff --git a/include/ndk/NdkMediaCodec.h b/include/ndk/NdkMediaCodec.h
index c07f4c9..4f6a1ef 100644
--- a/include/ndk/NdkMediaCodec.h
+++ b/include/ndk/NdkMediaCodec.h
@@ -142,7 +142,8 @@
 /**
  * Get the index of the next available buffer of processed data.
  */
-ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info, int64_t timeoutUs);
+ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info,
+        int64_t timeoutUs);
 AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec*);
 
 /**
diff --git a/include/ndk/NdkMediaExtractor.h b/include/ndk/NdkMediaExtractor.h
index 7a4e702..7324d31 100644
--- a/include/ndk/NdkMediaExtractor.h
+++ b/include/ndk/NdkMediaExtractor.h
@@ -55,12 +55,14 @@
 /**
  *  Set the file descriptor from which the extractor will read.
  */
-media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor*, int fd, off64_t offset, off64_t length);
+media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor*, int fd, off64_t offset,
+        off64_t length);
 
 /**
  * Set the URI from which the extractor will read.
  */
-media_status_t AMediaExtractor_setDataSource(AMediaExtractor*, const char *location); // TODO support headers
+media_status_t AMediaExtractor_setDataSource(AMediaExtractor*, const char *location);
+        // TODO support headers
 
 /**
  * Return the number of tracks in the previously specified media file
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 31dff36..1e5064f 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -24,9 +24,9 @@
 #include <utils/threads.h>
 #include <utils/Log.h>
 #include <utils/RefBase.h>
-#include <media/nbaio/roundup.h>
+#include <audio_utils/roundup.h>
+#include <media/AudioResamplerPublic.h>
 #include <media/SingleStateQueue.h>
-#include <private/media/StaticAudioTrackState.h>
 
 namespace android {
 
@@ -54,24 +54,68 @@
 struct AudioTrackSharedStreaming {
     // similar to NBAIO MonoPipe
     // in continuously incrementing frame units, take modulo buffer size, which must be a power of 2
-    volatile int32_t mFront;    // read by server
-    volatile int32_t mRear;     // write by client
+    volatile int32_t mFront;    // read by consumer (output: server, input: client)
+    volatile int32_t mRear;     // written by producer (output: client, input: server)
     volatile int32_t mFlush;    // incremented by client to indicate a request to flush;
                                 // server notices and discards all data between mFront and mRear
     volatile uint32_t mUnderrunFrames;  // server increments for each unavailable but desired frame
 };
 
+// Represents a single state of an AudioTrack that was created in static mode (shared memory buffer
+// supplied by the client).  This state needs to be communicated from the client to server.  As this
+// state is too large to be updated atomically without a mutex, and mutexes aren't allowed here, the
+// state is wrapped by a SingleStateQueue.
+struct StaticAudioTrackState {
+    // Do not define constructors, destructors, or virtual methods as this is part of a
+    // union in shared memory and they will not get called properly.
+
+    // The loop-point and position fields should be size_t, but since they are located in shared memory we
+    // force to 32-bit.  The client and server may have different typedefs for size_t.
+
+    // The state has a sequence counter to indicate whether changes are made to loop or position.
+    // The sequence counter also currently indicates whether loop or position is first depending
+    // on which is greater; it jumps by max(mLoopSequence, mPositionSequence) + 1.
+
+    uint32_t    mLoopStart;
+    uint32_t    mLoopEnd;
+    int32_t     mLoopCount;
+    uint32_t    mLoopSequence; // a sequence counter to indicate changes to loop
+    uint32_t    mPosition;
+    uint32_t    mPositionSequence; // a sequence counter to indicate changes to position
+};
+
 typedef SingleStateQueue<StaticAudioTrackState> StaticAudioTrackSingleStateQueue;
 
+struct StaticAudioTrackPosLoop {
+    // Do not define constructors, destructors, or virtual methods as this is part of a
+    // union in shared memory and they will not get called properly.
+
+    // These fields should both be size_t, but since they are located in shared memory we
+    // force to 32-bit.  The client and server may have different typedefs for size_t.
+
+    // This struct information is stored in a single state queue to communicate the
+    // static AudioTrack server state to the client while data is consumed.
+    // It is smaller than StaticAudioTrackState to prevent unnecessary information from
+    // being sent.
+
+    uint32_t mBufferPosition;
+    int32_t  mLoopCount;
+};
+
+typedef SingleStateQueue<StaticAudioTrackPosLoop> StaticAudioTrackPosLoopQueue;
+
 struct AudioTrackSharedStatic {
+    // client requests to the server for loop or position changes.
     StaticAudioTrackSingleStateQueue::Shared
                     mSingleStateQueue;
-    // This field should be a size_t, but since it is located in shared memory we
-    // force to 32-bit.  The client and server may have different typedefs for size_t.
-    uint32_t        mBufferPosition;    // updated asynchronously by server,
-                                        // "for entertainment purposes only"
+    // position info updated asynchronously by server and read by client,
+    // "for entertainment purposes only"
+    StaticAudioTrackPosLoopQueue::Shared
+                    mPosLoopQueue;
 };
 
+typedef SingleStateQueue<AudioPlaybackRate> PlaybackRateQueue;
+
 // ----------------------------------------------------------------------------
 
 // Important: do not add any virtual methods, including ~
@@ -96,7 +140,8 @@
                 uint32_t    mServer;    // Number of filled frames consumed by server (mIsOut),
                                         // or filled frames provided by server (!mIsOut).
                                         // It is updated asynchronously by server without a barrier.
-                                        // The value should be used "for entertainment purposes only",
+                                        // The value should be used
+                                        // "for entertainment purposes only",
                                         // which means don't make important decisions based on it.
 
                 uint32_t    mPad1;      // unused
@@ -117,6 +162,8 @@
                 uint32_t    mSampleRate;    // AudioTrack only: client's requested sample rate in Hz
                                             // or 0 == default. Write-only client, read-only server.
 
+                PlaybackRateQueue::Shared mPlaybackRateQueue;
+
                 // client write-only, server read-only
                 uint16_t    mSendLevel;      // Fixed point U4.12 so 0x1000 means 1.0
 
@@ -271,7 +318,8 @@
     AudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
             size_t frameSize, bool clientInServer = false)
         : ClientProxy(cblk, buffers, frameCount, frameSize, true /*isOut*/,
-          clientInServer) { }
+          clientInServer),
+          mPlaybackRateMutator(&cblk->mPlaybackRateQueue) { }
     virtual ~AudioTrackClientProxy() { }
 
     // No barriers on the following operations, so the ordering of loads/stores
@@ -291,6 +339,10 @@
         mCblk->mSampleRate = sampleRate;
     }
 
+    void        setPlaybackRate(const AudioPlaybackRate& playbackRate) {
+        mPlaybackRateMutator.push(playbackRate);
+    }
+
     virtual void flush();
 
     virtual uint32_t    getUnderrunFrames() const {
@@ -302,6 +354,9 @@
     bool        getStreamEndDone() const;
 
     status_t    waitStreamEndDone(const struct timespec *requested);
+
+private:
+    PlaybackRateQueue::Mutator   mPlaybackRateMutator;
 };
 
 class StaticAudioTrackClientProxy : public AudioTrackClientProxy {
@@ -313,8 +368,28 @@
     virtual void    flush();
 
 #define MIN_LOOP    16  // minimum length of each loop iteration in frames
+
+            // setLoop(), setBufferPosition(), and setBufferPositionAndLoop() set the
+            // static buffer position and looping parameters.  These commands are not
+            // synchronous (they do not wait or block); instead they take effect at the
+            // next buffer data read from the server side. However, the client side
+            // getters will read a cached version of the position and loop variables
+            // until the setting takes effect.
+            //
+            // setBufferPositionAndLoop() is equivalent to calling, in order, setLoop() and
+            // setBufferPosition().
+            //
+            // The functions should not be relied upon to do parameter or state checking.
+            // That is done at the AudioTrack level.
+
             void    setLoop(size_t loopStart, size_t loopEnd, int loopCount);
+            void    setBufferPosition(size_t position);
+            void    setBufferPositionAndLoop(size_t position, size_t loopStart, size_t loopEnd,
+                                             int loopCount);
             size_t  getBufferPosition();
+                    // getBufferPositionAndLoopCount() provides the proper snapshot of
+                    // position and loopCount together.
+            void    getBufferPositionAndLoopCount(size_t *position, int *loopCount);
 
     virtual size_t  getMisalignment() {
         return 0;
@@ -326,7 +401,9 @@
 
 private:
     StaticAudioTrackSingleStateQueue::Mutator   mMutator;
-    size_t          mBufferPosition;    // so that getBufferPosition() appears to be synchronous
+    StaticAudioTrackPosLoopQueue::Observer      mPosLoopObserver;
+                        StaticAudioTrackState   mState;   // last communicated state to server
+                        StaticAudioTrackPosLoop mPosLoop; // snapshot of position and loop.
 };
 
 // ----------------------------------------------------------------------------
@@ -394,8 +471,10 @@
 public:
     AudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
             size_t frameSize, bool clientInServer = false, uint32_t sampleRate = 0)
-        : ServerProxy(cblk, buffers, frameCount, frameSize, true /*isOut*/, clientInServer) {
+        : ServerProxy(cblk, buffers, frameCount, frameSize, true /*isOut*/, clientInServer),
+          mPlaybackRateObserver(&cblk->mPlaybackRateQueue) {
         mCblk->mSampleRate = sampleRate;
+        mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     }
 protected:
     virtual ~AudioTrackServerProxy() { }
@@ -429,6 +508,13 @@
 
     // Return the total number of frames that AudioFlinger has obtained and released
     virtual size_t      framesReleased() const { return mCblk->mServer; }
+
+    // Return the playback speed and pitch read atomically. Not multi-thread safe on server side.
+    AudioPlaybackRate getPlaybackRate();
+
+private:
+    AudioPlaybackRate             mPlaybackRate;  // last observed playback rate
+    PlaybackRateQueue::Observer   mPlaybackRateObserver;
 };
 
 class StaticAudioTrackServerProxy : public AudioTrackServerProxy {
@@ -447,10 +533,13 @@
     virtual uint32_t    getUnderrunFrames() const { return 0; }
 
 private:
+    status_t            updateStateWithLoop(StaticAudioTrackState *localState,
+                                            const StaticAudioTrackState &update) const;
+    status_t            updateStateWithPosition(StaticAudioTrackState *localState,
+                                                const StaticAudioTrackState &update) const;
     ssize_t             pollPosition(); // poll for state queue update, and return current position
     StaticAudioTrackSingleStateQueue::Observer  mObserver;
-    size_t              mPosition;  // server's current play position in frames, relative to 0
-
+    StaticAudioTrackPosLoopQueue::Mutator       mPosLoopMutator;
     size_t              mFramesReadySafe; // Assuming size_t read/writes are atomic on 32 / 64 bit
                                           // processors, this is a thread-safe version of
                                           // mFramesReady.
@@ -459,7 +548,8 @@
                                           // can cause a track to appear to have a large number
                                           // of frames. INT64_MAX means an infinite loop.
     bool                mFramesReadyIsCalledByMultipleThreads;
-    StaticAudioTrackState   mState;
+    StaticAudioTrackState mState;         // Server side state. Any updates from client must be
+                                          // passed by the mObserver SingleStateQueue.
 };
 
 // Proxy used by AudioFlinger for servicing AudioRecord
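
A sketch, not from the patch, of the new playback-rate path as seen from the two proxies; construction of the proxies around a shared audio_track_cblk_t is elided.

    #include <media/AudioResamplerPublic.h>
    #include <private/media/AudioTrackShared.h>

    namespace android {

    // Client thread: push the latest requested rate (non-blocking).
    static void requestRate(AudioTrackClientProxy *client, float speed, float pitch) {
        AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
        rate.mSpeed = speed;
        rate.mPitch = pitch;
        client->setPlaybackRate(rate);
    }

    // Server (mixer) thread: read the most recent rate pushed by the client.
    static AudioPlaybackRate currentRate(AudioTrackServerProxy *server) {
        return server->getPlaybackRate();
    }

    }  // namespace android
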
diff --git a/include/private/media/StaticAudioTrackState.h b/include/private/media/StaticAudioTrackState.h
deleted file mode 100644
index d483061..0000000
--- a/include/private/media/StaticAudioTrackState.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STATIC_AUDIO_TRACK_STATE_H
-#define STATIC_AUDIO_TRACK_STATE_H
-
-namespace android {
-
-// Represents a single state of an AudioTrack that was created in static mode (shared memory buffer
-// supplied by the client).  This state needs to be communicated from the client to server.  As this
-// state is too large to be updated atomically without a mutex, and mutexes aren't allowed here, the
-// state is wrapped by a SingleStateQueue.
-struct StaticAudioTrackState {
-    // do not define constructors, destructors, or virtual methods
-
-    // These fields should both be size_t, but since they are located in shared memory we
-    // force to 32-bit.  The client and server may have different typedefs for size_t.
-    uint32_t    mLoopStart;
-    uint32_t    mLoopEnd;
-
-    int         mLoopCount;
-};
-
-}   // namespace android
-
-#endif  // STATIC_AUDIO_TRACK_STATE_H
diff --git a/include/radio/IRadio.h b/include/radio/IRadio.h
new file mode 100644
index 0000000..1877f8f
--- /dev/null
+++ b/include/radio/IRadio.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_IRADIO_H
+#define ANDROID_HARDWARE_IRADIO_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <system/radio.h>
+
+namespace android {
+
+class IRadio : public IInterface
+{
+public:
+
+    DECLARE_META_INTERFACE(Radio);
+
+    virtual void detach() = 0;
+
+    virtual status_t setConfiguration(const struct radio_band_config *config) = 0;
+
+    virtual status_t getConfiguration(struct radio_band_config *config) = 0;
+
+    virtual status_t setMute(bool mute) = 0;
+
+    virtual status_t getMute(bool *mute) = 0;
+
+    virtual status_t step(radio_direction_t direction, bool skipSubChannel) = 0;
+
+    virtual status_t scan(radio_direction_t direction, bool skipSubChannel) = 0;
+
+    virtual status_t tune(unsigned int channel, unsigned int subChannel) = 0;
+
+    virtual status_t cancel() = 0;
+
+    virtual status_t getProgramInformation(struct radio_program_info *info) = 0;
+
+    virtual status_t hasControl(bool *hasControl) = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnRadio: public BnInterface<IRadio>
+{
+public:
+    virtual status_t    onTransact( uint32_t code,
+                                    const Parcel& data,
+                                    Parcel* reply,
+                                    uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_IRADIO_H
diff --git a/include/radio/IRadioClient.h b/include/radio/IRadioClient.h
new file mode 100644
index 0000000..9062ad6
--- /dev/null
+++ b/include/radio/IRadioClient.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_IRADIO_CLIENT_H
+#define ANDROID_HARDWARE_IRADIO_CLIENT_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class IRadioClient : public IInterface
+{
+public:
+
+    DECLARE_META_INTERFACE(RadioClient);
+
+    virtual void onEvent(const sp<IMemory>& eventMemory) = 0;
+
+};
+
+// ----------------------------------------------------------------------------
+
+class BnRadioClient : public BnInterface<IRadioClient>
+{
+public:
+    virtual status_t    onTransact( uint32_t code,
+                                    const Parcel& data,
+                                    Parcel* reply,
+                                    uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_IRADIO_CLIENT_H
diff --git a/include/radio/IRadioService.h b/include/radio/IRadioService.h
new file mode 100644
index 0000000..a946dd5
--- /dev/null
+++ b/include/radio/IRadioService.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_IRADIO_SERVICE_H
+#define ANDROID_HARDWARE_IRADIO_SERVICE_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+#include <system/radio.h>
+
+namespace android {
+
+class IRadio;
+class IRadioClient;
+
+class IRadioService : public IInterface
+{
+public:
+
+    DECLARE_META_INTERFACE(RadioService);
+
+    virtual status_t listModules(struct radio_properties *properties,
+                                 uint32_t *numModules) = 0;
+
+    virtual status_t attach(const radio_handle_t handle,
+                            const sp<IRadioClient>& client,
+                            const struct radio_band_config *config,
+                            bool withAudio,
+                            sp<IRadio>& radio) = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnRadioService: public BnInterface<IRadioService>
+{
+public:
+    virtual status_t    onTransact( uint32_t code,
+                                    const Parcel& data,
+                                    Parcel* reply,
+                                    uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_IRADIO_SERVICE_H
diff --git a/include/radio/Radio.h b/include/radio/Radio.h
new file mode 100644
index 0000000..302bf16
--- /dev/null
+++ b/include/radio/Radio.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_RADIO_H
+#define ANDROID_HARDWARE_RADIO_H
+
+#include <binder/IBinder.h>
+#include <utils/threads.h>
+#include <radio/RadioCallback.h>
+#include <radio/IRadio.h>
+#include <radio/IRadioService.h>
+#include <radio/IRadioClient.h>
+#include <system/radio.h>
+
+namespace android {
+
+class MemoryDealer;
+
+class Radio : public BnRadioClient,
+                        public IBinder::DeathRecipient
+{
+public:
+
+    virtual ~Radio();
+
+    static  status_t listModules(struct radio_properties *properties,
+                                 uint32_t *numModules);
+    static  sp<Radio> attach(radio_handle_t handle,
+                             const struct radio_band_config *config,
+                             bool withAudio,
+                             const sp<RadioCallback>& callback);
+
+
+            void detach();
+
+            status_t setConfiguration(const struct radio_band_config *config);
+
+            status_t getConfiguration(struct radio_band_config *config);
+
+            status_t setMute(bool mute);
+
+            status_t getMute(bool *mute);
+
+            status_t step(radio_direction_t direction, bool skipSubChannel);
+
+            status_t scan(radio_direction_t direction, bool skipSubChannel);
+
+            status_t tune(unsigned int channel, unsigned int subChannel);
+
+            status_t cancel();
+
+            status_t getProgramInformation(struct radio_program_info *info);
+
+            status_t hasControl(bool *hasControl);
+
+            // BpRadioClient
+            virtual void onEvent(const sp<IMemory>& eventMemory);
+
+            //IBinder::DeathRecipient
+            virtual void binderDied(const wp<IBinder>& who);
+
+private:
+            Radio(radio_handle_t handle,
+                            const sp<RadioCallback>&);
+            static const sp<IRadioService>& getRadioService();
+
+            Mutex                   mLock;
+            sp<IRadio>              mIRadio;
+            const radio_handle_t    mHandle;
+            sp<RadioCallback>       mCallback;
+};
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_RADIO_H
diff --git a/include/radio/RadioCallback.h b/include/radio/RadioCallback.h
new file mode 100644
index 0000000..4a7f1a6
--- /dev/null
+++ b/include/radio/RadioCallback.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_RADIO_CALLBACK_H
+#define ANDROID_HARDWARE_RADIO_CALLBACK_H
+
+#include <utils/RefBase.h>
+#include <system/radio.h>
+
+namespace android {
+
+class RadioCallback : public RefBase
+{
+public:
+
+            RadioCallback() {}
+    virtual ~RadioCallback() {}
+
+    virtual void onEvent(struct radio_event *event) = 0;
+
+};
+
+}; // namespace android
+
+#endif //ANDROID_HARDWARE_RADIO_CALLBACK_H
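
For context only, not part of the patch: a sketch of a native client of the new radio classes, implementing the callback, attaching to a module, and tuning. The handle, band configuration, and channel are caller-supplied placeholders.

    #include <radio/Radio.h>
    #include <radio/RadioCallback.h>

    namespace android {

    // Minimal callback; a real client would inspect the delivered event here.
    struct LoggingRadioCallback : public RadioCallback {
        virtual void onEvent(struct radio_event *event) {
            (void)event;
        }
    };

    static status_t tuneTo(radio_handle_t handle,
                           const struct radio_band_config *config,
                           unsigned int channel) {
        sp<Radio> radio = Radio::attach(handle, config,
                                        true /* withAudio */,
                                        new LoggingRadioCallback());
        if (radio == 0) {
            return NO_INIT;
        }
        return radio->tune(channel, 0 /* subChannel */);
    }

    }  // namespace android
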
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
index 4389b02..3dcedc5 100644
--- a/media/img_utils/include/img_utils/DngUtils.h
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -31,6 +31,7 @@
 namespace img_utils {
 
 #define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0])))
+#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))
 
 /**
  * Utility class for building values for the OpcodeList tags specified
@@ -107,13 +108,49 @@
                                     uint32_t mapPlanes,
                                     const float* mapGains);
 
+        /**
+         * Add WarpRectilinear opcode for the given metadata parameters.
+         *
+         * Returns OK on success, or a negative error code.
+         */
+        virtual status_t addWarpRectilinearForMetadata(const float* kCoeffs,
+                                                       uint32_t activeArrayWidth,
+                                                       uint32_t activeArrayHeight,
+                                                       float opticalCenterX,
+                                                       float opticalCenterY);
+
+        /**
+         * Add a WarpRectilinear opcode.
+         *
+         * numPlanes - Number of planes included in this opcode.
+         * opticalCenterX, opticalCenterY - Normalized x,y coordinates of the sensor optical
+         *          center relative to the top,left pixel of the produced images (e.g. [0.5, 0.5]
+         *          gives a sensor optical center in the image center).
+         * kCoeffs - A list of coefficients for the polynomial equation representing the distortion
+         *          correction.  For each plane, 6 coefficients must be included:
+         *          {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}.  See the DNG 1.4 specification for an
+         *          outline of the polynomial used here.
+         *
+         * Returns OK on success, or a negative error code.
+         */
+        virtual status_t addWarpRectilinear(uint32_t numPlanes,
+                                            double opticalCenterX,
+                                            double opticalCenterY,
+                                            const double* kCoeffs);
+
         // TODO: Add other Opcode methods
     protected:
         static const uint32_t FLAG_OPTIONAL = 0x1u;
         static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u;
 
+        // Opcode IDs
         enum {
+            WARP_RECTILINEAR_ID = 1,
             GAIN_MAP_ID = 9,
+        };
+
+        // LSM mosaic indices
+        enum {
             LSM_R_IND = 0,
             LSM_GE_IND = 1,
             LSM_GO_IND = 2,
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
index d3b4a35..b213403 100644
--- a/media/img_utils/src/DngUtils.cpp
+++ b/media/img_utils/src/DngUtils.cpp
@@ -16,6 +16,10 @@
 
 #include <img_utils/DngUtils.h>
 
+#include <inttypes.h>
+
+#include <math.h>
+
 namespace android {
 namespace img_utils {
 
@@ -229,7 +233,7 @@
     err = mEndianOut.write(version, 0, NELEMS(version));
     if (err != OK) return err;
 
-    // Do not include optional flag for preview, as this can have a large effect on the output.
+    // Allow this opcode to be skipped if not supported
     uint32_t flags = FLAG_OPTIONAL;
 
     err = mEndianOut.write(&flags, 0, 1);
@@ -278,5 +282,96 @@
     return OK;
 }
 
+status_t OpcodeListBuilder::addWarpRectilinearForMetadata(const float* kCoeffs,
+                                                          uint32_t activeArrayWidth,
+                                                          uint32_t activeArrayHeight,
+                                                          float opticalCenterX,
+                                                          float opticalCenterY) {
+    if (activeArrayWidth <= 1 || activeArrayHeight <= 1) {
+        ALOGE("%s: Cannot add opcode for active array with dimensions w=%" PRIu32 ", h=%" PRIu32,
+                __FUNCTION__, activeArrayWidth, activeArrayHeight);
+        return BAD_VALUE;
+    }
+
+    double normalizedOCX = opticalCenterX / static_cast<double>(activeArrayWidth - 1);
+    double normalizedOCY = opticalCenterY / static_cast<double>(activeArrayHeight - 1);
+
+    normalizedOCX = CLAMP(normalizedOCX, 0, 1);
+    normalizedOCY = CLAMP(normalizedOCY, 0, 1);
+
+    // Conversion factors from Camera2 K factors to DNG spec. K factors:
+    //
+    //      Note: these are necessary because our unit system assumes a
+    //      normalized max radius of sqrt(2), whereas the DNG spec's
+    //      WarpRectilinear opcode assumes a normalized max radius of 1.
+    //      Thus, each K coefficient must include the domain scaling
+    //      factor (the DNG domain is scaled by sqrt(2) to emulate the
+    //      domain used by the Camera2 specification).
+
+    const double c_0 = sqrt(2);
+    const double c_1 = 2 * sqrt(2);
+    const double c_2 = 4 * sqrt(2);
+    const double c_3 = 8 * sqrt(2);
+    const double c_4 = 2;
+    const double c_5 = 2;
+
+    const double coeffs[] = { c_0 * kCoeffs[0],
+                              c_1 * kCoeffs[1],
+                              c_2 * kCoeffs[2],
+                              c_3 * kCoeffs[3],
+                              c_4 * kCoeffs[4],
+                              c_5 * kCoeffs[5] };
+
+
+    return addWarpRectilinear(/*numPlanes*/1,
+                              /*opticalCenterX*/normalizedOCX,
+                              /*opticalCenterY*/normalizedOCY,
+                              coeffs);
+}
+
+status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes,
+                                               double opticalCenterX,
+                                               double opticalCenterY,
+                                               const double* kCoeffs) {
+
+    uint32_t opcodeId = WARP_RECTILINEAR_ID;
+
+    status_t err = mEndianOut.write(&opcodeId, 0, 1);
+    if (err != OK) return err;
+
+    uint8_t version[] = {1, 3, 0, 0};
+    err = mEndianOut.write(version, 0, NELEMS(version));
+    if (err != OK) return err;
+
+    // Allow this opcode to be skipped if not supported
+    uint32_t flags = FLAG_OPTIONAL;
+
+    err = mEndianOut.write(&flags, 0, 1);
+    if (err != OK) return err;
+
+    const uint32_t NUMBER_CENTER_ARGS = 2;
+    const uint32_t NUMBER_COEFFS = numPlanes * 6;
+    uint32_t totalSize = (NUMBER_CENTER_ARGS + NUMBER_COEFFS) * sizeof(double) + sizeof(uint32_t);
+
+    err = mEndianOut.write(&totalSize, 0, 1);
+    if (err != OK) return err;
+
+    err = mEndianOut.write(&numPlanes, 0, 1);
+    if (err != OK) return err;
+
+    err = mEndianOut.write(kCoeffs, 0, NUMBER_COEFFS);
+    if (err != OK) return err;
+
+    err = mEndianOut.write(&opticalCenterX, 0, 1);
+    if (err != OK) return err;
+
+    err = mEndianOut.write(&opticalCenterY, 0, 1);
+    if (err != OK) return err;
+
+    mCount++;
+
+    return OK;
+}
+
 } /*namespace img_utils*/
 } /*namespace android*/
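
A usage sketch, not part of the patch: how a DNG writer might call the metadata-flavored entry point; the distortion coefficients and sensor geometry below are made-up placeholders.

    #include <img_utils/DngUtils.h>

    namespace android {
    namespace img_utils {

    // Add a WarpRectilinear opcode from Camera2-style coefficients
    // {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}; optical center in active-array pixels.
    static status_t addDistortionCorrection(OpcodeListBuilder *builder) {
        const float kCoeffs[6] = { 1.0f, 0.01f, 0.002f, 0.0f, 0.0f, 0.0f };
        return builder->addWarpRectilinearForMetadata(
                kCoeffs,
                4000 /* activeArrayWidth */, 3000 /* activeArrayHeight */,
                1999.5f /* opticalCenterX */, 1499.5f /* opticalCenterY */);
    }

    }  // namespace img_utils
    }  // namespace android
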
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index 6d30d64..db7865a 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -24,10 +24,12 @@
 
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
+#include <cutils/properties.h>
 #include <audio_effects/audio_effects_conf.h>
 
 static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects
 static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries
+static list_elem_t *gSkippedEffects; // list of effects skipped because of duplicate uuid
 // list of effect_descriptor and list of sub effects : all currently loaded
 // It does not contain effects without sub effects.
 static list_sub_elem_t *gSubEffectList;
@@ -63,10 +65,10 @@
                lib_entry_t **lib,
                effect_descriptor_t **desc);
 // To search a subeffect in the gSubEffectList
-int findSubEffect(const effect_uuid_t *uuid,
+static int findSubEffect(const effect_uuid_t *uuid,
                lib_entry_t **lib,
                effect_descriptor_t **desc);
-static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len);
+static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent);
 static int stringToUuid(const char *str, effect_uuid_t *uuid);
 static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen);
 
@@ -237,8 +239,8 @@
     }
 
 #if (LOG_NDEBUG == 0)
-    char str[256];
-    dumpEffectDescriptor(pDescriptor, str, 256);
+    char str[512];
+    dumpEffectDescriptor(pDescriptor, str, sizeof(str), 0 /* indent */);
     ALOGV("EffectQueryEffect() desc:%s", str);
 #endif
     pthread_mutex_unlock(&gLibLock);
@@ -446,12 +448,19 @@
         return 0;
     }
 
+    // check whether effects in configuration files should be ignored
+    const bool ignoreFxConfFiles = property_get_bool(PROPERTY_IGNORE_EFFECTS, false);
+
     pthread_mutex_init(&gLibLock, NULL);
 
-    if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
-        loadEffectConfigFile(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
-    } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
-        loadEffectConfigFile(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+    if (ignoreFxConfFiles) {
+        ALOGI("Audio effects in configuration files will be ignored");
+    } else {
+        if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
+            loadEffectConfigFile(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+        } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
+            loadEffectConfigFile(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+        }
     }
 
     updateNumEffects();
@@ -503,15 +512,31 @@
     audio_effect_library_t *desc;
     list_elem_t *e;
     lib_entry_t *l;
+    char path[PATH_MAX];
+    char *str;
+    size_t len;
 
     node = config_find(root, PATH_TAG);
     if (node == NULL) {
         return -EINVAL;
     }
+    // audio_effects.conf always specifies 32 bit lib path: convert to 64 bit path if needed
+    strlcpy(path, node->value, PATH_MAX);
+#ifdef __LP64__
+    str = strstr(path, "/lib/");
+    if (str == NULL)
+        return -EINVAL;
+    len = str - path;
+    path[len] = '\0';
+    strlcat(path, "/lib64/", PATH_MAX);
+    strlcat(path, node->value + len + strlen("/lib/"), PATH_MAX);
+#endif
+    if (strlen(path) >= PATH_MAX - 1)
+        return -EINVAL;
 
-    hdl = dlopen(node->value, RTLD_NOW);
+    hdl = dlopen(path, RTLD_NOW);
     if (hdl == NULL) {
-        ALOGW("loadLibrary() failed to open %s", node->value);
+        ALOGW("loadLibrary() failed to open %s", path);
         goto error;
     }
 
@@ -535,7 +560,7 @@
     // add entry for library in gLibraryList
     l = malloc(sizeof(lib_entry_t));
     l->name = strndup(name, PATH_MAX);
-    l->path = strndup(node->value, PATH_MAX);
+    l->path = strndup(path, PATH_MAX);
     l->handle = hdl;
     l->desc = desc;
     l->effects = NULL;
@@ -547,7 +572,7 @@
     e->next = gLibraryList;
     gLibraryList = e;
     pthread_mutex_unlock(&gLibLock);
-    ALOGV("getLibrary() linked library %p for path %s", l, node->value);
+    ALOGV("getLibrary() linked library %p for path %s", l, path);
 
     return 0;
 
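
The LP64 branch above rewrites the 32-bit library path from audio_effects.conf into its 64-bit sibling by replacing the first "/lib/" component with "/lib64/". A self-contained sketch of the same transformation (the helper name and example path are illustrative):

    #include <climits>   // PATH_MAX
    #include <cstdio>
    #include <cstring>

    // Rewrites ".../lib/..." to ".../lib64/...". Returns false if the input has
    // no "/lib/" component or the result would not fit in the output buffer.
    static bool to64BitLibPath(const char *in, char *out, size_t outLen) {
        const char *lib = std::strstr(in, "/lib/");
        if (lib == nullptr) return false;
        int n = std::snprintf(out, outLen, "%.*s/lib64/%s",
                              (int)(lib - in), in, lib + std::strlen("/lib/"));
        return n > 0 && (size_t)n < outLen;
    }

    int main() {
        char out[PATH_MAX];
        if (to64BitLibPath("/system/lib/soundfx/libbundlewrapper.so", out, sizeof(out))) {
            std::printf("%s\n", out);  // /system/lib64/soundfx/libbundlewrapper.so
        }
        return 0;
    }
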
@@ -595,8 +620,8 @@
         return -EINVAL;
     }
 #if (LOG_NDEBUG==0)
-    char s[256];
-    dumpEffectDescriptor(d, s, 256);
+    char s[512];
+    dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
     ALOGV("addSubEffect() read descriptor %p:%s",d, s);
 #endif
     if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
@@ -660,6 +685,13 @@
         ALOGW("loadEffect() invalid uuid %s", node->value);
         return -EINVAL;
     }
+    lib_entry_t *tmp;
+    bool skip = false;
+    if (findEffect(NULL, &uuid, &tmp, NULL) == 0) {
+        ALOGW("skipping duplicate uuid %s %s", node->value,
+                node->next ? "and its sub-effects" : "");
+        skip = true;
+    }
 
     d = malloc(sizeof(effect_descriptor_t));
     if (l->desc->get_descriptor(&uuid, d) != 0) {
@@ -670,8 +702,8 @@
         return -EINVAL;
     }
 #if (LOG_NDEBUG==0)
-    char s[256];
-    dumpEffectDescriptor(d, s, 256);
+    char s[512];
+    dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
     ALOGV("loadEffect() read descriptor %p:%s",d, s);
 #endif
     if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
@@ -682,8 +714,14 @@
     }
     e = malloc(sizeof(list_elem_t));
     e->object = d;
-    e->next = l->effects;
-    l->effects = e;
+    if (skip) {
+        e->next = gSkippedEffects;
+        gSkippedEffects = e;
+        return -EINVAL;
+    } else {
+        e->next = l->effects;
+        l->effects = e;
+    }
 
     // After the UUID node in the config_tree, if node->next is valid,
     // that would be sub effect node.
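
In effect, loadEffect() now routes a descriptor whose UUID is already registered onto a separate list instead of exposing it twice; keeping the descriptor around (rather than freeing it) is what lets EffectDumpEffects() report it later. A minimal sketch of that routing, with hypothetical types:

    #include <cstring>

    struct Uuid { unsigned char bytes[16]; };
    struct Node { Uuid uuid; Node *next; };

    // Prepends 'entry' to the loaded list unless its UUID is already present,
    // in which case it goes to the skipped list. Returns true if registered.
    static bool registerOrSkip(Node **loaded, Node **skipped, Node *entry) {
        for (Node *n = *loaded; n != nullptr; n = n->next) {
            if (std::memcmp(&n->uuid, &entry->uuid, sizeof(Uuid)) == 0) {
                entry->next = *skipped;   // duplicate: keep it for dumps only
                *skipped = entry;
                return false;
            }
        }
        entry->next = *loaded;
        *loaded = entry;
        return true;
    }
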
@@ -876,22 +914,30 @@
     return ret;
 }
 
-void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len) {
+void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent) {
     char s[256];
+    char ss[256];
+    char idt[indent + 1];
 
-    snprintf(str, len, "\nEffect Descriptor %p:\n", desc);
-    strncat(str, "- TYPE: ", len);
-    uuidToString(&desc->uuid, s, 256);
-    snprintf(str, len, "- UUID: %s\n", s);
-    uuidToString(&desc->type, s, 256);
-    snprintf(str, len, "- TYPE: %s\n", s);
-    sprintf(s, "- apiVersion: %08X\n- flags: %08X\n",
-            desc->apiVersion, desc->flags);
-    strncat(str, s, len);
-    sprintf(s, "- name: %s\n", desc->name);
-    strncat(str, s, len);
-    sprintf(s, "- implementor: %s\n", desc->implementor);
-    strncat(str, s, len);
+    memset(idt, ' ', indent);
+    idt[indent] = 0;
+
+    str[0] = 0;
+
+    snprintf(s, sizeof(s), "%s%s / %s\n", idt, desc->name, desc->implementor);
+    strlcat(str, s, len);
+
+    uuidToString(&desc->uuid, s, sizeof(s));
+    snprintf(ss, sizeof(ss), "%s  UUID: %s\n", idt, s);
+    strlcat(str, ss, len);
+
+    uuidToString(&desc->type, s, sizeof(s));
+    snprintf(ss, sizeof(ss), "%s  TYPE: %s\n", idt, s);
+    strlcat(str, ss, len);
+
+    sprintf(s, "%s  apiVersion: %08X\n%s  flags: %08X\n", idt,
+            desc->apiVersion, idt, desc->flags);
+    strlcat(str, s, len);
 }
 
 int stringToUuid(const char *str, effect_uuid_t *uuid)
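
The rewrite of dumpEffectDescriptor() above also fixes how the string was assembled: the old code passed the total buffer size as strncat()'s length argument (which is the remaining space, not the destination size) and repeatedly called snprintf() on the destination, overwriting what had already been built. A short sketch of the safe append pattern now used (appendField is an illustrative helper; strlcat is the bionic/BSD API):

    #include <stdio.h>
    #include <string.h>

    // Appends one formatted line to dst without overflowing it. dstSize must be
    // the full size of the dst buffer; strlcat truncates and NUL-terminates.
    static void appendField(char *dst, size_t dstSize, const char *label, unsigned value) {
        char tmp[64];
        snprintf(tmp, sizeof(tmp), "  %s: %08X\n", label, value);
        strlcat(dst, tmp, dstSize);
    }
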
@@ -934,3 +980,40 @@
     return 0;
 }
 
+int EffectDumpEffects(int fd) {
+    char s[512];
+    list_elem_t *e = gLibraryList;
+    lib_entry_t *l = NULL;
+    effect_descriptor_t *d = NULL;
+    int found = 0;
+    int ret = 0;
+
+    while (e) {
+        l = (lib_entry_t *)e->object;
+        list_elem_t *efx = l->effects;
+        dprintf(fd, "Library %s\n", l->name);
+        if (!efx) {
+            dprintf(fd, "  (no effects)\n");
+        }
+        while (efx) {
+            d = (effect_descriptor_t *)efx->object;
+            dumpEffectDescriptor(d, s, sizeof(s), 2);
+            dprintf(fd, "%s", s);
+            efx = efx->next;
+        }
+        e = e->next;
+    }
+
+    e = gSkippedEffects;
+    if (e) {
+        dprintf(fd, "Skipped effects\n");
+        while(e) {
+            d = (effect_descriptor_t *)e->object;
+            dumpEffectDescriptor(d, s, sizeof(s), 2 /* indent */);
+            dprintf(fd, "%s", s);
+            e = e->next;
+        }
+    }
+    return ret;
+}
+
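
EffectDumpEffects() is meant to be called from a service's dump handler with the dump file descriptor; in practice this inventory typically surfaces in the audio service's dumpsys output, assuming the caller wires it in. An illustrative caller (the declaration is repeated here only for self-containment; the real one lives in the effects factory headers):

    extern "C" int EffectDumpEffects(int fd);  // provided by the effects factory

    // Hypothetical dump hook: append the loaded/skipped effect inventory to an
    // existing dump file descriptor (e.g. from a binder dump(fd, args) call).
    static void dumpLoadedAudioEffects(int fd) {
        (void)EffectDumpEffects(fd);
    }
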
diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h
index 560b485..518800d 100644
--- a/media/libeffects/factory/EffectsFactory.h
+++ b/media/libeffects/factory/EffectsFactory.h
@@ -26,6 +26,7 @@
 extern "C" {
 #endif
 
+#define PROPERTY_IGNORE_EFFECTS "ro.audio.ignore_effects"
 
 typedef struct list_elem_s {
     void *object;
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 40c7fef..af904a6 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -27,6 +27,7 @@
 
 #include <cutils/log.h>
 #include "EffectBundle.h"
+#include "math.h"
 
 
 // effect_handle_t interface implementation for bass boost
@@ -830,32 +831,69 @@
     int gainCorrection = 0;
     //Count the energy contribution per band for EQ and BassBoost only if they are active.
     float energyContribution = 0;
+    float energyCross = 0;
+    float energyBassBoost = 0;
+    float crossCorrection = 0;
 
     //EQ contribution
     if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-            float bandEnergy = (pContext->pBundledContext->bandGaindB[i] *
-                    LimitLevel_bandEnergyContribution[i])/15.0;
+            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+            float bandCoefficient = LimitLevel_bandEnergyCoefficient[i];
+            float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
             if (bandEnergy > 0)
                 energyContribution += bandEnergy;
         }
+
+        //cross EQ coefficients
+        float bandFactorSum = 0;
+        for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) {
+            float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0;
+            float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0;
+
+            if (bandFactor1 > 0 && bandFactor2 > 0) {
+                float crossEnergy = bandFactor1 * bandFactor2 *
+                        LimitLevel_bandEnergyCrossCoefficient[i];
+                bandFactorSum += bandFactor1 * bandFactor2;
+
+                if (crossEnergy > 0)
+                    energyCross += crossEnergy;
+            }
+        }
+        bandFactorSum -= 1.0;
+        if (bandFactorSum > 0)
+            crossCorrection = bandFactorSum * 0.7;
     }
 
     //BassBoost contribution
     if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
-        float bandEnergy = (pContext->pBundledContext->BassStrengthSaved *
-                LimitLevel_bassBoostEnergyContribution)/1000.0;
-        if (bandEnergy > 0)
-            energyContribution += bandEnergy;
+        float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0;
+        float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient;
+
+        energyContribution += boostFactor * boostCoefficient * boostCoefficient;
+
+        for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+            float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i];
+            float bandEnergy = boostFactor * bandFactor *
+                    bandCrossCoefficient;
+            if (bandEnergy > 0)
+                energyBassBoost += bandEnergy;
+        }
     }
 
     //Virtualizer contribution
     if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
-                   energyContribution += LimitLevel_virtualizerContribution;
-        }
+        energyContribution += LimitLevel_virtualizerContribution *
+                LimitLevel_virtualizerContribution;
+    }
+
+    double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) -
+            crossCorrection;
+    ALOGV(" TOTAL energy estimation: %0.2f", totalEnergyEstimation);
 
     //roundoff
-    int maxLevelRound = (int)(energyContribution + 0.99);
+    int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
     if (maxLevelRound + pContext->pBundledContext->volume > 0) {
         gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
     }
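
To summarize the new headroom math above: with g_i = bandGaindB[i] / 15, b = BassStrengthSaved / 1000, v = LimitLevel_virtualizerContribution, and c_i, x_i, y_i, k the band, EQ-cross, bass-cross, and bass-boost coefficients, the value fed to the round-off step is roughly (ignoring the per-feature enable flags; [.]_+ drops negative terms, and the cross sums only include adjacent band pairs where both gains are positive):

    E = \sqrt{\sum_i [g_i c_i^2]_+ + \sum_i [g_i g_{i+1} x_i]_+ + b k^2
              + \sum_i [b g_i y_i]_+ + v^2}
        - 0.7 \Big[\sum_i g_i g_{i+1} - 1\Big]_+

The round-off then proceeds as before, via maxLevelRound = (int)(E + 0.99).
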
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index b3071f4..9459b87 100644
--- a/media