am ef6c1c57: Merge changes Ie449043b,I0c08e8cd

* commit 'ef6c1c57263dfcc8c577fda5fb6293b3f9899612':
  GPU: Build gralloc.ranchu.so
  init.ranchu.rc: enable qemu-props & use GPU emulation if available
diff --git a/camera/EmulatedCamera3.cpp b/camera/EmulatedCamera3.cpp
index 47de44f..e9110cc 100644
--- a/camera/EmulatedCamera3.cpp
+++ b/camera/EmulatedCamera3.cpp
@@ -41,7 +41,7 @@
 EmulatedCamera3::EmulatedCamera3(int cameraId,
         struct hw_module_t* module):
         EmulatedBaseCamera(cameraId,
-                CAMERA_DEVICE_API_VERSION_3_0,
+                CAMERA_DEVICE_API_VERSION_3_3,
                 &common,
                 module),
         mStatus(STATUS_ERROR)
@@ -51,13 +51,6 @@
 
     mCallbackOps = NULL;
 
-    mVendorTagOps.get_camera_vendor_section_name =
-            EmulatedCamera3::get_camera_vendor_section_name;
-    mVendorTagOps.get_camera_vendor_tag_name =
-            EmulatedCamera3::get_camera_vendor_tag_name;
-    mVendorTagOps.get_camera_vendor_tag_type =
-            EmulatedCamera3::get_camera_vendor_tag_type;
-    mVendorTagOps.parent = this;
 }
 
 /* Destructs EmulatedCamera3 instance. */
@@ -156,21 +149,9 @@
     return INVALID_OPERATION;
 }
 
-/** Custom tag query methods */
-
-const char* EmulatedCamera3::getVendorSectionName(uint32_t tag) {
+status_t EmulatedCamera3::flush() {
     ALOGE("%s: Not implemented", __FUNCTION__);
-    return NULL;
-}
-
-const char* EmulatedCamera3::getVendorTagName(uint32_t tag) {
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return NULL;
-}
-
-int EmulatedCamera3::getVendorTagType(uint32_t tag) {
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return -1;
+    return INVALID_OPERATION;
 }
 
 /** Debug methods */
@@ -241,39 +222,16 @@
     return ec->constructDefaultRequestSettings(type);
 }
 
-void EmulatedCamera3::get_metadata_vendor_tag_ops(const camera3_device_t *d,
-        vendor_tag_query_ops_t *ops) {
-    ops->get_camera_vendor_section_name = get_camera_vendor_section_name;
-    ops->get_camera_vendor_tag_name = get_camera_vendor_tag_name;
-    ops->get_camera_vendor_tag_type = get_camera_vendor_tag_type;
-}
-
-const char* EmulatedCamera3::get_camera_vendor_section_name(
-        const vendor_tag_query_ops_t *v,
-        uint32_t tag) {
-    EmulatedCamera3* ec = static_cast<const TagOps*>(v)->parent;
-    return ec->getVendorSectionName(tag);
-}
-
-const char* EmulatedCamera3::get_camera_vendor_tag_name(
-        const vendor_tag_query_ops_t *v,
-        uint32_t tag) {
-    EmulatedCamera3* ec = static_cast<const TagOps*>(v)->parent;
-    return ec->getVendorTagName(tag);
-}
-
-int EmulatedCamera3::get_camera_vendor_tag_type(
-        const vendor_tag_query_ops_t *v,
-        uint32_t tag)  {
-    EmulatedCamera3* ec = static_cast<const TagOps*>(v)->parent;
-    return ec->getVendorTagType(tag);
-}
-
 void EmulatedCamera3::dump(const camera3_device_t *d, int fd) {
     EmulatedCamera3* ec = getInstance(d);
     ec->dump(fd);
 }
 
+int EmulatedCamera3::flush(const camera3_device_t *d) {
+    EmulatedCamera3* ec = getInstance(d);
+    return ec->flush();
+}
+
 int EmulatedCamera3::close(struct hw_device_t* device) {
     EmulatedCamera3* ec =
             static_cast<EmulatedCamera3*>(
@@ -288,11 +246,26 @@
 camera3_device_ops_t EmulatedCamera3::sDeviceOps = {
     EmulatedCamera3::initialize,
     EmulatedCamera3::configure_streams,
-    EmulatedCamera3::register_stream_buffers,
+    /* DEPRECATED: register_stream_buffers */ nullptr,
     EmulatedCamera3::construct_default_request_settings,
     EmulatedCamera3::process_capture_request,
-    EmulatedCamera3::get_metadata_vendor_tag_ops,
-    EmulatedCamera3::dump
+    /* DEPRECATED: get_metadata_vendor_tag_ops */ nullptr,
+    EmulatedCamera3::dump,
+    EmulatedCamera3::flush
+};
+
+const char* EmulatedCamera3::sAvailableCapabilitiesStrings[NUM_CAPABILITIES] = {
+    "BACKWARD_COMPATIBLE",
+    "MANUAL_SENSOR",
+    "MANUAL_POST_PROCESSING",
+    "RAW",
+    "PRIVATE_REPROCESSING",
+    "READ_SENSOR_SETTINGS",
+    "BURST_CAPTURE",
+    "YUV_REPROCESSING",
+    "DEPTH_OUTPUT",
+    "CONSTRAINED_HIGH_SPEED_VIDEO",
+    "FULL_LEVEL"
 };
 
 }; /* namespace android */
diff --git a/camera/EmulatedCamera3.h b/camera/EmulatedCamera3.h
index c1bddf6..9d2d7b2 100644
--- a/camera/EmulatedCamera3.h
+++ b/camera/EmulatedCamera3.h
@@ -54,6 +54,27 @@
     /* Destructs EmulatedCamera2 instance. */
     virtual ~EmulatedCamera3();
 
+    /* List of all defined capabilities plus useful HW levels */
+    enum AvailableCapabilities {
+        BACKWARD_COMPATIBLE,
+        MANUAL_SENSOR,
+        MANUAL_POST_PROCESSING,
+        RAW,
+        PRIVATE_REPROCESSING,
+        READ_SENSOR_SETTINGS,
+        BURST_CAPTURE,
+        YUV_REPROCESSING,
+        DEPTH_OUTPUT,
+        CONSTRAINED_HIGH_SPEED_VIDEO,
+        // Levels
+        FULL_LEVEL,
+
+        NUM_CAPABILITIES
+    };
+
+    // Char strings for above enum, with size NUM_CAPABILITIES
+    static const char *sAvailableCapabilitiesStrings[];
+
     /****************************************************************************
      * Abstract API
      ***************************************************************************/
@@ -99,17 +120,12 @@
 
     virtual status_t processCaptureRequest(camera3_capture_request *request);
 
+    virtual status_t flush();
+
     /** Debug methods */
 
     virtual void dump(int fd);
 
-    /** Tag query methods */
-    virtual const char *getVendorSectionName(uint32_t tag);
-
-    virtual const char *getVendorTagName(uint32_t tag);
-
-    virtual int getVendorTagType(uint32_t tag);
-
     /****************************************************************************
      * Camera API callbacks as defined by camera3_device_ops structure.  See
      * hardware/libhardware/include/hardware/camera3.h for information on each
@@ -142,22 +158,10 @@
     static int process_capture_request(const struct camera3_device *,
             camera3_capture_request_t *request);
 
-    /** Vendor metadata registration */
-    static void get_metadata_vendor_tag_ops(const camera3_device_t *,
-            vendor_tag_query_ops_t *ops);
-    // for get_metadata_vendor_tag_ops
-    static const char* get_camera_vendor_section_name(
-            const vendor_tag_query_ops_t *,
-            uint32_t tag);
-    static const char* get_camera_vendor_tag_name(
-            const vendor_tag_query_ops_t *,
-            uint32_t tag);
-    static int get_camera_vendor_tag_type(
-            const vendor_tag_query_ops_t *,
-            uint32_t tag);
-
     static void dump(const camera3_device_t *, int fd);
 
+    static int flush(const camera3_device_t *);
+
     /** For hw_device_t ops */
     static int close(struct hw_device_t* device);
 
@@ -166,11 +170,6 @@
      ***************************************************************************/
   protected:
 
-    struct TagOps : public vendor_tag_query_ops {
-        EmulatedCamera3 *parent;
-    };
-    TagOps      mVendorTagOps;
-
     enum {
         // State at construction time, and after a device operation error
         STATUS_ERROR = 0,
diff --git a/camera/EmulatedCameraFactory.cpp b/camera/EmulatedCameraFactory.cpp
index 0a8ada7..586c9c6 100755
--- a/camera/EmulatedCameraFactory.cpp
+++ b/camera/EmulatedCameraFactory.cpp
@@ -259,6 +259,12 @@
     return OK;
 }
 
+void EmulatedCameraFactory::getVendorTagOps(vendor_tag_ops_t* ops) {
+    ALOGV("%s: ops = %p", __FUNCTION__, ops);
+
+    // No vendor tags defined for emulator yet, so not touching ops
+}
+
 /****************************************************************************
  * Camera HAL API callbacks.
  ***************************************************************************/
@@ -302,6 +308,17 @@
     return gEmulatedCameraFactory.setCallbacks(callbacks);
 }
 
+void EmulatedCameraFactory::get_vendor_tag_ops(vendor_tag_ops_t* ops)
+{
+    gEmulatedCameraFactory.getVendorTagOps(ops);
+}
+
+int EmulatedCameraFactory::open_legacy(const struct hw_module_t* module,
+        const char* id, uint32_t halVersion, struct hw_device_t** device) {
+    // Not supporting legacy open
+    return -ENOSYS;
+}
+
 /********************************************************************************
  * Internal API
  *******************************************************************************/
diff --git a/camera/EmulatedCameraFactory.h b/camera/EmulatedCameraFactory.h
index 470f5ea..3f19be1 100755
--- a/camera/EmulatedCameraFactory.h
+++ b/camera/EmulatedCameraFactory.h
@@ -80,6 +80,11 @@
      */
     int setCallbacks(const camera_module_callbacks_t *callbacks);
 
+    /* Fill in vendor tags for the module
+     * This method is called in response to camera_module_t::get_vendor_tag_ops callback.
+     */
+    void getVendorTagOps(vendor_tag_ops_t* ops);
+
     /****************************************************************************
      * Camera HAL API callbacks.
      ***************************************************************************/
@@ -94,6 +99,13 @@
     /* camera_module_t::set_callbacks callback entry point. */
     static int set_callbacks(const camera_module_callbacks_t *callbacks);
 
+    /* camera_module_t::get_vendor_tag_ops callback entry point */
+    static void get_vendor_tag_ops(vendor_tag_ops_t* ops);
+
+    /* camera_module_t::open_legacy callback entry point */
+    static int open_legacy(const struct hw_module_t* module, const char* id,
+            uint32_t halVersion, struct hw_device_t** device);
+
 private:
     /* hw_module_methods_t::open callback entry point. */
     static int device_open(const hw_module_t* module,
diff --git a/camera/EmulatedCameraHal.cpp b/camera/EmulatedCameraHal.cpp
index 802a5bb..b1f8b3a 100755
--- a/camera/EmulatedCameraHal.cpp
+++ b/camera/EmulatedCameraHal.cpp
@@ -31,7 +31,7 @@
 camera_module_t HAL_MODULE_INFO_SYM = {
     common: {
          tag:                HARDWARE_MODULE_TAG,
-         module_api_version: CAMERA_MODULE_API_VERSION_2_1,
+         module_api_version: CAMERA_MODULE_API_VERSION_2_3,
          hal_api_version:    HARDWARE_HAL_API_VERSION,
          id:                 CAMERA_HARDWARE_MODULE_ID,
          name:               "Emulated Camera Module",
@@ -43,4 +43,6 @@
     get_number_of_cameras:  android::EmulatedCameraFactory::get_number_of_cameras,
     get_camera_info:        android::EmulatedCameraFactory::get_camera_info,
     set_callbacks:          android::EmulatedCameraFactory::set_callbacks,
+    get_vendor_tag_ops:     android::EmulatedCameraFactory::get_vendor_tag_ops,
+    open_legacy:            android::EmulatedCameraFactory::open_legacy
 };
diff --git a/camera/EmulatedFakeCamera2.cpp b/camera/EmulatedFakeCamera2.cpp
index 844a71e..d1beb92 100644
--- a/camera/EmulatedFakeCamera2.cpp
+++ b/camera/EmulatedFakeCamera2.cpp
@@ -42,7 +42,7 @@
 const int64_t SEC = MSEC * 1000LL;
 
 const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
-        HAL_PIXEL_FORMAT_RAW_SENSOR,
+        HAL_PIXEL_FORMAT_RAW16,
         HAL_PIXEL_FORMAT_BLOB,
         HAL_PIXEL_FORMAT_RGBA_8888,
         //        HAL_PIXEL_FORMAT_YV12,
@@ -55,7 +55,7 @@
 };
 
 const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
-    Sensor::kFrameDurationRange[0]
+    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
 };
 
 const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
@@ -69,7 +69,7 @@
 };
 
 const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
-    Sensor::kFrameDurationRange[0]
+    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
 };
 
 const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
@@ -84,7 +84,7 @@
 
 
 const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
-    Sensor::kFrameDurationRange[0]
+    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
 };
 
 
@@ -374,7 +374,7 @@
     const uint32_t *availableSizes;
     size_t availableSizeCount;
     switch (format) {
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
             availableSizes = kAvailableRawSizes;
             availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
             break;
@@ -412,7 +412,7 @@
     }
 
     switch (format) {
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
             if (mRawStreamCount >= kMaxRawStreamCount) {
                 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
                         __FUNCTION__, mRawStreamCount);
@@ -523,7 +523,7 @@
     }
 
     switch(mStreams.valueAt(streamIndex).format) {
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
             mRawStreamCount--;
             break;
         case HAL_PIXEL_FORMAT_BLOB:
@@ -1455,15 +1455,15 @@
     // the rectangles don't line up quite right.
     const size_t numFaces = 2;
     int32_t rects[numFaces * 4] = {
-            Sensor::kResolution[0] * 10 / 20,
-            Sensor::kResolution[1] * 15 / 20,
-            Sensor::kResolution[0] * 12 / 20,
-            Sensor::kResolution[1] * 17 / 20,
+        static_cast<int32_t>(Sensor::kResolution[0] * 10 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 15 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 12 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 17 / 20),
 
-            Sensor::kResolution[0] * 16 / 20,
-            Sensor::kResolution[1] * 15 / 20,
-            Sensor::kResolution[0] * 18 / 20,
-            Sensor::kResolution[1] * 17 / 20
+        static_cast<int32_t>(Sensor::kResolution[0] * 16 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 15 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 18 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 17 / 20)
     };
     // To simulate some kind of real detection going on, we jitter the rectangles on
     // each frame by a few pixels in each dimension.
@@ -1496,19 +1496,19 @@
     // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
     // mouthX, mouthY). The mapping is the same as the face rectangles.
     int32_t features[numFaces * 6] = {
-        Sensor::kResolution[0] * 10.5 / 20,
-        Sensor::kResolution[1] * 16 / 20,
-        Sensor::kResolution[0] * 11.5 / 20,
-        Sensor::kResolution[1] * 16 / 20,
-        Sensor::kResolution[0] * 11 / 20,
-        Sensor::kResolution[1] * 16.5 / 20,
+        static_cast<int32_t>(Sensor::kResolution[0] * 10.5 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 11.5 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 11 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16.5 / 20),
 
-        Sensor::kResolution[0] * 16.5 / 20,
-        Sensor::kResolution[1] * 16 / 20,
-        Sensor::kResolution[0] * 17.5 / 20,
-        Sensor::kResolution[1] * 16 / 20,
-        Sensor::kResolution[0] * 17 / 20,
-        Sensor::kResolution[1] * 16.5 / 20,
+        static_cast<int32_t>(Sensor::kResolution[0] * 16.5 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 17.5 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16 / 20),
+        static_cast<int32_t>(Sensor::kResolution[0] * 17 / 20),
+        static_cast<int32_t>(Sensor::kResolution[1] * 16.5 / 20),
     };
     // Jitter these a bit less than the rects
     for (size_t i = 0; i < numFaces * 6; i++) {
@@ -2104,21 +2104,6 @@
             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
     ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
 
-    float lensPosition[3];
-    if (mFacingBack) {
-        // Back-facing camera is center-top on device
-        lensPosition[0] = 0;
-        lensPosition[1] = 20;
-        lensPosition[2] = -5;
-    } else {
-        // Front-facing camera is center-right on device
-        lensPosition[0] = 20;
-        lensPosition[1] = 20;
-        lensPosition[2] = 0;
-    }
-    ADD_OR_SIZE(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
-            sizeof(float));
-
     // android.sensor
 
     ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
@@ -2149,8 +2134,10 @@
             &Sensor::kMaxRawValue, 1);
 
     static const int32_t blackLevelPattern[4] = {
-            Sensor::kBlackLevel, Sensor::kBlackLevel,
-            Sensor::kBlackLevel, Sensor::kBlackLevel
+        static_cast<int32_t>(Sensor::kBlackLevel),
+        static_cast<int32_t>(Sensor::kBlackLevel),
+        static_cast<int32_t>(Sensor::kBlackLevel),
+        static_cast<int32_t>(Sensor::kBlackLevel)
     };
     ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
             blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
@@ -2511,7 +2498,7 @@
 
     /** android.scaler */
     static const int32_t cropRegion[3] = {
-        0, 0, Sensor::kResolution[0]
+        0, 0, static_cast<int32_t>(Sensor::kResolution[0])
     };
     ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
 
@@ -2598,7 +2585,10 @@
     ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
 
     static const int32_t controlRegions[5] = {
-        0, 0, Sensor::kResolution[0], Sensor::kResolution[1], 1000
+        0, 0,
+        static_cast<int32_t>(Sensor::kResolution[0]),
+        static_cast<int32_t>(Sensor::kResolution[1]),
+        1000
     };
     ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
 
diff --git a/camera/EmulatedFakeCamera3.cpp b/camera/EmulatedFakeCamera3.cpp
index fe07a25..50d6096 100644
--- a/camera/EmulatedFakeCamera3.cpp
+++ b/camera/EmulatedFakeCamera3.cpp
@@ -24,6 +24,7 @@
 //#define LOG_NDEBUG 0
 //#define LOG_NNDEBUG 0
 #define LOG_TAG "EmulatedCamera_FakeCamera3"
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 #include "EmulatedFakeCamera3.h"
@@ -37,6 +38,8 @@
 #include "fake-pipeline2/JpegCompressor.h"
 #include <cmath>
 
+#include <vector>
+
 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
 #define ALOGVV ALOGV
 #else
@@ -53,53 +56,16 @@
 const int64_t MSEC = USEC * 1000LL;
 const int64_t SEC = MSEC * 1000LL;
 
-const int32_t EmulatedFakeCamera3::kAvailableFormats[5] = {
-        HAL_PIXEL_FORMAT_RAW_SENSOR,
+const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
+        HAL_PIXEL_FORMAT_RAW16,
         HAL_PIXEL_FORMAT_BLOB,
         HAL_PIXEL_FORMAT_RGBA_8888,
         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
         // These are handled by YCbCr_420_888
         //        HAL_PIXEL_FORMAT_YV12,
         //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
-        HAL_PIXEL_FORMAT_YCbCr_420_888
-};
-
-const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
-    640, 480
-    //    Sensor::kResolution[0], Sensor::kResolution[1]
-};
-
-const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
-    (const uint64_t)Sensor::kFrameDurationRange[0]
-};
-
-const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[4] = {
-    640, 480, 320, 240
-    //    Sensor::kResolution[0], Sensor::kResolution[1]
-};
-
-const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
-    320, 240, 160, 120
-    //    Sensor::kResolution[0], Sensor::kResolution[1]
-};
-
-const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
-    (const uint64_t)Sensor::kFrameDurationRange[0]
-};
-
-const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
-    640, 480
-    //    Sensor::kResolution[0], Sensor::kResolution[1]
-};
-
-const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
-    320, 240
-    //    Sensor::kResolution[0], Sensor::kResolution[1]
-};
-
-
-const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
-    (const uint64_t)Sensor::kFrameDurationRange[0]
+        HAL_PIXEL_FORMAT_YCbCr_420_888,
+        HAL_PIXEL_FORMAT_Y16
 };
 
 /**
@@ -125,18 +91,13 @@
         struct hw_module_t* module) :
         EmulatedCamera3(cameraId, module),
         mFacingBack(facingBack) {
-    ALOGI("Constructing emulated fake camera 3 facing %s",
-            facingBack ? "back" : "front");
+    ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
+            mCameraID, facingBack ? "back" : "front");
 
     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
         mDefaultTemplates[i] = NULL;
     }
 
-    /**
-     * Front cameras = limited mode
-     * Back cameras = full mode
-     */
-    mFullMode = facingBack;
 }
 
 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
@@ -156,6 +117,13 @@
         return INVALID_OPERATION;
     }
 
+    res = getCameraCapabilities();
+    if (res != OK) {
+        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
     res = constructStaticInfo();
     if (res != OK) {
         ALOGE("%s: Unable to allocate static info: %s (%d)",
@@ -198,8 +166,8 @@
     mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
     mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
     mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
-    mAfTriggerId  = 0;
-    mAeTriggerId  = 0;
+    mAeCounter    = 0;
+    mAeTargetExposureTime = kNormalExposureTime;
     mAeCurrentExposureTime = kNormalExposureTime;
     mAeCurrentSensitivity  = kNormalSensitivity;
 
@@ -341,20 +309,7 @@
             // New stream, construct info
             PrivateStreamInfo *privStream = new PrivateStreamInfo();
             privStream->alive = true;
-            privStream->registered = false;
 
-            switch (newStream->stream_type) {
-                case CAMERA3_STREAM_OUTPUT:
-                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
-                    break;
-                case CAMERA3_STREAM_INPUT:
-                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
-                    break;
-                case CAMERA3_STREAM_BIDIRECTIONAL:
-                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
-                            GRALLOC_USAGE_HW_CAMERA_WRITE;
-                    break;
-            }
             newStream->max_buffers = kMaxBufferCount;
             newStream->priv = privStream;
             mStreams.push_back(newStream);
@@ -364,6 +319,20 @@
                     static_cast<PrivateStreamInfo*>(newStream->priv);
             privStream->alive = true;
         }
+        // Always update usage and max buffers
+        newStream->max_buffers = kMaxBufferCount;
+        switch (newStream->stream_type) {
+            case CAMERA3_STREAM_OUTPUT:
+                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            case CAMERA3_STREAM_INPUT:
+                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
+                break;
+            case CAMERA3_STREAM_BIDIRECTIONAL:
+                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
+                        GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+        }
     }
 
     /**
@@ -394,49 +363,11 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock l(mLock);
 
-    /**
-     * Sanity checks
-     */
+    // Should not be called in HAL versions >= 3.2
 
-    // OK: register streams at any time during configure
-    // (but only once per stream)
-    if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
-        ALOGE("%s: Cannot register buffers in state %d",
-                __FUNCTION__, mStatus);
-        return NO_INIT;
-    }
-
-    if (bufferSet == NULL) {
-        ALOGE("%s: NULL buffer set!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    StreamIterator s = mStreams.begin();
-    for (; s != mStreams.end(); ++s) {
-        if (bufferSet->stream == *s) break;
-    }
-    if (s == mStreams.end()) {
-        ALOGE("%s: Trying to register buffers for a non-configured stream!",
-                __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    /**
-     * Register the buffers. This doesn't mean anything to the emulator besides
-     * marking them off as registered.
-     */
-
-    PrivateStreamInfo *privStream =
-            static_cast<PrivateStreamInfo*>((*s)->priv);
-
-    if (privStream->registered) {
-        ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    privStream->registered = true;
-
-    return OK;
+    ALOGE("%s: Should not be invoked on new HALs!",
+            __FUNCTION__);
+    return NO_INIT;
 }
 
 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
@@ -444,12 +375,18 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock l(mLock);
 
-    if (type < 0 || type >= CAMERA2_TEMPLATE_COUNT) {
+    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
         ALOGE("%s: Unknown request settings template: %d",
                 __FUNCTION__, type);
         return NULL;
     }
 
+    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
+        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
+                __FUNCTION__, type);
+        return NULL;
+    }
+
     /**
      * Cache is not just an optimization - pointer returned has to live at
      * least as long as the camera device instance does.
@@ -462,9 +399,6 @@
 
     /** android.request */
 
-    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
-    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
-
     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
     settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
 
@@ -476,162 +410,176 @@
 
     /** android.lens */
 
-    static const float focusDistance = 0;
-    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
-
-    static const float aperture = 2.8f;
-    settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
-
     static const float focalLength = 5.0f;
     settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
 
-    static const float filterDensity = 0;
-    settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const float focusDistance = 0;
+        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
 
-    static const uint8_t opticalStabilizationMode =
-            ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
-    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
-            &opticalStabilizationMode, 1);
+        static const float aperture = 2.8f;
+        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
 
-    // FOCUS_RANGE set only in frame
+        static const float filterDensity = 0;
+        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
+
+        static const uint8_t opticalStabilizationMode =
+                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+                &opticalStabilizationMode, 1);
+
+        // FOCUS_RANGE set only in frame
+    }
 
     /** android.sensor */
 
-    static const int64_t exposureTime = 10 * MSEC;
-    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
+    if (hasCapability(MANUAL_SENSOR)) {
+        static const int64_t exposureTime = 10 * MSEC;
+        settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
 
-    static const int64_t frameDuration = 33333333L; // 1/30 s
-    settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+        static const int64_t frameDuration = 33333333L; // 1/30 s
+        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
 
-    static const int32_t sensitivity = 100;
-    settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
+        static const int32_t sensitivity = 100;
+        settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
+    }
 
     // TIMESTAMP set only in frame
 
     /** android.flash */
 
-    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
-    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
 
-    static const uint8_t flashPower = 10;
-    settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
+        static const uint8_t flashPower = 10;
+        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
 
-    static const int64_t firingTime = 0;
-    settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
+        static const int64_t firingTime = 0;
+        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
+    }
 
     /** Processing block modes */
-    uint8_t hotPixelMode = 0;
-    uint8_t demosaicMode = 0;
-    uint8_t noiseMode = 0;
-    uint8_t shadingMode = 0;
-    uint8_t colorMode = 0;
-    uint8_t tonemapMode = 0;
-    uint8_t edgeMode = 0;
-    switch (type) {
-      case CAMERA2_TEMPLATE_STILL_CAPTURE:
-        // fall-through
-      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
-        // fall-through
-      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
-        hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
-        demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
-        noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
-        shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
-        colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
-        tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
-        edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
-        break;
-      case CAMERA2_TEMPLATE_PREVIEW:
-        // fall-through
-      case CAMERA2_TEMPLATE_VIDEO_RECORD:
-        // fall-through
-      default:
-        hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
-        demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
-        noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
-        shadingMode = ANDROID_SHADING_MODE_FAST;
-        colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
-        tonemapMode = ANDROID_TONEMAP_MODE_FAST;
-        edgeMode = ANDROID_EDGE_MODE_FAST;
-        break;
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        uint8_t hotPixelMode = 0;
+        uint8_t demosaicMode = 0;
+        uint8_t noiseMode = 0;
+        uint8_t shadingMode = 0;
+        uint8_t colorMode = 0;
+        uint8_t tonemapMode = 0;
+        uint8_t edgeMode = 0;
+        switch (type) {
+            case CAMERA3_TEMPLATE_STILL_CAPTURE:
+                // fall-through
+            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+                // fall-through
+            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+                hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
+                demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
+                noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
+                shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
+                colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
+                tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
+                edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
+                break;
+            case CAMERA3_TEMPLATE_PREVIEW:
+                // fall-through
+            case CAMERA3_TEMPLATE_VIDEO_RECORD:
+                // fall-through
+            default:
+                hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
+                demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
+                noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
+                shadingMode = ANDROID_SHADING_MODE_FAST;
+                colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
+                tonemapMode = ANDROID_TONEMAP_MODE_FAST;
+                edgeMode = ANDROID_EDGE_MODE_FAST;
+                break;
+        }
+        settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
+        settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
+        settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
+        settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
+        settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
+        settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
+        settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
     }
-    settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
-    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
-    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
-    settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
-    settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
-    settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
-    settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
 
-    /** android.noise */
-    static const uint8_t noiseStrength = 5;
-    settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
+    /** android.colorCorrection */
 
-    /** android.color */
-    static const float colorTransform[9] = {
-        1.0f, 0.f, 0.f,
-        0.f, 1.f, 0.f,
-        0.f, 0.f, 1.f
-    };
-    settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const camera_metadata_rational colorTransform[9] = {
+            {1,1}, {0,1}, {0,1},
+            {0,1}, {1,1}, {0,1},
+            {0,1}, {0,1}, {1,1}
+        };
+        settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
+
+        static const float colorGains[4] = {
+            1.0f, 1.0f, 1.0f, 1.0f
+        };
+        settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
+    }
 
     /** android.tonemap */
-    static const float tonemapCurve[4] = {
-        0.f, 0.f,
-        1.f, 1.f
-    };
-    settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
-    settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
-    settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
 
-    /** android.edge */
-    static const uint8_t edgeStrength = 5;
-    settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const float tonemapCurve[4] = {
+            0.f, 0.f,
+            1.f, 1.f
+        };
+        settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
+        settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
+        settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
+    }
 
     /** android.scaler */
-    static const int32_t cropRegion[3] = {
-        0, 0, (int32_t)Sensor::kResolution[0]
-    };
-    settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t cropRegion[4] = {
+            0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1]
+        };
+        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
+    }
 
     /** android.jpeg */
-    static const uint8_t jpegQuality = 80;
-    settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t jpegQuality = 80;
+        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
 
-    static const int32_t thumbnailSize[2] = {
-        640, 480
-    };
-    settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
+        static const int32_t thumbnailSize[2] = {
+            640, 480
+        };
+        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
 
-    static const uint8_t thumbnailQuality = 80;
-    settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
+        static const uint8_t thumbnailQuality = 80;
+        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
 
-    static const double gpsCoordinates[2] = {
-        0, 0
-    };
-    settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
+        static const double gpsCoordinates[2] = {
+            0, 0
+        };
+        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
 
-    static const uint8_t gpsProcessingMethod[32] = "None";
-    settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
+        static const uint8_t gpsProcessingMethod[32] = "None";
+        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
 
-    static const int64_t gpsTimestamp = 0;
-    settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
+        static const int64_t gpsTimestamp = 0;
+        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
 
-    static const int32_t jpegOrientation = 0;
-    settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+        static const int32_t jpegOrientation = 0;
+        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+    }
 
     /** android.stats */
 
-    static const uint8_t faceDetectMode =
-        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
-    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t faceDetectMode =
+                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
 
-    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
-    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
-
-    static const uint8_t sharpnessMapMode =
-        ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
-    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
+        static const uint8_t hotPixelMapMode =
+                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+    }
 
     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
     // sharpnessMap only in frames
@@ -640,99 +588,130 @@
 
     uint8_t controlIntent = 0;
     switch (type) {
-      case CAMERA2_TEMPLATE_PREVIEW:
+      case CAMERA3_TEMPLATE_PREVIEW:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
         break;
-      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+      case CAMERA3_TEMPLATE_STILL_CAPTURE:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
         break;
-      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+      case CAMERA3_TEMPLATE_VIDEO_RECORD:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
         break;
-      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
         break;
-      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
         break;
+      case CAMERA3_TEMPLATE_MANUAL:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
+        break;
       default:
         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
         break;
     }
     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
 
-    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
+    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+            ANDROID_CONTROL_MODE_OFF :
+            ANDROID_CONTROL_MODE_AUTO;
     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
 
-    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
-    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
-
-    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
-    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
-
-    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
-    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
-
-    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
-    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
-
-    static const int32_t controlRegions[5] = {
-        0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
-        1000
+    int32_t aeTargetFpsRange[2] = {
+        5, 30
     };
-    settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
-
-    static const int32_t aeExpCompensation = 0;
-    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
-
-    static const int32_t aeTargetFpsRange[2] = {
-        10, 30
-    };
+    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
+        aeTargetFpsRange[0] = 30;
+    }
     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
 
-    static const uint8_t aeAntibandingMode =
-            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
-    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
 
-    static const uint8_t awbMode =
-            ANDROID_CONTROL_AWB_MODE_AUTO;
-    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
+        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
 
-    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
-    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
+        static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
 
-    settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
+        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+                ANDROID_CONTROL_AE_MODE_OFF :
+                ANDROID_CONTROL_AE_MODE_ON;
+        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
 
-    uint8_t afMode = 0;
-    switch (type) {
-      case CAMERA2_TEMPLATE_PREVIEW:
-        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
-        break;
-      case CAMERA2_TEMPLATE_STILL_CAPTURE:
-        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
-        break;
-      case CAMERA2_TEMPLATE_VIDEO_RECORD:
-        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
-        break;
-      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
-        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
-        break;
-      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
-        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
-        break;
-      default:
-        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
-        break;
+        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
+        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+
+        static const int32_t controlRegions[5] = {
+            0, 0, 0, 0, 0
+        };
+        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
+
+        static const int32_t aeExpCompensation = 0;
+        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
+
+
+        static const uint8_t aeAntibandingMode =
+                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
+
+        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
+
+        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+                ANDROID_CONTROL_AWB_MODE_OFF :
+                ANDROID_CONTROL_AWB_MODE_AUTO;
+        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
+
+        uint8_t afMode = 0;
+        switch (type) {
+            case CAMERA3_TEMPLATE_PREVIEW:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+                break;
+            case CAMERA3_TEMPLATE_STILL_CAPTURE:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+                break;
+            case CAMERA3_TEMPLATE_VIDEO_RECORD:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+                break;
+            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+                break;
+            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+                break;
+            case CAMERA3_TEMPLATE_MANUAL:
+                afMode = ANDROID_CONTROL_AF_MODE_OFF;
+                break;
+            default:
+                afMode = ANDROID_CONTROL_AF_MODE_AUTO;
+                break;
+        }
+        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
+
+        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
+
+        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
+
+        static const uint8_t vstabMode =
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
+
+        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
+        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
+
+        static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
+
+        static const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
+        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
+
+        static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
     }
-    settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
-
-    settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
-
-    static const uint8_t vstabMode =
-        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
-    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
-
-    // aeState, awbState, afState only in frame
 
     mDefaultTemplates[type] = settings.release();
 
@@ -805,8 +784,8 @@
                     __FUNCTION__, frameNumber, idx);
             return BAD_VALUE;
         }
-        if (!priv->alive || !priv->registered) {
-            ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream!",
+        if (!priv->alive) {
+            ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
                     __FUNCTION__, frameNumber, idx);
             return BAD_VALUE;
         }
@@ -860,10 +839,19 @@
     nsecs_t  frameDuration;
     uint32_t sensitivity;
     bool     needJpeg = false;
+    camera_metadata_entry_t entry;
 
-    exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
-    frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
-    sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
+    entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
+    exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
+    entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
+    frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
+    entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
+    sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
+
+    if (exposureTime > frameDuration) {
+        frameDuration = exposureTime + Sensor::kMinVerticalBlank;
+        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+    }
 
     Buffers *sensorBuffers = new Buffers();
     HalBufferVector *buffers = new HalBufferVector();
@@ -883,6 +871,7 @@
         destBuf.height   = srcBuf.stream->height;
         destBuf.format   = privBuffer->format; // Use real private format
         destBuf.stride   = srcBuf.stream->width; // TODO: query from gralloc
+        destBuf.dataSpace = srcBuf.stream->data_space;
         destBuf.buffer   = srcBuf.buffer;
 
         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
@@ -1004,285 +993,530 @@
     return OK;
 }
 
+status_t EmulatedFakeCamera3::flush() {
+    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
+    return OK;
+}
+
 /** Debug methods */
 
 void EmulatedFakeCamera3::dump(int fd) {
 
 }
 
-/** Tag query methods */
-const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
-    return NULL;
-}
-
-const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
-    return NULL;
-}
-
-int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
-    return 0;
-}
-
 /**
  * Private methods
  */
 
+status_t EmulatedFakeCamera3::getCameraCapabilities() {
+
+    const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
+
+    /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
+     * property doesn't exist, it is assumed to list FULL. */
+    char prop[PROPERTY_VALUE_MAX];
+    if (property_get(key, prop, NULL) > 0) {
+        char *saveptr = nullptr;
+        char *cap = strtok_r(prop, " ,", &saveptr);
+        while (cap != NULL) {
+            for (int i = 0; i < NUM_CAPABILITIES; i++) {
+                if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
+                    mCapabilities.add(static_cast<AvailableCapabilities>(i));
+                    break;
+                }
+            }
+            cap = strtok_r(NULL, " ,", &saveptr);
+        }
+        if (mCapabilities.size() == 0) {
+            ALOGE("%s had no valid capabilities: %s", key, prop);
+        }
+    }
+    // Default to FULL_LEVEL plus RAW if nothing is defined
+    if (mCapabilities.size() == 0) {
+        mCapabilities.add(FULL_LEVEL);
+        mCapabilities.add(RAW);
+    }
+
+    // Add level-based caps
+    if (hasCapability(FULL_LEVEL)) {
+        mCapabilities.add(BURST_CAPTURE);
+        mCapabilities.add(READ_SENSOR_SETTINGS);
+        mCapabilities.add(MANUAL_SENSOR);
+        mCapabilities.add(MANUAL_POST_PROCESSING);
+    }
+
+    // Backwards-compatible is required for most other caps
+    // Not required for DEPTH_OUTPUT, though.
+    if (hasCapability(BURST_CAPTURE) ||
+            hasCapability(READ_SENSOR_SETTINGS) ||
+            hasCapability(RAW) ||
+            hasCapability(MANUAL_SENSOR) ||
+            hasCapability(MANUAL_POST_PROCESSING) ||
+            hasCapability(PRIVATE_REPROCESSING) ||
+            hasCapability(YUV_REPROCESSING) ||
+            hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
+        mCapabilities.add(BACKWARD_COMPATIBLE);
+    }
+
+    ALOGI("Camera %d capabilities:", mCameraID);
+    for (size_t i = 0; i < mCapabilities.size(); i++) {
+        ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
+    }
+
+    return OK;
+}
+
+bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
+    ssize_t idx = mCapabilities.indexOf(cap);
+    return idx >= 0;
+}
+
 status_t EmulatedFakeCamera3::constructStaticInfo() {
 
     CameraMetadata info;
-    // android.lens
+    Vector<int32_t> availableCharacteristicsKeys;
+    status_t res;
 
-    // 5 cm min focus distance for back camera, infinity (fixed focus) for front
-    const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
-    info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
-            &minFocusDistance, 1);
-
-    // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
-    const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
-    info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
-            &minFocusDistance, 1);
-
-    static const float focalLength = 3.30f; // mm
-    info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
-            &focalLength, 1);
-    static const float aperture = 2.8f;
-    info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
-            &aperture, 1);
-    static const float filterDensity = 0;
-    info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
-            &filterDensity, 1);
-    static const uint8_t availableOpticalStabilization =
-            ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
-    info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
-            &availableOpticalStabilization, 1);
-
-    static const int32_t lensShadingMapSize[] = {1, 1};
-    info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
-            sizeof(lensShadingMapSize)/sizeof(int32_t));
-
-    uint8_t lensFacing = mFacingBack ?
-            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
-    info.update(ANDROID_LENS_FACING, &lensFacing, 1);
-
-    float lensPosition[3];
-    if (mFacingBack) {
-        // Back-facing camera is center-top on device
-        lensPosition[0] = 0;
-        lensPosition[1] = 20;
-        lensPosition[2] = -5;
-    } else {
-        // Front-facing camera is center-right on device
-        lensPosition[0] = 20;
-        lensPosition[1] = 20;
-        lensPosition[2] = 0;
-    }
-    info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
-            sizeof(float));
+#define ADD_STATIC_ENTRY(name, varptr, count) \
+        availableCharacteristicsKeys.add(name);   \
+        res = info.update(name, varptr, count); \
+        if (res != OK) return res
 
     // android.sensor
 
-    info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
-            Sensor::kExposureTimeRange, 2);
+    if (hasCapability(MANUAL_SENSOR)) {
 
-    info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
-            &Sensor::kFrameDurationRange[1], 1);
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+                Sensor::kExposureTimeRange, 2);
 
-    info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
-            Sensor::kSensitivityRange,
-            sizeof(Sensor::kSensitivityRange)
-            /sizeof(int32_t));
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+                &Sensor::kFrameDurationRange[1], 1);
 
-    info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
-            &Sensor::kColorFilterArrangement, 1);
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+                Sensor::kSensitivityRange,
+                sizeof(Sensor::kSensitivityRange)
+                /sizeof(int32_t));
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+                &Sensor::kSensitivityRange[1], 1);
+    }
 
     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
-    info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
             sensorPhysicalSize, 2);
 
-    info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
             (int32_t*)Sensor::kResolution, 2);
 
-    info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
-            (int32_t*)Sensor::kResolution, 2);
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            (int32_t*)Sensor::kActiveArray, 4);
 
-    info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
-            (int32_t*)&Sensor::kMaxRawValue, 1);
+    static const int32_t orientation = 90; // Aligned with 'long edge'
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
 
-    static const int32_t blackLevelPattern[4] = {
+    static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
+
+    if (hasCapability(RAW)) {
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+                &Sensor::kColorFilterArrangement, 1);
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
+                (int32_t*)&Sensor::kMaxRawValue, 1);
+
+        static const int32_t blackLevelPattern[4] = {
             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
-    };
-    info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
-            blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
+        };
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+                blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
+    }
 
-    static const int32_t orientation = 0; // unrotated (0 degrees)
-    info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t availableTestPatternModes[] = {
+            ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+                availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
+    }
 
-    //TODO: sensor color calibration fields
+    // android.lens
+
+    static const float focalLength = 3.30f; // mm
+    ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+            &focalLength, 1);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        // 5 cm min focus distance for back camera, infinity (fixed focus) for front
+        const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+                &minFocusDistance, 1);
+
+        // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
+        const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+                &hyperFocalDistance, 1);
+
+        static const float aperture = 2.8f;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+                &aperture, 1);
+        static const float filterDensity = 0;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+                &filterDensity, 1);
+        static const uint8_t availableOpticalStabilization =
+                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+                &availableOpticalStabilization, 1);
+
+        static const int32_t lensShadingMapSize[] = {1, 1};
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
+                sizeof(lensShadingMapSize)/sizeof(int32_t));
+
+        static const uint8_t lensFocusCalibration =
+                ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
+    }
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+        // These could be included for non-DEPTH capability as well, but making this variable for
+        // testing coverage
+
+        // 90 degree rotation to align with long edge of a phone device that's by default portrait
+        static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};
+
+        // Either a 180-degree rotation for back-facing, or no rotation for front-facing
+        const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0, (mFacingBack ? 0.f : 1.f)};
+
+        // Quarternion product, orientation change then facing
+        const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
+                                          qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
+                                          qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
+                                          qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
+                sizeof(lensPoseRotation)/sizeof(float));
+
+        // Only one camera facing each way, so 0 translation needed to the center of the 'main'
+        // camera
+        static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
+                sizeof(lensPoseTranslation)/sizeof(float));
+
+        // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and active array size
+        float f_x = focalLength * Sensor::kActiveArray[2] / sensorPhysicalSize[0];
+        float f_y = focalLength * Sensor::kActiveArray[3] / sensorPhysicalSize[1];
+        float c_x = Sensor::kActiveArray[2] / 2.f;
+        float c_y = Sensor::kActiveArray[3] / 2.f;
+        float s = 0.f;
+        const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
+                sizeof(lensIntrinsics)/sizeof(float));
+
+        // No radial or tangential distortion
+
+        float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
+                sizeof(lensRadialDistortion)/sizeof(float));
+
+    }
+
+
+    static const uint8_t lensFacing = mFacingBack ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
 
     // android.flash
-    static const uint8_t flashAvailable = 0;
-    info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
 
-    static const int64_t flashChargeDuration = 0;
-    info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
+    static const uint8_t flashAvailable = 0;
+    ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
 
     // android.tonemap
 
-    static const int32_t tonemapCurvePoints = 128;
-    info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const int32_t tonemapCurvePoints = 128;
+        ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
+
+        static const uint8_t availableToneMapModes[] = {
+            ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
+            ANDROID_TONEMAP_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
+                sizeof(availableToneMapModes));
+    }
 
     // android.scaler
 
-    info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
-            kAvailableFormats,
-            sizeof(kAvailableFormats)/sizeof(int32_t));
+    const std::vector<int32_t> availableStreamConfigurationsBasic = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+    };
+    const std::vector<int32_t> availableStreamConfigurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+    };
+    const std::vector<int32_t> availableStreamConfigurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+    };
 
-    info.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
-            (int32_t*)kAvailableRawSizes,
-            sizeof(kAvailableRawSizes)/sizeof(uint32_t));
+    std::vector<int32_t> availableStreamConfigurations;
 
-    info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
-            (int64_t*)kAvailableRawMinDurations,
-            sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
-
-    if (mFacingBack) {
-        info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
-                (int32_t*)kAvailableProcessedSizesBack,
-                sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
-    } else {
-        info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
-                (int32_t*)kAvailableProcessedSizesFront,
-                sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsBasic.begin(),
+                availableStreamConfigurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsRaw.begin(),
+                availableStreamConfigurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsBurst.begin(),
+                availableStreamConfigurationsBurst.end());
     }
 
-    info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
-            (int64_t*)kAvailableProcessedMinDurations,
-            sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
-
-    if (mFacingBack) {
-        info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
-                (int32_t*)kAvailableJpegSizesBack,
-                sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
-    } else {
-        info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
-                (int32_t*)kAvailableJpegSizesFront,
-                sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
+    if (availableStreamConfigurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                &availableStreamConfigurations[0],
+                availableStreamConfigurations.size());
     }
 
-    info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
-            (int64_t*)kAvailableJpegMinDurations,
-            sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
+    const std::vector<int64_t> availableMinFrameDurationsBasic = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
+    };
+    const std::vector<int64_t> availableMinFrameDurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
+    };
+    const std::vector<int64_t> availableMinFrameDurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, Sensor::kFrameDurationRange[0],
+    };
 
-    static const float maxZoom = 10;
-    info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
-            &maxZoom, 1);
+    std::vector<int64_t> availableMinFrameDurations;
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsBasic.begin(),
+                availableMinFrameDurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsRaw.begin(),
+                availableMinFrameDurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsBurst.begin(),
+                availableMinFrameDurationsBurst.end());
+    }
+
+    if (availableMinFrameDurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                &availableMinFrameDurations[0],
+                availableMinFrameDurations.size());
+    }
+
+    const std::vector<int64_t> availableStallDurationsBasic = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
+        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
+    };
+    const std::vector<int64_t> availableStallDurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
+    };
+    const std::vector<int64_t> availableStallDurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
+        HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, 0
+    };
+
+    std::vector<int64_t> availableStallDurations;
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsBasic.begin(),
+                availableStallDurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsRaw.begin(),
+                availableStallDurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsBurst.begin(),
+                availableStallDurationsBurst.end());
+    }
+
+    if (availableStallDurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+                &availableStallDurations[0],
+                availableStallDurations.size());
+    }
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
+        ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
+                &croppingType, 1);
+
+        static const float maxZoom = 10;
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+                &maxZoom, 1);
+    }
 
     // android.jpeg
 
-    static const int32_t jpegThumbnailSizes[] = {
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t jpegThumbnailSizes[] = {
             0, 0,
             160, 120,
             320, 240
-     };
-    info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
-            jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
+        };
+        ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
 
-    static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
-    info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+        static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
+        ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+    }
 
     // android.stats
 
-    static const uint8_t availableFaceDetectModes[] = {
-        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
-        ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
-        ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
-    };
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableFaceDetectModes[] = {
+            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
+            ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
+            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
+        };
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+                availableFaceDetectModes,
+                sizeof(availableFaceDetectModes));
 
-    info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
-            availableFaceDetectModes,
-            sizeof(availableFaceDetectModes));
+        static const int32_t maxFaceCount = 8;
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+                &maxFaceCount, 1);
 
-    static const int32_t maxFaceCount = 8;
-    info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
-            &maxFaceCount, 1);
 
-    static const int32_t histogramSize = 64;
-    info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
-            &histogramSize, 1);
+        static const uint8_t availableShadingMapModes[] = {
+            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+                availableShadingMapModes, sizeof(availableShadingMapModes));
+    }
 
-    static const int32_t maxHistogramCount = 1000;
-    info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
-            &maxHistogramCount, 1);
+    // android.sync
 
-    static const int32_t sharpnessMapSize[2] = {64, 64};
-    info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
-            sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
-
-    static const int32_t maxSharpnessMapValue = 1000;
-    info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
-            &maxSharpnessMapValue, 1);
+    static const int32_t maxLatency =
+            hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
+    ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
 
     // android.control
 
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableControlModes[] = {
+            ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
+                availableControlModes, sizeof(availableControlModes));
+    } else {
+        static const uint8_t availableControlModes[] = {
+            ANDROID_CONTROL_MODE_AUTO
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
+                availableControlModes, sizeof(availableControlModes));
+    }
+
     static const uint8_t availableSceneModes[] = {
+        hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
             ANDROID_CONTROL_SCENE_MODE_DISABLED
     };
-    info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
             availableSceneModes, sizeof(availableSceneModes));
 
-    static const uint8_t availableEffects[] = {
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableEffects[] = {
             ANDROID_CONTROL_EFFECT_MODE_OFF
-    };
-    info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
-            availableEffects, sizeof(availableEffects));
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+                availableEffects, sizeof(availableEffects));
+    }
 
-    static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
-    info.update(ANDROID_CONTROL_MAX_REGIONS,
-            max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
+                max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
 
-    static const uint8_t availableAeModes[] = {
+        static const uint8_t availableAeModes[] = {
             ANDROID_CONTROL_AE_MODE_OFF,
             ANDROID_CONTROL_AE_MODE_ON
-    };
-    info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
-            availableAeModes, sizeof(availableAeModes));
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+                availableAeModes, sizeof(availableAeModes));
 
-    static const camera_metadata_rational exposureCompensationStep = {
+        static const camera_metadata_rational exposureCompensationStep = {
             1, 3
-    };
-    info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
-            &exposureCompensationStep, 1);
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+                &exposureCompensationStep, 1);
 
-    int32_t exposureCompensationRange[] = {-9, 9};
-    info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
-            exposureCompensationRange,
-            sizeof(exposureCompensationRange)/sizeof(int32_t));
+        int32_t exposureCompensationRange[] = {-9, 9};
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+                exposureCompensationRange,
+                sizeof(exposureCompensationRange)/sizeof(int32_t));
+    }
 
     static const int32_t availableTargetFpsRanges[] = {
-            5, 30, 15, 30
+            5, 30, 15, 30, 15, 15, 30, 30
     };
-    info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
             availableTargetFpsRanges,
             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
 
-    static const uint8_t availableAntibandingModes[] = {
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableAntibandingModes[] = {
             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
-    };
-    info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
-            availableAntibandingModes, sizeof(availableAntibandingModes));
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+                availableAntibandingModes, sizeof(availableAntibandingModes));
+    }
 
-    static const uint8_t availableAwbModes[] = {
+    static const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+            &aeLockAvailable, 1);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableAwbModes[] = {
             ANDROID_CONTROL_AWB_MODE_OFF,
             ANDROID_CONTROL_AWB_MODE_AUTO,
             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
             ANDROID_CONTROL_AWB_MODE_SHADE
-    };
-    info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
-            availableAwbModes, sizeof(availableAwbModes));
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+                availableAwbModes, sizeof(availableAwbModes));
+    }
+
+    static const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+            &awbLockAvailable, 1);
 
     static const uint8_t availableAfModesBack[] = {
             ANDROID_CONTROL_AF_MODE_OFF,
@@ -1296,30 +1530,235 @@
             ANDROID_CONTROL_AF_MODE_OFF
     };
 
-    if (mFacingBack) {
-        info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
-                    availableAfModesBack, sizeof(availableAfModesBack));
+    if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                availableAfModesBack, sizeof(availableAfModesBack));
     } else {
-        info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
-                    availableAfModesFront, sizeof(availableAfModesFront));
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                availableAfModesFront, sizeof(availableAfModesFront));
     }
 
     static const uint8_t availableVstabModes[] = {
-            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
+        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
     };
-    info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
             availableVstabModes, sizeof(availableVstabModes));
 
+    // android.colorCorrection
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableAberrationModes[] = {
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                availableAberrationModes, sizeof(availableAberrationModes));
+    } else {
+        static const uint8_t availableAberrationModes[] = {
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+        };
+        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                availableAberrationModes, sizeof(availableAberrationModes));
+    }
+    // android.edge
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableEdgeModes[] = {
+            ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+                availableEdgeModes, sizeof(availableEdgeModes));
+    } else {
+        static const uint8_t availableEdgeModes[] = {
+            ANDROID_EDGE_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+                availableEdgeModes, sizeof(availableEdgeModes));
+    }
+
     // android.info
-    const uint8_t supportedHardwareLevel =
-        mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
+
+    static const uint8_t supportedHardwareLevel =
+            hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
-    info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+    ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
                 &supportedHardwareLevel,
                 /*count*/1);
 
+    // android.noiseReduction
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableNoiseReductionModes[] = {
+            ANDROID_NOISE_REDUCTION_MODE_OFF,
+            ANDROID_NOISE_REDUCTION_MODE_FAST,
+            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
+    } else {
+        static const uint8_t availableNoiseReductionModes[] = {
+            ANDROID_NOISE_REDUCTION_MODE_OFF,
+        };
+        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
+    }
+
+    // android.depth
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+
+        static const int32_t maxDepthSamples = 100;
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
+                &maxDepthSamples, 1);
+
+        static const int32_t availableDepthStreamConfigurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                availableDepthStreamConfigurations,
+                sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
+
+        static const int64_t availableDepthMinFrameDurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
+                availableDepthMinFrameDurations,
+                sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
+
+        static const int64_t availableDepthStallDurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
+                availableDepthStallDurations,
+                sizeof(availableDepthStallDurations)/sizeof(int64_t));
+
+        uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
+                &depthIsExclusive, 1);
+    }
+
+    // android.shading
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableShadingModes[] = {
+            ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
+                sizeof(availableShadingModes));
+    } else {
+        static const uint8_t availableShadingModes[] = {
+            ANDROID_SHADING_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
+                sizeof(availableShadingModes));
+    }
+
+    // android.request
+
+    static const int32_t maxNumOutputStreams[] = {
+            kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
+    };
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
+
+    static const uint8_t maxPipelineDepth = kMaxBufferCount;
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
+
+    static const int32_t partialResultCount = 1;
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+            &partialResultCount, /*count*/1);
+
+    SortedVector<uint8_t> caps;
+    for (size_t i = 0; i < mCapabilities.size(); i++) {
+        switch(mCapabilities[i]) {
+            case BACKWARD_COMPATIBLE:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+                break;
+            case MANUAL_SENSOR:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
+                break;
+            case MANUAL_POST_PROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
+                break;
+            case RAW:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+                break;
+            case PRIVATE_REPROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
+                break;
+            case READ_SENSOR_SETTINGS:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
+                break;
+            case BURST_CAPTURE:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
+                break;
+            case YUV_REPROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+                break;
+            case DEPTH_OUTPUT:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+                break;
+            case CONSTRAINED_HIGH_SPEED_VIDEO:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
+                break;
+            default:
+                // Ignore LEVELs
+                break;
+        }
+    }
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
+
+    // Scan a default request template for included request keys
+    Vector<int32_t> availableRequestKeys;
+    const camera_metadata_t *previewRequest =
+        constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
+    for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
+        camera_metadata_ro_entry_t entry;
+        get_camera_metadata_ro_entry(previewRequest, i, &entry);
+        availableRequestKeys.add(entry.tag);
+    }
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
+            availableRequestKeys.size());
+
+    // Add a few more result keys. Must be kept up to date with the various places that add these
+
+    Vector<int32_t> availableResultKeys(availableRequestKeys);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
+        availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
+        availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
+        availableResultKeys.add(ANDROID_FLASH_STATE);
+        availableResultKeys.add(ANDROID_LENS_STATE);
+        availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
+        availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
+        availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
+    }
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+        availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
+        availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
+        availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
+        availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
+    }
+
+    availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
+    availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
+
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
+            availableResultKeys.size());
+
+    // Needs to be last, to collect all the keys set
+
+    availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+            availableCharacteristicsKeys);
+
     mCameraInfo = info.release();
 
+#undef ADD_STATIC_ENTRY
     return OK;
 }
 
@@ -1340,13 +1779,6 @@
     }
     uint8_t controlMode = e.data.u8[0];
 
-    e = settings.find(ANDROID_CONTROL_SCENE_MODE);
-    if (e.count == 0) {
-        ALOGE("%s: No scene mode entry!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    uint8_t sceneMode = e.data.u8[0];
-
     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
@@ -1354,6 +1786,19 @@
         update3A(settings);
         return OK;
     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+        if (!hasCapability(BACKWARD_COMPATIBLE)) {
+            ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
+                  __FUNCTION__);
+            return BAD_VALUE;
+        }
+
+        e = settings.find(ANDROID_CONTROL_SCENE_MODE);
+        if (e.count == 0) {
+            ALOGE("%s: No scene mode entry!", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        uint8_t sceneMode = e.data.u8[0];
+
         switch(sceneMode) {
             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
                 mFacePriority = true;
@@ -1387,11 +1832,11 @@
     camera_metadata_entry e;
 
     e = settings.find(ANDROID_CONTROL_AE_MODE);
-    if (e.count == 0) {
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
         ALOGE("%s: No AE mode entry!", __FUNCTION__);
         return BAD_VALUE;
     }
-    uint8_t aeMode = e.data.u8[0];
+    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
 
     switch (aeMode) {
         case ANDROID_CONTROL_AE_MODE_OFF:
@@ -1402,17 +1847,14 @@
             // OK for AUTO modes
             break;
         default:
-            ALOGE("%s: Emulator doesn't support AE mode %d",
+            // Mostly silently ignore unsupported modes
+            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
                     __FUNCTION__, aeMode);
-            return BAD_VALUE;
+            break;
     }
 
     e = settings.find(ANDROID_CONTROL_AE_LOCK);
-    if (e.count == 0) {
-        ALOGE("%s: No AE lock entry!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
+    bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
 
     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
     bool precaptureTrigger = false;
@@ -1429,20 +1871,6 @@
               e.count);
     }
 
-    // If we have an aePrecaptureTrigger, aePrecaptureId should be set too
-    if (e.count != 0) {
-        e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
-
-        if (e.count == 0) {
-            ALOGE("%s: When android.control.aePrecaptureTrigger is set "
-                  " in the request, aePrecaptureId needs to be set as well",
-                  __FUNCTION__);
-            return BAD_VALUE;
-        }
-
-        mAeTriggerId = e.data.i32[0];
-    }
-
     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
         // Run precapture sequence
         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
@@ -1520,32 +1948,19 @@
     camera_metadata_entry e;
 
     e = settings.find(ANDROID_CONTROL_AF_MODE);
-    if (e.count == 0) {
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
         ALOGE("%s: No AF mode entry!", __FUNCTION__);
         return BAD_VALUE;
     }
-    uint8_t afMode = e.data.u8[0];
+    uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
 
     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
     af_trigger_t afTrigger;
-    // If we have an afTrigger, afTriggerId should be set too
     if (e.count != 0) {
         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
 
-        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);
-
-        if (e.count == 0) {
-            ALOGE("%s: When android.control.afTrigger is set "
-                  " in the request, afTriggerId needs to be set as well",
-                  __FUNCTION__);
-            return BAD_VALUE;
-        }
-
-        mAfTriggerId = e.data.i32[0];
-
         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
-        ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
     } else {
         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
@@ -1742,11 +2157,11 @@
     camera_metadata_entry e;
 
     e = settings.find(ANDROID_CONTROL_AWB_MODE);
-    if (e.count == 0) {
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
         return BAD_VALUE;
     }
-    uint8_t awbMode = e.data.u8[0];
+    uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
 
     // TODO: Add white balance simulation
 
@@ -1772,7 +2187,7 @@
 
 
 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
-    if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
+    if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
                 &mAeCurrentExposureTime, 1);
         settings.update(ANDROID_SENSOR_SENSITIVITY,
@@ -1785,13 +2200,24 @@
             &mAfState, 1);
     settings.update(ANDROID_CONTROL_AWB_STATE,
             &mAwbState, 1);
-    /**
-     * TODO: Trigger IDs need a think-through
-     */
-    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
-            &mAeTriggerId, 1);
-    settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
-            &mAfTriggerId, 1);
+
+    uint8_t lensState;
+    switch (mAfState) {
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
+            lensState = ANDROID_LENS_STATE_MOVING;
+            break;
+        case ANDROID_CONTROL_AF_STATE_INACTIVE:
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
+        default:
+            lensState = ANDROID_LENS_STATE_STATIONARY;
+            break;
+    }
+    settings.update(ANDROID_LENS_STATE, &lensState, 1);
+
 }
 
 void EmulatedFakeCamera3::signalReadoutIdle() {
@@ -1925,7 +2351,7 @@
     while(buf != mCurrentRequest.buffers->end()) {
         bool goodBuffer = true;
         if ( buf->stream->format ==
-                HAL_PIXEL_FORMAT_BLOB) {
+                HAL_PIXEL_FORMAT_BLOB && buf->stream->data_space != HAL_DATASPACE_DEPTH) {
             Mutex::Autolock jl(mJpegLock);
             if (mJpegWaiting) {
                 // This shouldn't happen, because processCaptureRequest should
@@ -1969,13 +2395,59 @@
 
     camera3_capture_result result;
 
+    if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+        mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
+                &sceneFlicker, 1);
+
+        static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
+        mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
+                &flashState, 1);
+
+        nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
+        mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
+                &rollingShutterSkew, 1);
+
+        float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
+        mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
+                focusRange, sizeof(focusRange)/sizeof(float));
+    }
+
+    if (mParent->hasCapability(DEPTH_OUTPUT)) {
+        camera_metadata_entry_t entry;
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
+                entry.data.f, entry.count);
+    }
+
     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
             &captureTime, 1);
 
+
+    // JPEGs take a stage longer
+    const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
+    mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
+            &pipelineDepth, 1);
+
     result.frame_number = mCurrentRequest.frameNumber;
     result.result = mCurrentRequest.settings.getAndLock();
     result.num_output_buffers = mCurrentRequest.buffers->size();
     result.output_buffers = mCurrentRequest.buffers->array();
+    result.input_buffer = nullptr;
+    result.partial_result = 1;
 
     // Go idle if queue is empty, before sending result
     bool signalIdle = false;
diff --git a/camera/EmulatedFakeCamera3.h b/camera/EmulatedFakeCamera3.h
index 0889813..4835009 100644
--- a/camera/EmulatedFakeCamera3.h
+++ b/camera/EmulatedFakeCamera3.h
@@ -28,6 +28,7 @@
 #include "fake-pipeline2/Sensor.h"
 #include "fake-pipeline2/JpegCompressor.h"
 #include <camera/CameraMetadata.h>
+#include <utils/SortedVector.h>
 #include <utils/List.h>
 #include <utils/Mutex.h>
 
@@ -87,20 +88,22 @@
 
     virtual status_t processCaptureRequest(camera3_capture_request *request);
 
+    virtual status_t flush();
+
     /** Debug methods */
 
     virtual void dump(int fd);
 
-    /** Tag query methods */
-    virtual const char *getVendorSectionName(uint32_t tag);
-
-    virtual const char *getVendorTagName(uint32_t tag);
-
-    virtual int getVendorTagType(uint32_t tag);
-
 private:
 
     /**
+     * Get the requested capability set for this camera
+     */
+    status_t getCameraCapabilities();
+
+    bool hasCapability(AvailableCapabilities cap);
+
+    /**
      * Build the static info metadata buffer for this device
      */
     status_t constructStaticInfo();
@@ -136,14 +139,6 @@
     // no concept of a stream id.
     static const uint32_t kGenericStreamId = 1;
     static const int32_t  kAvailableFormats[];
-    static const uint32_t kAvailableRawSizes[];
-    static const uint64_t kAvailableRawMinDurations[];
-    static const uint32_t kAvailableProcessedSizesBack[];
-    static const uint32_t kAvailableProcessedSizesFront[];
-    static const uint64_t kAvailableProcessedMinDurations[];
-    static const uint32_t kAvailableJpegSizesBack[];
-    static const uint32_t kAvailableJpegSizesFront[];
-    static const uint64_t kAvailableJpegMinDurations[];
 
     static const int64_t  kSyncWaitTimeout     = 10000000; // 10 ms
     static const int32_t  kMaxSyncTimeoutCount = 1000; // 1000 kSyncWaitTimeouts
@@ -159,8 +154,7 @@
     /* Facing back (true) or front (false) switch. */
     bool               mFacingBack;
 
-    /* Full mode (true) or limited mode (false) switch */
-    bool               mFullMode;
+    SortedVector<AvailableCapabilities> mCapabilities;
 
     /**
      * Cache for default templates. Once one is requested, the pointer must be
@@ -173,7 +167,6 @@
      */
     struct PrivateStreamInfo {
         bool alive;
-        bool registered;
     };
 
     // Shortcut to the input stream
@@ -283,8 +276,6 @@
     uint8_t mAeMode;
     uint8_t mAfMode;
     uint8_t mAwbMode;
-    int     mAfTriggerId;
-    int     mAeTriggerId;
 
     int     mAeCounter;
     nsecs_t mAeCurrentExposureTime;
diff --git a/camera/fake-pipeline2/Base.h b/camera/fake-pipeline2/Base.h
index 057629b..724b3f9 100644
--- a/camera/fake-pipeline2/Base.h
+++ b/camera/fake-pipeline2/Base.h
@@ -37,6 +37,7 @@
     int streamId;
     uint32_t width, height;
     uint32_t format;
+    uint32_t dataSpace;
     uint32_t stride;
     buffer_handle_t *buffer;
     uint8_t *img;
diff --git a/camera/fake-pipeline2/Sensor.cpp b/camera/fake-pipeline2/Sensor.cpp
index aa54ef4..61c7f28 100644
--- a/camera/fake-pipeline2/Sensor.cpp
+++ b/camera/fake-pipeline2/Sensor.cpp
@@ -35,11 +35,17 @@
 namespace android {
 
 const unsigned int Sensor::kResolution[2]  = {640, 480};
+const unsigned int Sensor::kActiveArray[4]  = {0, 0, 640, 480};
 
+//const nsecs_t Sensor::kExposureTimeRange[2] =
+//    {1000L, 30000000000L} ; // 1 us - 30 sec
+//const nsecs_t Sensor::kFrameDurationRange[2] =
+//    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
 const nsecs_t Sensor::kExposureTimeRange[2] =
-    {1000L, 30000000000L} ; // 1 us - 30 sec
+    {1000L, 300000000L} ; // 1 us - 0.3 sec
 const nsecs_t Sensor::kFrameDurationRange[2] =
-    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
+    {33331760L, 300000000L}; // ~1/30 s - 0.3 sec
+
 const nsecs_t Sensor::kMinVerticalBlank = 10000L;
 
 const uint8_t Sensor::kColorFilterArrangement =
@@ -323,7 +329,7 @@
                     i, b.streamId, b.width, b.height, b.format, b.stride,
                     b.buffer, b.img);
             switch(b.format) {
-                case HAL_PIXEL_FORMAT_RAW_SENSOR:
+                case HAL_PIXEL_FORMAT_RAW16:
                     captureRaw(b.img, gain, b.stride);
                     break;
                 case HAL_PIXEL_FORMAT_RGB_888:
@@ -333,19 +339,23 @@
                     captureRGBA(b.img, gain, b.stride);
                     break;
                 case HAL_PIXEL_FORMAT_BLOB:
-                    // Add auxillary buffer of the right size
-                    // Assumes only one BLOB (JPEG) buffer in
-                    // mNextCapturedBuffers
-                    StreamBuffer bAux;
-                    bAux.streamId = 0;
-                    bAux.width = b.width;
-                    bAux.height = b.height;
-                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
-                    bAux.stride = b.width;
-                    bAux.buffer = NULL;
-                    // TODO: Reuse these
-                    bAux.img = new uint8_t[b.width * b.height * 3];
-                    mNextCapturedBuffers->push_back(bAux);
+                    if (b.dataSpace != HAL_DATASPACE_DEPTH) {
+                        // Add auxiliary buffer of the right size
+                        // Assumes only one BLOB (JPEG) buffer in
+                        // mNextCapturedBuffers
+                        StreamBuffer bAux;
+                        bAux.streamId = 0;
+                        bAux.width = b.width;
+                        bAux.height = b.height;
+                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
+                        bAux.stride = b.width;
+                        bAux.buffer = NULL;
+                        // TODO: Reuse these
+                        bAux.img = new uint8_t[b.width * b.height * 3];
+                        mNextCapturedBuffers->push_back(bAux);
+                    } else {
+                        captureDepthCloud(b.img);
+                    }
                     break;
                 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
                     captureNV21(b.img, gain, b.stride);
@@ -354,6 +364,9 @@
                     // TODO:
                     ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
                     break;
+                case HAL_PIXEL_FORMAT_Y16:
+                    captureDepth(b.img, gain, b.stride);
+                    break;
                 default:
                     ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                             b.format);
@@ -537,4 +550,61 @@
     ALOGVV("NV21 sensor image captured");
 }
 
+void Sensor::captureDepth(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate scaling factor to 13bpp millimeters
+    int scale64x = 64 * totalGain * 8191 / kMaxRawValue;
+    uint32_t inc = kResolution[0] / stride;
+
+    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
+        mScene.setReadoutPixel(0, y);
+        uint16_t *px = ((uint16_t*)img) + outY * stride;
+        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
+            uint32_t depthCount;
+            // TODO: Make up real depth scene instead of using green channel
+            // as depth
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            depthCount = pixel[Scene::Gr] * scale64x;
+
+            *px++ = depthCount < 8191*64 ? depthCount / 64 : 0;
+            for (unsigned int j = 1; j < inc; j++)
+                mScene.getPixelElectrons();
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("Depth sensor image captured");
+}
+
+void Sensor::captureDepthCloud(uint8_t *img) {
+
+    android_depth_points *cloud = reinterpret_cast<android_depth_points*>(img);
+
+    cloud->num_points = 16;
+
+    // TODO: Create point cloud values that match RGB scene
+    const int FLOATS_PER_POINT = 4;
+    const float JITTER_STDDEV = 0.1f;
+    for (size_t y = 0, i = 0; y < 4; y++) {
+        for (size_t x = 0; x < 4; x++, i++) {
+            float randSampleX = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
+            randSampleX *= JITTER_STDDEV;
+
+            float randSampleY = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
+            randSampleY *= JITTER_STDDEV;
+
+            float randSampleZ = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
+            randSampleZ *= JITTER_STDDEV;
+
+            cloud->xyzc_points[i * FLOATS_PER_POINT + 0] = x - 1.5f + randSampleX;
+            cloud->xyzc_points[i * FLOATS_PER_POINT + 1] = y - 1.5f + randSampleY;
+            cloud->xyzc_points[i * FLOATS_PER_POINT + 2] = 3.f + randSampleZ;
+            cloud->xyzc_points[i * FLOATS_PER_POINT + 3] = 0.8f;
+        }
+    }
+
+    ALOGVV("Depth point cloud captured");
+
+}
+
 } // namespace android
diff --git a/camera/fake-pipeline2/Sensor.h b/camera/fake-pipeline2/Sensor.h
index b485844..720fbc2 100644
--- a/camera/fake-pipeline2/Sensor.h
+++ b/camera/fake-pipeline2/Sensor.h
@@ -158,6 +158,7 @@
      * Static sensor characteristics
      */
     static const unsigned int kResolution[2];
+    static const unsigned int kActiveArray[4];
 
     static const nsecs_t kExposureTimeRange[2];
     static const nsecs_t kFrameDurationRange[2];
@@ -234,6 +235,9 @@
     void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
     void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
     void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureDepth(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureDepthCloud(uint8_t *img);
+
 };
 
 }
diff --git a/camera/media_profiles.xml b/camera/media_profiles.xml
index 42ceb8d..cd99857 100644
--- a/camera/media_profiles.xml
+++ b/camera/media_profiles.xml
@@ -70,16 +70,6 @@
 <!ELEMENT AudioDecoderCap EMPTY>
 <!ATTLIST AudioDecoderCap name (wma) #REQUIRED>
 <!ATTLIST AudioDecoderCap enabled (true|false) #REQUIRED>
-<!ELEMENT VideoEditorCap EMPTY>
-<!ATTLIST VideoEditorCap maxInputFrameWidth CDATA #REQUIRED>
-<!ATTLIST VideoEditorCap maxInputFrameHeight CDATA #REQUIRED>
-<!ATTLIST VideoEditorCap maxOutputFrameWidth CDATA #REQUIRED>
-<!ATTLIST VideoEditorCap maxOutputFrameHeight CDATA #REQUIRED>
-<!ATTLIST VideoEditorCap maxPrefetchYUVFrames CDATA #REQUIRED>
-<!ELEMENT ExportVideoProfile EMPTY>
-<!ATTLIST ExportVideoProfile name (h264|h263|m4v) #REQUIRED>
-<!ATTLIST ExportVideoProfile profile CDATA #REQUIRED>
-<!ATTLIST ExportVideoProfile level CDATA #REQUIRED>
 ]>
 <!--
      This file is used to declare the multimedia profiles and capabilities
@@ -375,40 +365,4 @@
     -->
     <VideoDecoderCap name="wmv" enabled="false"/>
     <AudioDecoderCap name="wma" enabled="false"/>
-
-    <!--
-        The VideoEditor Capability configuration:
-        - maxInputFrameWidth: maximum video width of imported video clip.
-        - maxInputFrameHeight: maximum video height of imported video clip.
-        - maxOutputFrameWidth: maximum video width of exported video clip.
-        - maxOutputFrameHeight: maximum video height of exported video clip.
-        - maxPrefetchYUVFrames: maximum prefetch YUV frames for encoder,
-        used to limit the amount of memory for prefetched YUV frames.
-        For this platform, it allows maximum ~1MB(~0.1MB per QVGA frame x 10
-        frames) memory.
-    -->
-
-    <VideoEditorCap  maxInputFrameWidth="320"
-        maxInputFrameHeight="240" maxOutputFrameWidth="320"
-        maxOutputFrameHeight="240" maxPrefetchYUVFrames="10" />
-    <!--
-        The VideoEditor Export codec profile and level values
-        correspond to the values in OMX_Video.h.
-        E.g. for h264, profile value 1 means OMX_VIDEO_AVCProfileBaseline
-        and  level 4096 means OMX_VIDEO_AVCLevel41.
-        Please note that the values are in decimal.
-        These values are for video encoder.
-    -->
-    <!--
-      Codec = h.264, Baseline profile, level 4.1
-    -->
-    <ExportVideoProfile name="h264" profile= "1" level="512"/>
-    <!--
-      Codec = h.263, Baseline profile, level 0
-    -->
-    <ExportVideoProfile name="h263" profile= "1" level="1"/>
-    <!--
-      Codec = mpeg4, Simple profile, level 3
-    -->
-    <ExportVideoProfile name="m4v" profile= "1" level="16"/>
 </MediaSettings>
diff --git a/data/etc/handheld_core_hardware.xml b/data/etc/handheld_core_hardware.xml
new file mode 100644
index 0000000..7da8ae1
--- /dev/null
+++ b/data/etc/handheld_core_hardware.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2009 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- These are the hardware components that all handheld devices
+     must include. Devices with optional hardware must also include extra
+     hardware files, per the comments below.
+
+     Handheld devices include phones, mobile Internet devices (MIDs),
+     Personal Media Players (PMPs), small tablets (7" or less), and similar
+     devices.
+
+     This file is identical to frameworks/native/data/etc/handheld_core_hardware.xml,
+     except that the android.hardware.bluetooth feature is omitted.
+-->
+<permissions>
+    <feature name="android.hardware.audio.output" />
+    <feature name="android.hardware.camera" />
+    <feature name="android.hardware.location" />
+    <feature name="android.hardware.location.network" />
+    <feature name="android.hardware.sensor.compass" />
+    <feature name="android.hardware.sensor.accelerometer" />
+    <feature name="android.hardware.touchscreen" />
+    <feature name="android.hardware.microphone" />
+    <feature name="android.hardware.screen.portrait" />
+    <feature name="android.hardware.screen.landscape" />
+
+    <!-- basic system services -->
+    <feature name="android.software.app_widgets" />
+    <feature name="android.software.connectionservice" />
+    <feature name="android.software.voice_recognizers" notLowRam="true" />
+    <feature name="android.software.backup" />
+    <feature name="android.software.home_screen" />
+    <feature name="android.software.input_methods" />
+    <feature name="android.software.midi" />
+    <feature name="android.software.print" />
+
+    <!-- Feature to specify if the device supports adding device admins. -->
+    <feature name="android.software.device_admin" />
+
+    <!-- Feature to specify if the device support managed users. -->
+    <feature name="android.software.managed_users" />
+
+    <!-- devices with GPS must include android.hardware.location.gps.xml -->
+    <!-- devices with an autofocus camera and/or flash must include either
+         android.hardware.camera.autofocus.xml or
+         android.hardware.camera.autofocus-flash.xml -->
+    <!-- devices with a front facing camera must include
+         android.hardware.camera.front.xml -->
+    <!-- devices with WiFi must also include android.hardware.wifi.xml -->
+    <!-- devices that support multitouch must include the most appropriate one
+         of these files:
+
+         If only partial (non-independent) pointers are supported:
+         android.hardware.touchscreen.multitouch.xml
+
+         If up to 4 independently tracked pointers are supported:
+         include android.hardware.touchscreen.multitouch.distinct.xml
+
+         If 5 or more independently tracked pointers are supported:
+         include android.hardware.touchscreen.multitouch.jazzhand.xml
+
+         ONLY ONE of the above should be included. -->
+    <!-- devices with an ambient light sensor must also include
+         android.hardware.sensor.light.xml -->
+    <!-- devices with a proximity sensor must also include
+         android.hardware.sensor.proximity.xml -->
+    <!-- GSM phones must also include android.hardware.telephony.gsm.xml -->
+    <!-- CDMA phones must also include android.hardware.telephony.cdma.xml -->
+    <!-- Devices that have low-latency audio stacks suitable for apps like
+         VoIP may include android.hardware.audio.low_latency.xml. ONLY apps
+         that meet the requirements specified in the CDD may include this. -->
+</permissions>
diff --git a/fingerprint/Android.mk b/fingerprint/Android.mk
new file mode 100644
index 0000000..f4b76c9
--- /dev/null
+++ b/fingerprint/Android.mk
@@ -0,0 +1,33 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fingerprint.goldfish
+LOCAL_MODULE_RELATIVE_PATH := hw
+LOCAL_SRC_FILES := fingerprint.c
+LOCAL_SHARED_LIBRARIES := liblog
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fingerprint.ranchu
+LOCAL_MODULE_RELATIVE_PATH := hw
+LOCAL_SRC_FILES := fingerprint.c
+LOCAL_SHARED_LIBRARIES := liblog
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/fingerprint/fingerprint.c b/fingerprint/fingerprint.c
new file mode 100644
index 0000000..c3dc787
--- /dev/null
+++ b/fingerprint/fingerprint.c
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define  FINGERPRINT_LISTEN_SERVICE_NAME "fingerprintlisten"
+#define  FINGERPRINT_TXT_FILENAME "/data/fingerprint.txt"
+
+#define LOG_TAG "FingerprintHal"
+#define MAX_NUM_FINGERS 32
+
+#include <errno.h>
+#include <endian.h>
+#include <inttypes.h>
+#include <malloc.h>
+#include <string.h>
+#include <cutils/log.h>
+#include <hardware/hardware.h>
+#include <hardware/fingerprint.h>
+#include <hardware/qemud.h>
+
+typedef enum worker_state_t {
+    STATE_ENROLL = 1,
+    STATE_SCAN = 2,
+    STATE_IDLE = 3,
+    STATE_EXIT = 4
+} worker_state_t;
+
+typedef struct worker_thread_t {
+    pthread_t thread;
+    pthread_mutex_t mutex;
+    int request;
+    worker_state_t state;
+    int fingerid;
+    int finger_is_on;
+    int all_fingerids[MAX_NUM_FINGERS];
+    uint64_t all_secureids[MAX_NUM_FINGERS];
+    uint64_t all_authenids[MAX_NUM_FINGERS];
+    int num_fingers_enrolled;
+    FILE *fp_write;
+} worker_thread_t;
+
+typedef struct emu_fingerprint_hal_device_t {
+    fingerprint_device_t device; //inheritance
+    worker_thread_t listener;
+    uint64_t op_id;
+    uint64_t challenge;
+    uint64_t secure_user_id;
+    uint64_t user_id;
+    uint64_t authenticator_id;
+    pthread_mutex_t lock;
+} emu_fingerprint_hal_device_t;
+
+static uint64_t get_64bit_rand() {
+    return (((uint64_t) rand()) << 32) | ((uint64_t) rand());
+}
+
+static void destroyListenerThread(emu_fingerprint_hal_device_t* dev)
+{
+    pthread_join(dev->listener.thread, NULL);
+    pthread_mutex_destroy(&dev->listener.mutex);
+}
+
+bool finger_already_enrolled(emu_fingerprint_hal_device_t* dev) {
+    int i;
+    for (i = 0; i < dev->listener.num_fingers_enrolled; ++ i) {
+        if (dev->listener.fingerid == dev->listener.all_fingerids[i % MAX_NUM_FINGERS]) {
+            dev->secure_user_id = dev->listener.all_secureids[i % MAX_NUM_FINGERS];
+            dev->authenticator_id = dev->listener.all_authenids[i % MAX_NUM_FINGERS];
+            return true;
+        }
+    }
+    return false;
+}
+
+static void save_fingerid(FILE* fp, int fingerid, uint64_t secureid, uint64_t authenid) {
+    if (!fp) return;
+    fprintf(fp, " %d %" PRIu64 " %" PRIu64, fingerid, secureid, authenid);
+    fflush(fp);
+}
+
+static void listener_send_notice(emu_fingerprint_hal_device_t* dev)
+{
+    fingerprint_msg_t message = {0};
+    bool is_authentication = false;
+    bool is_valid_finger = false;
+    pthread_mutex_lock(&dev->listener.mutex);
+    if (dev->listener.state == STATE_ENROLL) {
+        message.type = FINGERPRINT_TEMPLATE_ENROLLING;
+        message.data.enroll.finger.fid = dev->listener.fingerid;
+        message.data.enroll.samples_remaining = 0;
+        dev->authenticator_id = get_64bit_rand();
+        dev->listener.state = STATE_SCAN;
+        if (!finger_already_enrolled(dev)) {
+            dev->listener.all_fingerids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = dev->listener.fingerid;
+            dev->listener.all_secureids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = dev->secure_user_id;
+            dev->listener.all_authenids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = dev->authenticator_id;
+            ++ dev->listener.num_fingers_enrolled;
+            save_fingerid(dev->listener.fp_write, dev->listener.fingerid, dev->secure_user_id, dev->authenticator_id);
+            is_valid_finger = true;
+        }
+    } else {
+        is_authentication = true;
+        is_valid_finger = finger_already_enrolled(dev);
+        message.type = FINGERPRINT_AUTHENTICATED;
+        message.data.authenticated.finger.gid = 0;
+        message.data.authenticated.finger.fid = is_valid_finger ? dev->listener.fingerid : 0;
+        message.data.authenticated.hat.version = HW_AUTH_TOKEN_VERSION;
+        message.data.authenticated.hat.authenticator_type = htobe32(HW_AUTH_FINGERPRINT);
+        message.data.authenticated.hat.challenge = dev->op_id;
+        message.data.authenticated.hat.authenticator_id = dev->authenticator_id;
+        message.data.authenticated.hat.user_id = dev->secure_user_id;
+        struct timespec ts;
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+        message.data.authenticated.hat.timestamp =
+            htobe64((uint64_t)ts.tv_sec * 1000 + ts.tv_nsec / 1000000);
+    }
+    pthread_mutex_unlock(&dev->listener.mutex);
+
+    pthread_mutex_lock(&dev->lock);
+    if (is_authentication) {
+        fingerprint_msg_t acquired_message = {0};
+        acquired_message.type = FINGERPRINT_ACQUIRED;
+        acquired_message.data.acquired.acquired_info = FINGERPRINT_ACQUIRED_GOOD;
+        dev->device.notify(&acquired_message);
+    }
+    if (is_valid_finger || is_authentication) {
+        dev->device.notify(&message);
+    }
+    pthread_mutex_unlock(&dev->lock);
+}
+
+static void* listenerFunction(void* data)
+{
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) data;
+
+    int fd = qemud_channel_open(FINGERPRINT_LISTEN_SERVICE_NAME);
+    if (fd < 0) {
+        ALOGE("listener cannot open fingerprint listener service exit");
+        return NULL;
+    }
+
+    const char* cmd = "listen";
+    if (qemud_channel_send(fd, cmd, strlen(cmd)) < 0) {
+        ALOGE("cannot write fingerprint 'listen' to host");
+        return NULL;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_FINGERS; ++ i) {
+        dev->listener.all_fingerids[i] = 0;
+    }
+    //read registered fingerprint ids from /data/local/fingerprint.txt
+    //TODO: store it in a better location
+    dev->listener.num_fingers_enrolled = 0;
+    FILE* fp_stored = fopen(FINGERPRINT_TXT_FILENAME, "r");
+    if (fp_stored) {
+        while (1) {
+            int fingerid = 0;
+            uint64_t secureid = 0;
+            uint64_t authenid = 0;
+            if(fscanf(fp_stored, "%d %" SCNu64 " %" SCNu64, &fingerid, &secureid, &authenid) == 3) {
+                dev->listener.all_fingerids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = fingerid;
+                dev->listener.all_secureids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = secureid;
+                dev->listener.all_authenids[dev->listener.num_fingers_enrolled % MAX_NUM_FINGERS] = authenid;
+                ++ dev->listener.num_fingers_enrolled;
+            } else {
+                break;
+            }
+        }
+        fclose(fp_stored);
+    }
+
+    dev->listener.fp_write = fopen(FINGERPRINT_TXT_FILENAME, "a");
+
+    char buffer[128];
+    int fingerid=-1;
+    int size;
+    while (1) {
+        //simply listen in blocking mode
+        if ((size = qemud_channel_recv(fd, buffer, sizeof buffer - 1)) >0) {
+            buffer[size] = '\0';
+            if (sscanf(buffer, "on:%d", &fingerid) == 1) {
+                if (fingerid > 0 ) {
+                    dev->listener.fingerid = fingerid;
+                    dev->listener.finger_is_on = 1;
+                    ALOGD("got finger %d", fingerid);
+                    listener_send_notice(dev);
+                    ALOGD("send notice finger %d", fingerid);
+                }
+                else {
+                    ALOGE("finger id should be positive");
+                }
+            } else if (strncmp("off", buffer, 3) == 0) {
+                dev->listener.finger_is_on = 0;
+                ALOGD("finger off %d", fingerid);
+            } else {
+                ALOGE("error: '%s'", buffer);
+            }
+        } else {
+            ALOGE("receive failure");
+            // return NULL;
+        }
+        //TODO: check for request to exit thread
+    }
+
+    ALOGD("listener exit");
+    return NULL;
+}
+
+static void createListenerThread(emu_fingerprint_hal_device_t* dev)
+{
+    pthread_mutex_init(&dev->listener.mutex, NULL);
+    pthread_create(&dev->listener.thread, NULL, listenerFunction, dev);
+}
+
+static int fingerprint_close(hw_device_t *dev)
+{
+    if (dev) {
+        destroyListenerThread((emu_fingerprint_hal_device_t*) dev);
+        free(dev);
+        return 0;
+    } else {
+        return -1;
+    }
+}
+
+static void setListenerState(emu_fingerprint_hal_device_t* dev, worker_state_t state) {
+    pthread_mutex_lock(&dev->listener.mutex);
+    dev->listener.state = state;
+    pthread_mutex_unlock(&dev->listener.mutex);
+}
+
+static uint64_t fingerprint_get_auth_id(struct fingerprint_device *device) {
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) device;
+    return dev->authenticator_id;
+}
+
+static int fingerprint_set_active_group(struct fingerprint_device __unused *device, uint32_t gid,
+        const char *path) {
+    // TODO: implements me
+    return 0;
+}
+
+static int fingerprint_authenticate(struct fingerprint_device *device,
+    uint64_t operation_id, __unused uint32_t gid)
+{
+    ALOGD("fingerprint_authenticate");
+
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) device;
+    pthread_mutex_lock(&dev->lock);
+    dev->op_id = operation_id;
+    pthread_mutex_unlock(&dev->lock);
+    setListenerState(dev, STATE_SCAN);
+    return 0;
+}
+
+static int fingerprint_enroll(struct fingerprint_device *device,
+        const hw_auth_token_t *hat,
+        uint32_t __unused gid,
+        uint32_t __unused timeout_sec) {
+    ALOGD("fingerprint_enroll");
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) device;
+    if (hat && hat->challenge == dev->challenge) {
+        dev->secure_user_id = hat->user_id;
+    } else {
+        ALOGW("%s: invalid or null auth token", __func__);
+    }
+
+    if (hat == NULL || hat->version != HW_AUTH_TOKEN_VERSION) {
+        return -EPROTONOSUPPORT;
+    }
+    if (hat->challenge != dev->challenge && !(hat->authenticator_type & HW_AUTH_FINGERPRINT)) {
+        return -EPERM;
+    }
+
+    dev->user_id = hat->user_id;
+
+    // TODO: store enrolled fingerprints, authenticator id, and secure_user_id
+    setListenerState(dev, STATE_ENROLL);
+    return 0;
+
+}
+
+static uint64_t fingerprint_pre_enroll(struct fingerprint_device *device) {
+    ALOGD("fingerprint_pre_enroll");
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) device;
+    dev->challenge = get_64bit_rand();
+    return dev->challenge;
+}
+
+static int fingerprint_cancel(struct fingerprint_device *device) {
+    ALOGD("fingerprint_cancel");
+    emu_fingerprint_hal_device_t* dev = (emu_fingerprint_hal_device_t*) device;
+    setListenerState(dev, STATE_IDLE);
+    return 0;
+}
+
+static int fingerprint_enumerate(struct fingerprint_device *device,
+        fingerprint_finger_id_t *results, uint32_t *max_size) {
+    // TODO: implement me
+    return 0;
+}
+
+static int fingerprint_remove(struct fingerprint_device __unused *dev,
+        uint32_t __unused gid, uint32_t __unused fid) {
+    // TODO: implement enroll and remove, and set dev->authenticator_id = 0 when no FPs enrolled
+    return FINGERPRINT_ERROR;
+}
+
+static int set_notify_callback(struct fingerprint_device *device,
+                                fingerprint_notify_t notify) {
+    ALOGD("set_notify");
+    emu_fingerprint_hal_device_t* dev =(emu_fingerprint_hal_device_t*) device;
+    pthread_mutex_lock(&dev->lock);
+    device->notify = notify;
+    pthread_mutex_unlock(&dev->lock);
+    return 0;
+}
+
+static int fingerprint_open(const hw_module_t* module, const char __unused *id,
+                            hw_device_t** device)
+{
+    if (device == NULL) {
+        ALOGE("NULL device on open");
+        return -EINVAL;
+    } else {
+        ALOGD("fingerprint open\n");
+    }
+
+    emu_fingerprint_hal_device_t *dev = malloc(sizeof(emu_fingerprint_hal_device_t));
+    memset(dev, 0, sizeof(emu_fingerprint_hal_device_t));
+
+    dev->device.common.tag = HARDWARE_DEVICE_TAG;
+    dev->device.common.version = HARDWARE_MODULE_API_VERSION(2, 0);
+    dev->device.common.module = (struct hw_module_t*) module;
+    dev->device.common.close = fingerprint_close;
+    dev->device.pre_enroll = fingerprint_pre_enroll;
+    dev->device.enroll = fingerprint_enroll;
+    dev->device.get_authenticator_id = fingerprint_get_auth_id;
+    dev->device.set_active_group = fingerprint_set_active_group;
+    dev->device.authenticate = fingerprint_authenticate;
+    dev->device.cancel = fingerprint_cancel;
+    dev->device.enumerate = fingerprint_enumerate;
+    dev->device.remove = fingerprint_remove;
+    dev->device.set_notify = set_notify_callback;
+    dev->device.notify = NULL;
+
+    dev->authenticator_id = 0xdeadbeef;
+
+    pthread_mutex_init(&dev->lock, NULL);
+    createListenerThread(dev);
+    *device = (hw_device_t*) dev;
+    return 0;
+}
+
+static struct hw_module_methods_t fingerprint_module_methods = {
+    .open = fingerprint_open,
+};
+
+fingerprint_module_t HAL_MODULE_INFO_SYM = {
+    .common = {
+        .tag                = HARDWARE_MODULE_TAG,
+        .module_api_version = FINGERPRINT_MODULE_API_VERSION_2_0,
+        .hal_api_version    = HARDWARE_HAL_API_VERSION,
+        .id                 = FINGERPRINT_HARDWARE_MODULE_ID,
+        .name               = "Emulator Fingerprint HAL",
+        .author             = "The Android Open Source Project",
+        .methods            = &fingerprint_module_methods,
+    },
+};
diff --git a/fstab.goldfish b/fstab.goldfish
index d2d42b7..1e94443 100644
--- a/fstab.goldfish
+++ b/fstab.goldfish
@@ -5,4 +5,4 @@
 /dev/block/mtdblock0                                    /system             ext4      ro,barrier=1                                         wait
 /dev/block/mtdblock1                                    /data               ext4      noatime,nosuid,nodev,barrier=1,nomblk_io_submit      wait,check
 /dev/block/mtdblock2                                    /cache              ext4      noatime,nosuid,nodev  wait,check
-/devices/platform/goldfish_mmc.0                        auto                vfat      defaults                                             voldmanaged=sdcard:auto,noemulatedsd
+/devices/platform/goldfish_mmc.0*                       auto                auto      defaults                                             voldmanaged=sdcard:auto,noemulatedsd
diff --git a/init.goldfish.rc b/init.goldfish.rc
index ead468d..2058bad 100644
--- a/init.goldfish.rc
+++ b/init.goldfish.rc
@@ -3,15 +3,11 @@
     mount debugfs debugfs /sys/kernel/debug
 
 on init
-    # See storage config details at http://source.android.com/tech/storage/
-    mkdir /mnt/media_rw/sdcard 0700 media_rw media_rw
-    mkdir /storage/sdcard 0700 root root
-
-    export EXTERNAL_STORAGE /storage/sdcard
-
     # Support legacy paths
-    symlink /storage/sdcard /sdcard
-    symlink /storage/sdcard /mnt/sdcard
+    symlink /sdcard /mnt/sdcard
+
+    # By default, primary storage is physical
+    setprop ro.vold.primary_physical 1
 
 on boot
     setprop ARGH ARGH
@@ -90,7 +86,6 @@
 service goldfish-logcat /system/bin/logcat -Q
     oneshot
 
-# fusewrapped external sdcard daemon running as media_rw (1023)
-service fuse_sdcard /system/bin/sdcard -u 1023 -g 1023 -d /mnt/media_rw/sdcard /storage/sdcard
+service fingerprintd /system/bin/fingerprintd
     class late_start
-    disabled
+    user system
diff --git a/opengl/system/gralloc/gralloc.cpp b/opengl/system/gralloc/gralloc.cpp
index 059bf12..ef18511 100644
--- a/opengl/system/gralloc/gralloc.cpp
+++ b/opengl/system/gralloc/gralloc.cpp
@@ -211,11 +211,12 @@
             glFormat = GL_RGB;
             glType = GL_UNSIGNED_SHORT_5_6_5;
             break;
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_Y16:
             bpp = 2;
             align = 16*bpp;
             if (! ((sw_read || hw_cam_read) && (sw_write || hw_cam_write) ) ) {
-                // Raw sensor data only goes between camera and CPU
+                // Raw sensor data or Y16 only goes between camera and CPU
                 return -EINVAL;
             }
             // Not expecting to actually create any GL surfaces for this